Dataset columns (name, type, and observed value sizes):

    repository_name            string, length 5 to 67
    func_path_in_repository    string, length 4 to 234
    func_name                  string, length 0 to 314
    whole_func_string          string, length 52 to 3.87M
    language                   string, 6 classes
    func_code_string           string, length 52 to 3.87M
    func_code_tokens           list, length 15 to 672k
    func_documentation_string  string, length 1 to 47.2k
    func_documentation_tokens  list, length 1 to 3.92k
    split_name                 string, 1 class
    func_code_url              string, length 85 to 339
repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.encode_fetch_request
whole_func_string: def encode_fetch_request(cls, client_id, correlation_id, payloads=None, max_wait_time=100, min_bytes=4096): """ Encodes some FetchRequest structs :param bytes client_id: :param int correlation_id: :param list payloads: list of :class:`FetchRequest` ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "encode_fetch_request", "(", "cls", ",", "client_id", ",", "correlation_id", ",", "payloads", "=", "None", ",", "max_wait_time", "=", "100", ",", "min_bytes", "=", "4096", ")", ":", "payloads", "=", "[", "]", "if", "payloads", "is", "None", "else", ...
func_documentation_string: Encodes some FetchRequest structs :param bytes client_id: :param int correlation_id: :param list payloads: list of :class:`FetchRequest` :param int max_wait_time: how long to block waiting on min_bytes of data :param int min_bytes: the minimum number of bytes to accu...
func_documentation_tokens: [ "Encodes", "some", "FetchRequest", "structs" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L268-L300
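A minimal sketch of how the encoder above might be invoked. Only the signature and parameter docs come from the record; the assumption that FetchRequest is the namedtuple from afkak.common with fields (topic, partition, offset, max_bytes) is mine:

    # Hedged sketch: the FetchRequest field names are an assumption
    # about afkak.common, not something stated in the record above.
    from afkak.common import FetchRequest
    from afkak.kafkacodec import KafkaCodec

    payloads = [FetchRequest(b"my-topic", 0, offset=0, max_bytes=64 * 1024)]
    request_bytes = KafkaCodec.encode_fetch_request(
        b"my-client",       # client_id (bytes, per the docstring)
        1234,               # correlation_id
        payloads=payloads,
        max_wait_time=100,  # how long to block waiting on min_bytes of data
        min_bytes=4096,     # minimum bytes to accumulate before the broker replies
    )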
repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.decode_fetch_response
whole_func_string: def decode_fetch_response(cls, data): """ Decode bytes to a FetchResponse :param bytes data: bytes to decode """ ((correlation_id, num_topics), cur) = relative_unpack('>ii', data, 0) for _i in range(num_topics): (topic, cur) = read_short_ascii(data, cur) ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "decode_fetch_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", "num_topics", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>ii'", ",", "data", ",", "0", ")", "for", "_i", "in", "range", "(", "num_topics", ")"...
func_documentation_string: Decode bytes to a FetchResponse :param bytes data: bytes to decode
func_documentation_tokens: [ "Decode", "bytes", "to", "a", "FetchResponse" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L303-L324

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.decode_offset_response
whole_func_string: def decode_offset_response(cls, data): """ Decode bytes to an :class:`OffsetResponse` :param bytes data: bytes to decode """ ((correlation_id, num_topics), cur) = relative_unpack('>ii', data, 0) for _i in range(num_topics): (topic, cur) = read_short_ascii(da...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "decode_offset_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", "num_topics", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>ii'", ",", "data", ",", "0", ")", "for", "_i", "in", "range", "(", "num_topics", ")...
func_documentation_string: Decode bytes to an :class:`OffsetResponse` :param bytes data: bytes to decode
func_documentation_tokens: [ "Decode", "bytes", "to", "an", ":", "class", ":", "OffsetResponse" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L348-L369

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.encode_metadata_request
whole_func_string: def encode_metadata_request(cls, client_id, correlation_id, topics=None): """ Encode a MetadataRequest :param bytes client_id: string :param int correlation_id: int :param list topics: list of text """ topics = [] if topics is None else topics message = [...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "encode_metadata_request", "(", "cls", ",", "client_id", ",", "correlation_id", ",", "topics", "=", "None", ")", ":", "topics", "=", "[", "]", "if", "topics", "is", "None", "else", "topics", "message", "=", "[", "cls", ".", "_encode_message_header", ...
func_documentation_string: Encode a MetadataRequest :param bytes client_id: string :param int correlation_id: int :param list topics: list of text
func_documentation_tokens: [ "Encode", "a", "MetadataRequest" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L372-L388
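A companion sketch for the metadata encoder above; inside the function the None default becomes an empty topic list, which in the Kafka protocol conventionally requests metadata for all topics:

    from afkak.kafkacodec import KafkaCodec

    # No topics: None -> [] inside the function, i.e. "all topics".
    all_topics_req = KafkaCodec.encode_metadata_request(b"my-client", 7)
    # Metadata for a single named topic.
    one_topic_req = KafkaCodec.encode_metadata_request(b"my-client", 8, [u"my-topic"])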
repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.decode_metadata_response
whole_func_string: def decode_metadata_response(cls, data): """ Decode bytes to a MetadataResponse :param bytes data: bytes to decode """ ((correlation_id, numbrokers), cur) = relative_unpack('>ii', data, 0) # In testing, I saw this routine swap my machine to death when # passed b...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "decode_metadata_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", "numbrokers", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>ii'", ",", "data", ",", "0", ")", "# In testing, I saw this routine swap my machine to death ...
func_documentation_string: Decode bytes to a MetadataResponse :param bytes data: bytes to decode
func_documentation_tokens: [ "Decode", "bytes", "to", "a", "MetadataResponse" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L391-L441

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.encode_consumermetadata_request
whole_func_string: def encode_consumermetadata_request(cls, client_id, correlation_id, consumer_group): """ Encode a ConsumerMetadataRequest :param bytes client_id: string :param int correlation_id: int :param str consumer_group: string """ m...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "encode_consumermetadata_request", "(", "cls", ",", "client_id", ",", "correlation_id", ",", "consumer_group", ")", ":", "message", "=", "cls", ".", "_encode_message_header", "(", "client_id", ",", "correlation_id", ",", "KafkaCodec", ".", "CONSUMER_METADATA_KE...
func_documentation_string: Encode a ConsumerMetadataRequest :param bytes client_id: string :param int correlation_id: int :param str consumer_group: string
func_documentation_tokens: [ "Encode", "a", "ConsumerMetadataRequest" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L444-L456

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.decode_consumermetadata_response
whole_func_string: def decode_consumermetadata_response(cls, data): """ Decode bytes to a ConsumerMetadataResponse :param bytes data: bytes to decode """ (correlation_id, error_code, node_id), cur = \ relative_unpack('>ihi', data, 0) host, cur = read_short_ascii(data, cur) ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "decode_consumermetadata_response", "(", "cls", ",", "data", ")", ":", "(", "correlation_id", ",", "error_code", ",", "node_id", ")", ",", "cur", "=", "relative_unpack", "(", "'>ihi'", ",", "data", ",", "0", ")", "host", ",", "cur", "=", "read_short...
func_documentation_string: Decode bytes to a ConsumerMetadataResponse :param bytes data: bytes to decode
func_documentation_tokens: [ "Decode", "bytes", "to", "a", "ConsumerMetadataResponse" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L459-L471

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.encode_offset_commit_request
whole_func_string: def encode_offset_commit_request(cls, client_id, correlation_id, group, group_generation_id, consumer_id, payloads): """ Encode some OffsetCommitRequest structs (v1) :param bytes client_id: string :param int corre...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "encode_offset_commit_request", "(", "cls", ",", "client_id", ",", "correlation_id", ",", "group", ",", "group_generation_id", ",", "consumer_id", ",", "payloads", ")", ":", "grouped_payloads", "=", "group_by_topic_and_partition", "(", "payloads", ")", "message...
func_documentation_string: Encode some OffsetCommitRequest structs (v1) :param bytes client_id: string :param int correlation_id: int :param str group: the consumer group to which you are committing offsets :param int group_generation_id: int32, generation ID of the group :param str consumer_id: string, I...
func_documentation_tokens: [ "Encode", "some", "OffsetCommitRequest", "structs", "(", "v1", ")" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L474-L508
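The token stream above opens with group_by_topic_and_partition(payloads). A self-contained re-creation of that grouping pattern, using a hypothetical Payload stand-in rather than afkak's real request types:

    from collections import defaultdict, namedtuple

    def group_by_topic_and_partition(payloads):
        """Illustrative re-creation: map {topic: {partition: payload}}."""
        grouped = defaultdict(dict)
        for p in payloads:
            grouped[p.topic][p.partition] = p
        return grouped

    # Hypothetical stand-in for an OffsetCommitRequest-style payload.
    Payload = namedtuple("Payload", "topic partition offset")
    grouped = group_by_topic_and_partition([Payload(b"t", 0, 5), Payload(b"t", 1, 7)])
    assert grouped[b"t"][1].offset == 7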
repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.decode_offset_commit_response
whole_func_string: def decode_offset_commit_response(cls, data): """ Decode bytes to an OffsetCommitResponse :param bytes data: bytes to decode """ ((correlation_id,), cur) = relative_unpack('>i', data, 0) ((num_topics,), cur) = relative_unpack('>i', data, cur) for _i in range(num...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "decode_offset_commit_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "0", ")", "(", "(", "num_topics", ",", ")", ",", "cur", ")", "=",...
func_documentation_string: Decode bytes to an OffsetCommitResponse :param bytes data: bytes to decode
func_documentation_tokens: [ "Decode", "bytes", "to", "an", "OffsetCommitResponse" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L511-L526

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.encode_offset_fetch_request
whole_func_string: def encode_offset_fetch_request(cls, client_id, correlation_id, group, payloads): """ Encode some OffsetFetchRequest structs :param bytes client_id: string :param int correlation_id: int :param bytes group: string, the consumer group you are f...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "encode_offset_fetch_request", "(", "cls", ",", "client_id", ",", "correlation_id", ",", "group", ",", "payloads", ")", ":", "grouped_payloads", "=", "group_by_topic_and_partition", "(", "payloads", ")", "message", "=", "cls", ".", "_encode_message_header", "...
func_documentation_string: Encode some OffsetFetchRequest structs :param bytes client_id: string :param int correlation_id: int :param bytes group: string, the consumer group you are fetching offsets for :param list payloads: list of :class:`OffsetFetchRequest`
func_documentation_tokens: [ "Encode", "some", "OffsetFetchRequest", "structs" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L529-L554

repository_name: ciena/afkak
func_path_in_repository: afkak/kafkacodec.py
func_name: KafkaCodec.decode_offset_fetch_response
whole_func_string: def decode_offset_fetch_response(cls, data): """ Decode bytes to an OffsetFetchResponse :param bytes data: bytes to decode """ ((correlation_id,), cur) = relative_unpack('>i', data, 0) ((num_topics,), cur) = relative_unpack('>i', data, cur) for _i in range(num_...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "decode_offset_fetch_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "0", ")", "(", "(", "num_topics", ",", ")", ",", "cur", ")", "=", ...
func_documentation_string: Decode bytes to an OffsetFetchResponse :param bytes data: bytes to decode
func_documentation_tokens: [ "Decode", "bytes", "to", "an", "OffsetFetchResponse" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/kafkacodec.py#L557-L577

repository_name: ciena/afkak
func_path_in_repository: afkak/_protocol.py
func_name: KafkaBootstrapProtocol.stringReceived
whole_func_string: def stringReceived(self, response): """ Handle a response from the broker. """ correlation_id = response[0:4] try: d = self._pending.pop(correlation_id) except KeyError: self._log.warn(( "Response has unknown correlation ID {correla...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "stringReceived", "(", "self", ",", "response", ")", ":", "correlation_id", "=", "response", "[", "0", ":", "4", "]", "try", ":", "d", "=", "self", ".", "_pending", ".", "pop", "(", "correlation_id", ")", "except", "KeyError", ":", "self", ".", ...
func_documentation_string: Handle a response from the broker.
func_documentation_tokens: [ "Handle", "a", "response", "from", "the", "broker", "." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/_protocol.py#L78-L92

repository_name: ciena/afkak
func_path_in_repository: afkak/_protocol.py
func_name: KafkaBootstrapProtocol.connectionLost
whole_func_string: def connectionLost(self, reason=connectionDone): """ Mark the protocol as failed and fail all pending operations. """ self._failed = reason pending, self._pending = self._pending, None for d in pending.values(): d.errback(reason)
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "connectionLost", "(", "self", ",", "reason", "=", "connectionDone", ")", ":", "self", ".", "_failed", "=", "reason", "pending", ",", "self", ".", "_pending", "=", "self", ".", "_pending", ",", "None", "for", "d", "in", "pending", ".", "values", ...
func_documentation_string: Mark the protocol as failed and fail all pending operations.
func_documentation_tokens: [ "Mark", "the", "protocol", "as", "failed", "and", "fail", "all", "pending", "operations", "." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/_protocol.py#L94-L101

repository_name: ciena/afkak
func_path_in_repository: afkak/_protocol.py
func_name: KafkaBootstrapProtocol.request
whole_func_string: def request(self, request): """ Send a request to the Kafka broker. :param bytes request: The bytes of a Kafka `RequestMessage`_ structure. It must have a unique (to this connection) correlation ID. :returns: `Deferred` which will: - S...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "request", "(", "self", ",", "request", ")", ":", "if", "self", ".", "_failed", "is", "not", "None", ":", "return", "fail", "(", "self", ".", "_failed", ")", "correlation_id", "=", "request", "[", "4", ":", "8", "]", "assert", "correlation_id", ...
func_documentation_string: Send a request to the Kafka broker. :param bytes request: The bytes of a Kafka `RequestMessage`_ structure. It must have a unique (to this connection) correlation ID. :returns: `Deferred` which will: - Succeed with the bytes of a Kafka `ResponseMessag...
func_documentation_tokens: [ "Send", "a", "request", "to", "the", "Kafka", "broker", "." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/_protocol.py#L110-L134
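Taken together, the three KafkaBootstrapProtocol records above describe one bookkeeping pattern: request() parks a Deferred under the request's 4-byte correlation ID, stringReceived() pops and fires it by the response's correlation ID, and connectionLost() fails everything still pending. A condensed illustration of that pattern (not afkak's actual class):

    from twisted.internet.defer import Deferred, fail

    class CorrelatedRequests:
        def __init__(self):
            self._pending = {}
            self._failed = None

        def request(self, request_bytes):
            if self._failed is not None:
                return fail(self._failed)
            correlation_id = request_bytes[4:8]   # per the quoted request()
            d = self._pending[correlation_id] = Deferred()
            # ... write request_bytes to the transport here ...
            return d

        def string_received(self, response_bytes):
            correlation_id = response_bytes[0:4]  # per the quoted stringReceived()
            d = self._pending.pop(correlation_id, None)
            if d is not None:                     # afkak only logs unknown IDs
                d.callback(response_bytes)

        def connection_lost(self, reason):
            self._failed = reason
            pending, self._pending = self._pending, {}
            for d in pending.values():
                d.errback(reason)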
repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer.send_messages
whole_func_string: def send_messages(self, topic, key=None, msgs=()): """ Given a topic, and optional key (for partitioning) and a list of messages, send them to Kafka, either immediately, or when a batch is ready, depending on the Producer's batch settings. :param str topic: Kafka topic to send t...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "send_messages", "(", "self", ",", "topic", ",", "key", "=", "None", ",", "msgs", "=", "(", ")", ")", ":", "try", ":", "topic", "=", "_coerce_topic", "(", "topic", ")", "if", "key", "is", "not", "None", "and", "not", "isinstance", "(", "key"...
func_documentation_string: Given a topic, and optional key (for partitioning) and a list of messages, send them to Kafka, either immediately, or when a batch is ready, depending on the Producer's batch settings. :param str topic: Kafka topic to send the messages to :param str key: Message key used to...
func_documentation_tokens: [ "Given", "a", "topic", "and", "optional", "key", "(", "for", "partitioning", ")", "and", "a", "list", "of", "messages", "send", "them", "to", "Kafka", "either", "immediately", "or", "when", "a", "batch", "is", "ready", "depending", "on", "the", "Producer",...
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L166-L233
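A hedged end-to-end sketch of send_messages() under Twisted's reactor. The module paths match the records above; the KafkaClient("host:port") constructor form and a broker at localhost:9092 are assumptions:

    from twisted.internet import defer, reactor

    from afkak.client import KafkaClient
    from afkak.producer import Producer

    @defer.inlineCallbacks
    def produce():
        client = KafkaClient("localhost:9092")  # assumed bootstrap host
        producer = Producer(client)
        try:
            # key is optional and only used to pick a partition
            yield producer.send_messages("my-topic", key=b"user-1", msgs=[b"a", b"b"])
        finally:
            yield producer.stop()
            yield client.close()
            reactor.stop()

    reactor.callWhenRunning(produce)
    reactor.run()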
repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer.stop
whole_func_string: def stop(self): """ Terminate any outstanding requests. :returns: :class:`Deferred` which fires when fully stopped. """ self.stopping = True # Cancel any outstanding request to our client if self._batch_send_d: self._batch_send_d.cancel() # D...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "stop", "(", "self", ")", ":", "self", ".", "stopping", "=", "True", "# Cancel any outstanding request to our client", "if", "self", ".", "_batch_send_d", ":", "self", ".", "_batch_send_d", ".", "cancel", "(", ")", "# Do we have to worry about our looping call?...
func_documentation_string: Terminate any outstanding requests. :returns: :class:`Deferred` which fires when fully stopped.
func_documentation_tokens: [ "Terminate", "any", "outstanding", "requests", "." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L235-L252

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._send_timer_failed
whole_func_string: def _send_timer_failed(self, fail): """ Our _send_batch() function called by the LoopingCall failed. Some error probably came back from Kafka and _check_error() raised the exception. For now, just log the failure and restart the loop. """ log.warning('_send_timer_fa...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_send_timer_failed", "(", "self", ",", "fail", ")", ":", "log", ".", "warning", "(", "'_send_timer_failed:%r: %s'", ",", "fail", ",", "fail", ".", "getBriefTraceback", "(", ")", ")", "self", ".", "_sendLooperD", "=", "self", ".", "_sendLooper", ".", ...
func_documentation_string: Our _send_batch() function called by the LoopingCall failed. Some error probably came back from Kafka and _check_error() raised the exception. For now, just log the failure and restart the loop.
func_documentation_tokens: [ "Our", "_send_batch", "()", "function", "called", "by", "the", "LoopingCall", "failed", ".", "Some", "error", "probably", "came", "back", "from", "Kafka", "and", "_check_error", "()", "raised", "the", "exception", "For", "now", "just", "log", "the", "failure",...
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L256-L266

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._send_timer_stopped
whole_func_string: def _send_timer_stopped(self, lCall): """ We're shutting down, clean up our looping call... """ if self._sendLooper is not lCall: log.warning('commitTimerStopped with wrong timer:%s not:%s', lCall, self._sendLooper) else: self._send...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_send_timer_stopped", "(", "self", ",", "lCall", ")", ":", "if", "self", ".", "_sendLooper", "is", "not", "lCall", ":", "log", ".", "warning", "(", "'commitTimerStopped with wrong timer:%s not:%s'", ",", "lCall", ",", "self", ".", "_sendLooper", ")", "...
func_documentation_string: We're shutting down, clean up our looping call...
func_documentation_tokens: [ "We", "re", "shutting", "down", "clean", "up", "our", "looping", "call", "..." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L268-L277

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._next_partition
whole_func_string: def _next_partition(self, topic, key=None): """get the next partition to which to publish Check with our client for the latest partitions for the topic, then ask our partitioner for the next partition to which we should publish for the given key. If needed, create a new partitioner for t...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_next_partition", "(", "self", ",", "topic", ",", "key", "=", "None", ")", ":", "# check if the client has metadata for the topic", "while", "self", ".", "client", ".", "metadata_error_for_topic", "(", "topic", ")", ":", "# client doesn't have good metadata for ...
func_documentation_string: get the next partition to which to publish Check with our client for the latest partitions for the topic, then ask our partitioner for the next partition to which we should publish for the given key. If needed, create a new partitioner for the topic.
func_documentation_tokens: [ "get", "the", "next", "partition", "to", "which", "to", "publish" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L280-L313

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._send_requests
whole_func_string: def _send_requests(self, parts_results, requests): """Send the requests We've determined the partition for each message group in the batch, or got errors for them. """ # We use these dictionaries to be able to combine all the messages # destined to the same topic/partiti...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_send_requests", "(", "self", ",", "parts_results", ",", "requests", ")", ":", "# We use these dictionaries to be able to combine all the messages", "# destined to the same topic/partition into one request", "# the messages & deferreds, both by topic+partition", "reqsByTopicPart", ...
func_documentation_string: Send the requests We've determined the partition for each message group in the batch, or got errors for them.
func_documentation_tokens: [ "Send", "the", "requests" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L315-L378

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._complete_batch_send
whole_func_string: def _complete_batch_send(self, resp): """Complete the processing of our batch send operation Clear the deferred tracking our current batch processing and reset our retry count and retry interval. Return None to eat any errors coming from up the deferred chain. """ self._ba...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_complete_batch_send", "(", "self", ",", "resp", ")", ":", "self", ".", "_batch_send_d", "=", "None", "self", ".", "_req_attempts", "=", "0", "self", ".", "_retry_interval", "=", "self", ".", "_init_retry_interval", "if", "isinstance", "(", "resp", "...
func_documentation_string: Complete the processing of our batch send operation Clear the deferred tracking our current batch processing and reset our retry count and retry interval. Return None to eat any errors coming from up the deferred chain.
func_documentation_tokens: [ "Complete", "the", "processing", "of", "our", "batch", "send", "operation" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L380-L394

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._check_send_batch
whole_func_string: def _check_send_batch(self, result=None): """Check if we have enough messages/bytes to send Since this can be called from the callback chain, we pass through our first (non-self) arg. """ if ( (self.batch_every_n and self.batch_every_n <= self._waitingMsgCount) or ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_check_send_batch", "(", "self", ",", "result", "=", "None", ")", ":", "if", "(", "(", "self", ".", "batch_every_n", "and", "self", ".", "batch_every_n", "<=", "self", ".", "_waitingMsgCount", ")", "or", "(", "self", ".", "batch_every_b", "and", ...
func_documentation_string: Check if we have enough messages/bytes to send Since this can be called from the callback chain, we pass through our first (non-self) arg.
func_documentation_tokens: [ "Check", "if", "we", "have", "enough", "messages", "/", "bytes", "to", "send", "Since", "this", "can", "be", "called", "from", "the", "callback", "chain", "we", "pass", "through", "our", "first", "(", "non", "-", "self", ")", "arg" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L396-L406
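An illustrative reduction of the trigger logic _check_send_batch describes: a batch fires as soon as either the message-count threshold (batch_every_n) or the byte threshold (batch_every_b) is reached. The attribute names mirror the quoted code; the class itself is hypothetical:

    class BatchGate:
        def __init__(self, batch_every_n=10, batch_every_b=16 * 1024):
            self.batch_every_n = batch_every_n
            self.batch_every_b = batch_every_b
            self._waiting_msg_count = 0
            self._waiting_byte_count = 0

        def add(self, msg):
            """Record one queued message; return True when a batch should fire."""
            self._waiting_msg_count += 1
            self._waiting_byte_count += len(msg)
            return bool(
                (self.batch_every_n and self.batch_every_n <= self._waiting_msg_count)
                or (self.batch_every_b and self.batch_every_b <= self._waiting_byte_count)
            )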
repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._send_batch
whole_func_string: def _send_batch(self): """ Send the waiting messages, if there are any, and we can... This is called by our LoopingCall every send_every_t interval, and from send_messages every time we have enough messages to send. This is also called from py:method:`send_messages` via p...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_send_batch", "(", "self", ")", ":", "# We can be triggered by the LoopingCall, and have nothing to send...", "# Or, we've got SendRequest(s) to send, but are still processing the", "# previous batch...", "if", "(", "not", "self", ".", "_batch_reqs", ")", "or", "self", "....
func_documentation_string: Send the waiting messages, if there are any, and we can... This is called by our LoopingCall every send_every_t interval, and from send_messages every time we have enough messages to send. This is also called from py:method:`send_messages` via py:method:`_check_send_batch` if there are e...
func_documentation_tokens: [ "Send", "the", "waiting", "messages", "if", "there", "are", "any", "and", "we", "can", "..." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L408-L449

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._cancel_send_messages
whole_func_string: def _cancel_send_messages(self, d): """Cancel a `send_messages` request First check if the request is in a waiting batch; if so, remove it from the batch. If it's not found, we errback() the deferred and the downstream processing steps take care of aborting further process...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_cancel_send_messages", "(", "self", ",", "d", ")", ":", "# Is the request in question in an unsent batch?", "for", "req", "in", "self", ".", "_batch_reqs", ":", "if", "req", ".", "deferred", "==", "d", ":", "# Found the request, remove it and return.", "msgs"...
func_documentation_string: Cancel a `send_messages` request First check if the request is in a waiting batch; if so, remove it from the batch. If it's not found, we errback() the deferred and the downstream processing steps take care of aborting further processing. We check if there's a current _bat...
func_documentation_tokens: [ "Cancel", "a", "send_messages", "request", "First", "check", "if", "the", "request", "is", "in", "a", "waiting", "batch", "of", "so", "great", "remove", "it", "from", "the", "batch", ".", "If", "it", "s", "not", "found", "we", "errback", "()", "the", "...
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L451-L481

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._handle_send_response
whole_func_string: def _handle_send_response(self, result, payloadsByTopicPart, deferredsByTopicPart): """Handle the response from our client to our send_produce_request This is a bit complex. Failures can happen in a few ways: 1. The client sent an empty list, False, None or some...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_handle_send_response", "(", "self", ",", "result", ",", "payloadsByTopicPart", ",", "deferredsByTopicPart", ")", ":", "def", "_deliver_result", "(", "d_list", ",", "result", "=", "None", ")", ":", "\"\"\"Possibly callback each deferred in a list with single resul...
func_documentation_string: Handle the response from our client to our send_produce_request This is a bit complex. Failures can happen in a few ways: 1. The client sent an empty list, False, None or some similar thing as the result, but we were expecting real responses. 2. The client had a failure before...
func_documentation_tokens: [ "Handle", "the", "response", "from", "our", "client", "to", "our", "send_produce_request" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L483-L650

repository_name: ciena/afkak
func_path_in_repository: afkak/producer.py
func_name: Producer._cancel_outstanding
whole_func_string: def _cancel_outstanding(self): """Cancel all of our outstanding requests""" for d in list(self._outstanding): d.addErrback(lambda _: None) # Eat any uncaught errors d.cancel()
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_cancel_outstanding", "(", "self", ")", ":", "for", "d", "in", "list", "(", "self", ".", "_outstanding", ")", ":", "d", ".", "addErrback", "(", "lambda", "_", ":", "None", ")", "# Eat any uncaught errors", "d", ".", "cancel", "(", ")" ]
func_documentation_string: Cancel all of our outstanding requests
func_documentation_tokens: [ "Cancel", "all", "of", "our", "outstanding", "requests" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/producer.py#L657-L661

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: _normalize_hosts
whole_func_string: def _normalize_hosts(hosts): """ Canonicalize the *hosts* parameter. >>> _normalize_hosts("host,127.0.0.2:2909") [('127.0.0.2', 2909), ('host', 9092)] :param hosts: A list or comma-separated string of hostnames which may also include port numbers. All of the following are valid:: ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "_normalize_hosts", "(", "hosts", ")", ":", "if", "isinstance", "(", "hosts", ",", "bytes", ")", ":", "hosts", "=", "hosts", ".", "split", "(", "b','", ")", "elif", "isinstance", "(", "hosts", ",", "_unicode", ")", ":", "hosts", "=", "hosts", ...
func_documentation_string: Canonicalize the *hosts* parameter. >>> _normalize_hosts("host,127.0.0.2:2909") [('127.0.0.2', 2909), ('host', 9092)] :param hosts: A list or comma-separated string of hostnames which may also include port numbers. All of the following are valid:: b'host' u'host' ...
func_documentation_tokens: [ "Canonicalize", "the", "*", "hosts", "*", "parameter", "." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L1083-L1120
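The doctest in the record above pins down the _normalize_hosts() contract: accept a comma-separated string (or list), default missing ports to 9092, and return a sorted list of (host, port) tuples. A re-implementation sketch of that contract (afkak's real version also accepts bytes input; the de-duplication here is an assumption):

    def normalize_hosts(hosts):
        if isinstance(hosts, str):
            hosts = hosts.split(",")
        result = set()
        for entry in hosts:
            entry = str(entry).strip()
            if ":" in entry:
                host, port = entry.rsplit(":", 1)
                result.add((host, int(port)))
            else:
                result.add((entry, 9092))  # default Kafka port
        return sorted(result)

    assert normalize_hosts("host,127.0.0.2:2909") == [("127.0.0.2", 2909), ("host", 9092)]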
repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.reset_consumer_group_metadata
whole_func_string: def reset_consumer_group_metadata(self, *groups): """Reset cache of which broker manages the offset for specified groups Remove the cache of which Kafka broker should be contacted when fetching or updating the committed offsets for a given consumer group or groups. NOTE: Does not...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "reset_consumer_group_metadata", "(", "self", ",", "*", "groups", ")", ":", "groups", "=", "tuple", "(", "_coerce_consumer_group", "(", "g", ")", "for", "g", "in", "groups", ")", "for", "group", "in", "groups", ":", "if", "group", "in", "self", "....
func_documentation_string: Reset cache of which broker manages the offset for specified groups Remove the cache of which Kafka broker should be contacted when fetching or updating the committed offsets for a given consumer group or groups. NOTE: Does not cancel any outstanding requests for updates to the c...
func_documentation_tokens: [ "Reset", "cache", "of", "what", "broker", "manages", "the", "offset", "for", "specified", "groups" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L252-L265

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.reset_all_metadata
whole_func_string: def reset_all_metadata(self): """Clear all cached metadata Metadata will be re-fetched as required to satisfy requests. """ self.topics_to_brokers.clear() self.topic_partitions.clear() self.topic_errors.clear() self.consumer_group_to_brokers.clear()
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "reset_all_metadata", "(", "self", ")", ":", "self", ".", "topics_to_brokers", ".", "clear", "(", ")", "self", ".", "topic_partitions", ".", "clear", "(", ")", "self", ".", "topic_errors", ".", "clear", "(", ")", "self", ".", "consumer_group_to_broker...
func_documentation_string: Clear all cached metadata Metadata will be re-fetched as required to satisfy requests.
func_documentation_tokens: [ "Clear", "all", "cached", "metadata" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L267-L275

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.topic_fully_replicated
whole_func_string: def topic_fully_replicated(self, topic): """ Determine if the given topic is fully replicated according to the currently known cluster metadata. .. note:: This relies on cached cluster metadata. You may call :meth:`load_metadata_for_topics()` first to refresh th...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "topic_fully_replicated", "(", "self", ",", "topic", ")", ":", "topic", "=", "_coerce_topic", "(", "topic", ")", "if", "topic", "not", "in", "self", ".", "topic_partitions", ":", "return", "False", "if", "not", "self", ".", "topic_partitions", "[", ...
func_documentation_string: Determine if the given topic is fully replicated according to the currently known cluster metadata. .. note:: This relies on cached cluster metadata. You may call :meth:`load_metadata_for_topics()` first to refresh this cache. :param str topic: Topic name :ret...
func_documentation_tokens: [ "Determine", "if", "the", "given", "topic", "is", "fully", "replicated", "according", "to", "the", "currently", "known", "cluster", "metadata", "." ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L290-L319
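The record above only shows the guards (an unknown topic, or a topic with no partitions, returns False). The final check below, that every partition's in-sync replica set matches its assigned replica set, is an interpretation of what "fully replicated" means here, not quoted code:

    def topic_fully_replicated(topic_partitions, partition_meta, topic):
        """Interpretive sketch; partition_meta maps (topic, partition) to
        an object with .replicas and .isr sequences (assumed shape)."""
        if topic not in topic_partitions:
            return False
        if not topic_partitions[topic]:
            return False  # topic known, but reports no partitions
        return all(
            set(partition_meta[(topic, p)].isr) == set(partition_meta[(topic, p)].replicas)
            for p in topic_partitions[topic]
        )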
repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.close
whole_func_string: def close(self): """Permanently dispose of the client - Immediately mark the client as closed, causing current operations to fail with :exc:`~afkak.common.CancelledError` and future operations to fail with :exc:`~afkak.common.ClientError`. - Clear cached metadata. - ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "close", "(", "self", ")", ":", "# If we're already waiting on an/some outstanding disconnects", "# make sure we continue to wait for them...", "log", ".", "debug", "(", "\"%r: close\"", ",", "self", ")", "self", ".", "_closing", "=", "True", "# Close down any client...
func_documentation_string: Permanently dispose of the client - Immediately mark the client as closed, causing current operations to fail with :exc:`~afkak.common.CancelledError` and future operations to fail with :exc:`~afkak.common.ClientError`. - Clear cached metadata. - Close any connections to Kaf...
func_documentation_tokens: [ "Permanently", "dispose", "of", "the", "client" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L321-L342

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.load_metadata_for_topics
whole_func_string: def load_metadata_for_topics(self, *topics): """Discover topic metadata and brokers Afkak internally calls this method whenever metadata is required. :param str topics: Topic names to look up. The resulting metadata includes the list of topic partitions, brokers owning ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "load_metadata_for_topics", "(", "self", ",", "*", "topics", ")", ":", "topics", "=", "tuple", "(", "_coerce_topic", "(", "t", ")", "for", "t", "in", "topics", ")", "log", ".", "debug", "(", "\"%r: load_metadata_for_topics(%s)\"", ",", "self", ",", ...
func_documentation_string: Discover topic metadata and brokers Afkak internally calls this method whenever metadata is required. :param str topics: Topic names to look up. The resulting metadata includes the list of topic partitions, brokers owning those partitions, and which partitions are i...
func_documentation_tokens: [ "Discover", "topic", "metadata", "and", "brokers" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L344-L440

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.load_consumer_metadata_for_group
whole_func_string: def load_consumer_metadata_for_group(self, group): """ Determine broker for the consumer metadata for the specified group Returns a deferred which callbacks with True if the group's coordinator could be determined, or errbacks with ConsumerCoordinatorNotAvailableError if not. ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "load_consumer_metadata_for_group", "(", "self", ",", "group", ")", ":", "group", "=", "_coerce_consumer_group", "(", "group", ")", "log", ".", "debug", "(", "\"%r: load_consumer_metadata_for_group(%r)\"", ",", "self", ",", "group", ")", "# If we are already lo...
func_documentation_string: Determine broker for the consumer metadata for the specified group Returns a deferred which callbacks with True if the group's coordinator could be determined, or errbacks with ConsumerCoordinatorNotAvailableError if not. Parameters ---------- group: group n...
func_documentation_tokens: [ "Determine", "broker", "for", "the", "consumer", "metadata", "for", "the", "specified", "group" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L442-L506

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.send_produce_request
whole_func_string: def send_produce_request(self, payloads=None, acks=1, timeout=DEFAULT_REPLICAS_ACK_MSECS, fail_on_error=True, callback=None): """ Encode and send some ProduceRequests ProduceRequests will be grouped by (topic, partition) and then ...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "send_produce_request", "(", "self", ",", "payloads", "=", "None", ",", "acks", "=", "1", ",", "timeout", "=", "DEFAULT_REPLICAS_ACK_MSECS", ",", "fail_on_error", "=", "True", ",", "callback", "=", "None", ")", ":", "encoder", "=", "partial", "(", "...
func_documentation_string: Encode and send some ProduceRequests ProduceRequests will be grouped by (topic, partition) and then sent to a specific broker. Output is a list of responses in the same order as the list of payloads specified Parameters ---------- payloads: list of ProduceRe...
func_documentation_tokens: [ "Encode", "and", "send", "some", "ProduceRequests" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L509-L557

repository_name: ciena/afkak
func_path_in_repository: afkak/client.py
func_name: KafkaClient.send_fetch_request
whole_func_string: def send_fetch_request(self, payloads=None, fail_on_error=True, callback=None, max_wait_time=DEFAULT_FETCH_SERVER_WAIT_MSECS, min_bytes=DEFAULT_FETCH_MIN_BYTES): """ Encode and send a FetchRequest Payloads are grou...
language: python
func_code_string: (identical to whole_func_string)
func_code_tokens: [ "def", "send_fetch_request", "(", "self", ",", "payloads", "=", "None", ",", "fail_on_error", "=", "True", ",", "callback", "=", "None", ",", "max_wait_time", "=", "DEFAULT_FETCH_SERVER_WAIT_MSECS", ",", "min_bytes", "=", "DEFAULT_FETCH_MIN_BYTES", ")", ":", "if",...
func_documentation_string: Encode and send a FetchRequest Payloads are grouped by topic and partition so they can be pipelined to the same brokers. Raises ====== FailedPayloadsError, LeaderUnavailableError, PartitionUnavailableError
func_documentation_tokens: [ "Encode", "and", "send", "a", "FetchRequest" ]
split_name: train
func_code_url: https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L560-L589
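A hedged sketch of driving send_fetch_request() from application code. It reuses the FetchRequest assumption from the codec example earlier, and assumes the returned Deferred fires with a list of response objects in payload order, as the sibling send_produce_request record states for its own responses:

    from twisted.internet import defer

    from afkak.common import FetchRequest

    @defer.inlineCallbacks
    def fetch_once(client):
        responses = yield client.send_fetch_request(
            [FetchRequest(b"my-topic", 0, offset=0, max_bytes=64 * 1024)],
            max_wait_time=100,  # server-side wait, matching the defaults quoted above
            min_bytes=4096,
        )
        defer.returnValue(responses)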
ciena/afkak
afkak/client.py
KafkaClient.send_offset_fetch_request
def send_offset_fetch_request(self, group, payloads=None, fail_on_error=True, callback=None): """ Takes a group (string) and list of OffsetFetchRequest and returns a list of OffsetFetchResponse objects """ encoder = partial(KafkaCodec.encode_offs...
python
def send_offset_fetch_request(self, group, payloads=None, fail_on_error=True, callback=None): """ Takes a group (string) and list of OffsetFetchRequest and returns a list of OffsetFetchResponse objects """ encoder = partial(KafkaCodec.encode_offs...
[ "def", "send_offset_fetch_request", "(", "self", ",", "group", ",", "payloads", "=", "None", ",", "fail_on_error", "=", "True", ",", "callback", "=", "None", ")", ":", "encoder", "=", "partial", "(", "KafkaCodec", ".", "encode_offset_fetch_request", ",", "grou...
Takes a group (string) and list of OffsetFetchRequest and returns a list of OffsetFetchResponse objects
[ "Takes", "a", "group", "(", "string", ")", "and", "list", "of", "OffsetFetchRequest", "and", "returns", "a", "list", "of", "OffsetFetchResponse", "objects" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L602-L615
ciena/afkak
afkak/client.py
KafkaClient.send_offset_commit_request
def send_offset_commit_request(self, group, payloads=None, fail_on_error=True, callback=None, group_generation_id=-1, consumer_id=''): """Send a list of OffsetCommitRequests to the Kafka broker for the ...
python
def send_offset_commit_request(self, group, payloads=None, fail_on_error=True, callback=None, group_generation_id=-1, consumer_id=''): """Send a list of OffsetCommitRequests to the Kafka broker for the ...
[ "def", "send_offset_commit_request", "(", "self", ",", "group", ",", "payloads", "=", "None", ",", "fail_on_error", "=", "True", ",", "callback", "=", "None", ",", "group_generation_id", "=", "-", "1", ",", "consumer_id", "=", "''", ")", ":", "group", "=",...
Send a list of OffsetCommitRequests to the Kafka broker for the given consumer group. Args: group (str): The consumer group to which to commit the offsets payloads ([OffsetCommitRequest]): List of topic, partition, offsets to commit. fail_on_error (bool): Wheth...
[ "Send", "a", "list", "of", "OffsetCommitRequests", "to", "the", "Kafka", "broker", "for", "the", "given", "consumer", "group", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L618-L648
ciena/afkak
afkak/client.py
KafkaClient._get_brokerclient
def _get_brokerclient(self, node_id): """ Get a broker client. :param int node_id: Broker node ID :raises KeyError: for an unknown node ID :returns: :class:`_KafkaBrokerClient` """ if self._closing: raise ClientError("Cannot get broker client for node...
python
def _get_brokerclient(self, node_id): """ Get a broker client. :param int node_id: Broker node ID :raises KeyError: for an unknown node ID :returns: :class:`_KafkaBrokerClient` """ if self._closing: raise ClientError("Cannot get broker client for node...
[ "def", "_get_brokerclient", "(", "self", ",", "node_id", ")", ":", "if", "self", ".", "_closing", ":", "raise", "ClientError", "(", "\"Cannot get broker client for node_id={}: {} has been closed\"", ".", "format", "(", "node_id", ",", "self", ")", ")", "if", "node...
Get a broker client. :param int node_id: Broker node ID :raises KeyError: for an unknown node ID :returns: :class:`_KafkaBrokerClient`
[ "Get", "a", "broker", "client", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L678-L695
ciena/afkak
afkak/client.py
KafkaClient._close_brokerclients
def _close_brokerclients(self, clients): """ Close the given broker clients. :param clients: Iterable of `_KafkaBrokerClient` """ def _log_close_failure(failure, brokerclient): log.debug( 'BrokerClient: %s close result: %s: %s', brokerclient, ...
python
def _close_brokerclients(self, clients): """ Close the given broker clients. :param clients: Iterable of `_KafkaBrokerClient` """ def _log_close_failure(failure, brokerclient): log.debug( 'BrokerClient: %s close result: %s: %s', brokerclient, ...
[ "def", "_close_brokerclients", "(", "self", ",", "clients", ")", ":", "def", "_log_close_failure", "(", "failure", ",", "brokerclient", ")", ":", "log", ".", "debug", "(", "'BrokerClient: %s close result: %s: %s'", ",", "brokerclient", ",", "failure", ".", "type",...
Close the given broker clients. :param clients: Iterable of `_KafkaBrokerClient`
[ "Close", "the", "given", "broker", "clients", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L697-L725
ciena/afkak
afkak/client.py
KafkaClient._update_brokers
def _update_brokers(self, brokers, remove=False): """ Update `self._brokers` and `self.clients` Update our self.clients based on brokers in received metadata Take the received dict of brokers and reconcile it with our current list of brokers (self.clients). If there is a new one...
python
def _update_brokers(self, brokers, remove=False): """ Update `self._brokers` and `self.clients` Update our self.clients based on brokers in received metadata Take the received dict of brokers and reconcile it with our current list of brokers (self.clients). If there is a new one...
[ "def", "_update_brokers", "(", "self", ",", "brokers", ",", "remove", "=", "False", ")", ":", "log", ".", "debug", "(", "\"%r: _update_brokers(%r, remove=%r)\"", ",", "self", ",", "brokers", ",", "remove", ")", "brokers_by_id", "=", "{", "bm", ".", "node_id"...
Update `self._brokers` and `self.clients` Update our self.clients based on brokers in received metadata Take the received dict of brokers and reconcile it with our current list of brokers (self.clients). If there is a new one, bring up a new connection to it, and if remove is True, and ...
[ "Update", "self", ".", "_brokers", "and", "self", ".", "clients" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L727-L762
ciena/afkak
afkak/client.py
KafkaClient._get_leader_for_partition
def _get_leader_for_partition(self, topic, partition): """ Returns the leader for a partition or None if the partition exists but has no leader. PartitionUnavailableError will be raised if the topic or partition is not part of the metadata. """ key = TopicAndPar...
python
def _get_leader_for_partition(self, topic, partition): """ Returns the leader for a partition or None if the partition exists but has no leader. PartitionUnavailableError will be raised if the topic or partition is not part of the metadata. """ key = TopicAndPar...
[ "def", "_get_leader_for_partition", "(", "self", ",", "topic", ",", "partition", ")", ":", "key", "=", "TopicAndPartition", "(", "topic", ",", "partition", ")", "# reload metadata whether the partition is not available", "# or has no leader (broker is None)", "if", "self", ...
Returns the leader for a partition or None if the partition exists but has no leader. PartitionUnavailableError will be raised if the topic or partition is not part of the metadata.
[ "Returns", "the", "leader", "for", "a", "partition", "or", "None", "if", "the", "partition", "exists", "but", "has", "no", "leader", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L765-L783
ciena/afkak
afkak/client.py
KafkaClient._get_coordinator_for_group
def _get_coordinator_for_group(self, consumer_group): """Returns the coordinator (broker) for a consumer group Returns the broker for a given consumer group or Raises ConsumerCoordinatorNotAvailableError """ if self.consumer_group_to_brokers.get(consumer_group) is None: ...
python
def _get_coordinator_for_group(self, consumer_group): """Returns the coordinator (broker) for a consumer group Returns the broker for a given consumer group or Raises ConsumerCoordinatorNotAvailableError """ if self.consumer_group_to_brokers.get(consumer_group) is None: ...
[ "def", "_get_coordinator_for_group", "(", "self", ",", "consumer_group", ")", ":", "if", "self", ".", "consumer_group_to_brokers", ".", "get", "(", "consumer_group", ")", "is", "None", ":", "yield", "self", ".", "load_consumer_metadata_for_group", "(", "consumer_gro...
Returns the coordinator (broker) for a consumer group Returns the broker for a given consumer group or Raises ConsumerCoordinatorNotAvailableError
[ "Returns", "the", "coordinator", "(", "broker", ")", "for", "a", "consumer", "group" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L786-L795
ciena/afkak
afkak/client.py
KafkaClient._make_request_to_broker
def _make_request_to_broker(self, broker, requestId, request, **kwArgs): """Send a request to the specified broker.""" def _timeout_request(broker, requestId): """The time we allotted for the request expired, cancel it.""" try: # FIXME: This should be done by call...
python
def _make_request_to_broker(self, broker, requestId, request, **kwArgs): """Send a request to the specified broker.""" def _timeout_request(broker, requestId): """The time we allotted for the request expired, cancel it.""" try: # FIXME: This should be done by call...
[ "def", "_make_request_to_broker", "(", "self", ",", "broker", ",", "requestId", ",", "request", ",", "*", "*", "kwArgs", ")", ":", "def", "_timeout_request", "(", "broker", ",", "requestId", ")", ":", "\"\"\"The time we allotted for the request expired, cancel it.\"\"...
Send a request to the specified broker.
[ "Send", "a", "request", "to", "the", "specified", "broker", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L803-L846
ciena/afkak
afkak/client.py
KafkaClient._send_broker_unaware_request
def _send_broker_unaware_request(self, requestId, request): """ Attempt to send a broker-agnostic request to one of the known brokers: 1. Try each connected broker (in random order) 2. Try each known but unconnected broker (in random order) 3. Try each of the bootstrap hosts (in...
python
def _send_broker_unaware_request(self, requestId, request): """ Attempt to send a broker-agnostic request to one of the known brokers: 1. Try each connected broker (in random order) 2. Try each known but unconnected broker (in random order) 3. Try each of the bootstrap hosts (in...
[ "def", "_send_broker_unaware_request", "(", "self", ",", "requestId", ",", "request", ")", ":", "node_ids", "=", "list", "(", "self", ".", "_brokers", ".", "keys", "(", ")", ")", "# Randomly shuffle the brokers to distribute the load", "random", ".", "shuffle", "(...
Attempt to send a broker-agnostic request to one of the known brokers: 1. Try each connected broker (in random order) 2. Try each known but unconnected broker (in random order) 3. Try each of the bootstrap hosts (in random order) :param bytes request: The bytes of a Kafka `...
[ "Attempt", "to", "send", "a", "broker", "-", "agnostic", "request", "to", "one", "of", "the", "known", "brokers", ":" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L849-L897
ciena/afkak
afkak/client.py
KafkaClient._send_bootstrap_request
def _send_bootstrap_request(self, request): """Make a request using an ephemeral broker connection This routine is used to make broker-unaware requests to get the initial cluster metadata. It cycles through the configured hosts, trying to connect and send the request to each in turn. Th...
python
def _send_bootstrap_request(self, request): """Make a request using an ephemeral broker connection This routine is used to make broker-unaware requests to get the initial cluster metadata. It cycles through the configured hosts, trying to connect and send the request to each in turn. Th...
[ "def", "_send_bootstrap_request", "(", "self", ",", "request", ")", ":", "hostports", "=", "list", "(", "self", ".", "_bootstrap_hosts", ")", "random", ".", "shuffle", "(", "hostports", ")", "for", "host", ",", "port", "in", "hostports", ":", "ep", "=", ...
Make a request using an ephemeral broker connection This routine is used to make broker-unaware requests to get the initial cluster metadata. It cycles through the configured hosts, trying to connect and send the request to each in turn. This temporary connection is closed once a respon...
[ "Make", "a", "request", "using", "an", "ephemeral", "broker", "connection" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L900-L947
ciena/afkak
afkak/client.py
KafkaClient._send_broker_aware_request
def _send_broker_aware_request(self, payloads, encoder_fn, decode_fn, consumer_group=None): """ Group a list of request payloads by topic+partition and send them to the leader broker for that partition using the supplied encode/decode functions ...
python
def _send_broker_aware_request(self, payloads, encoder_fn, decode_fn, consumer_group=None): """ Group a list of request payloads by topic+partition and send them to the leader broker for that partition using the supplied encode/decode functions ...
[ "def", "_send_broker_aware_request", "(", "self", ",", "payloads", ",", "encoder_fn", ",", "decode_fn", ",", "consumer_group", "=", "None", ")", ":", "# Calling this without payloads is nonsensical", "if", "not", "payloads", ":", "raise", "ValueError", "(", "\"Payload...
Group a list of request payloads by topic+partition and send them to the leader broker for that partition using the supplied encode/decode functions Params ====== payloads: list of object-like entities with a topic and partition attribute. payloads must be grou...
[ "Group", "a", "list", "of", "request", "payloads", "by", "topic", "+", "partition", "and", "send", "them", "to", "the", "leader", "broker", "for", "that", "partition", "using", "the", "supplied", "encode", "/", "decode", "functions" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/client.py#L950-L1080
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
get_description
def get_description(expression, options=None): """Generates a human readable string for the Cron Expression Args: expression: The cron expression string options: Options to control the output description Returns: The cron expression description """ descripter = ExpressionDes...
python
def get_description(expression, options=None): """Generates a human readable string for the Cron Expression Args: expression: The cron expression string options: Options to control the output description Returns: The cron expression description """ descripter = ExpressionDes...
[ "def", "get_description", "(", "expression", ",", "options", "=", "None", ")", ":", "descripter", "=", "ExpressionDescriptor", "(", "expression", ",", "options", ")", "return", "descripter", ".", "get_description", "(", "DescriptionTypeEnum", ".", "FULL", ")" ]
Generates a human readable string for the Cron Expression Args: expression: The cron expression string options: Options to control the output description Returns: The cron expression description
[ "Generates", "a", "human", "readable", "string", "for", "the", "Cron", "Expression", "Args", ":", "expression", ":", "The", "cron", "expression", "string", "options", ":", "Options", "to", "control", "the", "output", "description", "Returns", ":", "The", "cron...
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L605-L615
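A short usage sketch of the module-level helper documented above, assuming the package-level export shown in the cron-descriptor README; the exact output strings depend on the default options:

    from cron_descriptor import get_description

    print(get_description("*/5 * * * *"))    # e.g. "Every 5 minutes"
    print(get_description("0 12 * * MON"))   # e.g. "At 12:00 PM, only on Monday"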
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_description
def get_description(self, description_type=DescriptionTypeEnum.FULL): """Generates a human readable string for the Cron Expression Args: description_type: Which part(s) of the expression to describe Returns: The cron expression description Raises: Exc...
python
def get_description(self, description_type=DescriptionTypeEnum.FULL): """Generates a human readable string for the Cron Expression Args: description_type: Which part(s) of the expression to describe Returns: The cron expression description Raises: Exc...
[ "def", "get_description", "(", "self", ",", "description_type", "=", "DescriptionTypeEnum", ".", "FULL", ")", ":", "try", ":", "if", "self", ".", "_parsed", "is", "False", ":", "parser", "=", "ExpressionParser", "(", "self", ".", "_expression", ",", "self", ...
Generates a human readable string for the Cron Expression Args: description_type: Which part(s) of the expression to describe Returns: The cron expression description Raises: Exception: if throw_exception_on_parse_error is True
[ "Generates", "a", "human", "readable", "string", "for", "the", "Cron", "Expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L76-L112
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_full_description
def get_full_description(self): """Generates the FULL description Returns: The FULL description Raises: FormatException: if formating fails and throw_exception_on_parse_error is True """ try: time_segment = self.get_time_of_day_description()...
python
def get_full_description(self): """Generates the FULL description Returns: The FULL description Raises: FormatException: if formating fails and throw_exception_on_parse_error is True """ try: time_segment = self.get_time_of_day_description()...
[ "def", "get_full_description", "(", "self", ")", ":", "try", ":", "time_segment", "=", "self", ".", "get_time_of_day_description", "(", ")", "day_of_month_desc", "=", "self", ".", "get_day_of_month_description", "(", ")", "month_desc", "=", "self", ".", "get_month...
Generates the FULL description Returns: The FULL description Raises: FormatException: if formatting fails and throw_exception_on_parse_error is True
[ "Generates", "the", "FULL", "description" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L114-L149
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_time_of_day_description
def get_time_of_day_description(self): """Generates a description for only the TIMEOFDAY portion of the expression Returns: The TIMEOFDAY description """ seconds_expression = self._expression_parts[0] minute_expression = self._expression_parts[1] hour_expres...
python
def get_time_of_day_description(self): """Generates a description for only the TIMEOFDAY portion of the expression Returns: The TIMEOFDAY description """ seconds_expression = self._expression_parts[0] minute_expression = self._expression_parts[1] hour_expres...
[ "def", "get_time_of_day_description", "(", "self", ")", ":", "seconds_expression", "=", "self", ".", "_expression_parts", "[", "0", "]", "minute_expression", "=", "self", ".", "_expression_parts", "[", "1", "]", "hour_expression", "=", "self", ".", "_expression_pa...
Generates a description for only the TIMEOFDAY portion of the expression Returns: The TIMEOFDAY description
[ "Generates", "a", "description", "for", "only", "the", "TIMEOFDAY", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L151-L214
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_seconds_description
def get_seconds_description(self): """Generates a description for only the SECONDS portion of the expression Returns: The SECONDS description """ return self.get_segment_description( self._expression_parts[0], _("every second"), lambda s...
python
def get_seconds_description(self): """Generates a description for only the SECONDS portion of the expression Returns: The SECONDS description """ return self.get_segment_description( self._expression_parts[0], _("every second"), lambda s...
[ "def", "get_seconds_description", "(", "self", ")", ":", "return", "self", ".", "get_segment_description", "(", "self", ".", "_expression_parts", "[", "0", "]", ",", "_", "(", "\"every second\"", ")", ",", "lambda", "s", ":", "s", ",", "lambda", "s", ":", ...
Generates a description for only the SECONDS portion of the expression Returns: The SECONDS description
[ "Generates", "a", "description", "for", "only", "the", "SECONDS", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L216-L231
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_minutes_description
def get_minutes_description(self): """Generates a description for only the MINUTE portion of the expression Returns: The MINUTE description """ return self.get_segment_description( self._expression_parts[1], _("every minute"), lambda s: ...
python
def get_minutes_description(self): """Generates a description for only the MINUTE portion of the expression Returns: The MINUTE description """ return self.get_segment_description( self._expression_parts[1], _("every minute"), lambda s: ...
[ "def", "get_minutes_description", "(", "self", ")", ":", "return", "self", ".", "get_segment_description", "(", "self", ".", "_expression_parts", "[", "1", "]", ",", "_", "(", "\"every minute\"", ")", ",", "lambda", "s", ":", "s", ",", "lambda", "s", ":", ...
Generates a description for only the MINUTE portion of the expression Returns: The MINUTE description
[ "Generates", "a", "description", "for", "only", "the", "MINUTE", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L233-L248
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_hours_description
def get_hours_description(self): """Generates a description for only the HOUR portion of the expression Returns: The HOUR description """ expression = self._expression_parts[2] return self.get_segment_description( expression, _("every hour"),...
python
def get_hours_description(self): """Generates a description for only the HOUR portion of the expression Returns: The HOUR description """ expression = self._expression_parts[2] return self.get_segment_description( expression, _("every hour"),...
[ "def", "get_hours_description", "(", "self", ")", ":", "expression", "=", "self", ".", "_expression_parts", "[", "2", "]", "return", "self", ".", "get_segment_description", "(", "expression", ",", "_", "(", "\"every hour\"", ")", ",", "lambda", "s", ":", "se...
Generates a description for only the HOUR portion of the expression Returns: The HOUR description
[ "Generates", "a", "description", "for", "only", "the", "HOUR", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L250-L265
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_day_of_week_description
def get_day_of_week_description(self): """Generates a description for only the DAYOFWEEK portion of the expression Returns: The DAYOFWEEK description """ if self._expression_parts[5] == "*" and self._expression_parts[3] != "*": # DOM is specified and DOW is * s...
python
def get_day_of_week_description(self): """Generates a description for only the DAYOFWEEK portion of the expression Returns: The DAYOFWEEK description """ if self._expression_parts[5] == "*" and self._expression_parts[3] != "*": # DOM is specified and DOW is * s...
[ "def", "get_day_of_week_description", "(", "self", ")", ":", "if", "self", ".", "_expression_parts", "[", "5", "]", "==", "\"*\"", "and", "self", ".", "_expression_parts", "[", "3", "]", "!=", "\"*\"", ":", "# DOM is specified and DOW is * so to prevent contradictio...
Generates a description for only the DAYOFWEEK portion of the expression Returns: The DAYOFWEEK description
[ "Generates", "a", "description", "for", "only", "the", "DAYOFWEEK", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L267-L321
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_month_description
def get_month_description(self): """Generates a description for only the MONTH portion of the expression Returns: The MONTH description """ return self.get_segment_description( self._expression_parts[4], '', lambda s: datetime.date(dateti...
python
def get_month_description(self): """Generates a description for only the MONTH portion of the expression Returns: The MONTH description """ return self.get_segment_description( self._expression_parts[4], '', lambda s: datetime.date(dateti...
[ "def", "get_month_description", "(", "self", ")", ":", "return", "self", ".", "get_segment_description", "(", "self", ".", "_expression_parts", "[", "4", "]", ",", "''", ",", "lambda", "s", ":", "datetime", ".", "date", "(", "datetime", ".", "date", ".", ...
Generates a description for only the MONTH portion of the expression Returns: The MONTH description
[ "Generates", "a", "description", "for", "only", "the", "MONTH", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L323-L337
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_day_of_month_description
def get_day_of_month_description(self): """Generates a description for only the DAYOFMONTH portion of the expression Returns: The DAYOFMONTH description """ expression = self._expression_parts[3] expression = expression.replace("?", "*") if expression == "L...
python
def get_day_of_month_description(self): """Generates a description for only the DAYOFMONTH portion of the expression Returns: The DAYOFMONTH description """ expression = self._expression_parts[3] expression = expression.replace("?", "*") if expression == "L...
[ "def", "get_day_of_month_description", "(", "self", ")", ":", "expression", "=", "self", ".", "_expression_parts", "[", "3", "]", "expression", "=", "expression", ".", "replace", "(", "\"?\"", ",", "\"*\"", ")", "if", "expression", "==", "\"L\"", ":", "descr...
Generates a description for only the DAYOFMONTH portion of the expression Returns: The DAYOFMONTH description
[ "Generates", "a", "description", "for", "only", "the", "DAYOFMONTH", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L339-L373
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_year_description
def get_year_description(self): """Generates a description for only the YEAR portion of the expression Returns: The YEAR description """ def format_year(s): regex = re.compile(r"^\d+$") if regex.match(s): year_int = int(s) ...
python
def get_year_description(self): """Generates a description for only the YEAR portion of the expression Returns: The YEAR description """ def format_year(s): regex = re.compile(r"^\d+$") if regex.match(s): year_int = int(s) ...
[ "def", "get_year_description", "(", "self", ")", ":", "def", "format_year", "(", "s", ")", ":", "regex", "=", "re", ".", "compile", "(", "r\"^\\d+$\"", ")", "if", "regex", ".", "match", "(", "s", ")", ":", "year_int", "=", "int", "(", "s", ")", "if...
Generates a description for only the YEAR portion of the expression Returns: The YEAR description
[ "Generates", "a", "description", "for", "only", "the", "YEAR", "portion", "of", "the", "expression" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L375-L400
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.get_segment_description
def get_segment_description( self, expression, all_description, get_single_item_description, get_interval_description_format, get_between_description_format, get_description_format ): """Returns segment description Args: expression:...
python
def get_segment_description( self, expression, all_description, get_single_item_description, get_interval_description_format, get_between_description_format, get_description_format ): """Returns segment description Args: expression:...
[ "def", "get_segment_description", "(", "self", ",", "expression", ",", "all_description", ",", "get_single_item_description", ",", "get_interval_description_format", ",", "get_between_description_format", ",", "get_description_format", ")", ":", "description", "=", "None", ...
Returns segment description Args: expression: Segment to describe all_description: * get_single_item_description: 1 get_interval_description_format: 1/2 get_between_description_format: 1-2 get_description_format: format get_single_item_desc...
[ "Returns", "segment", "description", "Args", ":", "expression", ":", "Segment", "to", "describe", "all_description", ":", "*", "get_single_item_description", ":", "1", "get_interval_description_format", ":", "1", "/", "2", "get_between_description_format", ":", "1", "...
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L402-L484
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.generate_between_segment_description
def generate_between_segment_description( self, between_expression, get_between_description_format, get_single_item_description ): """ Generates the between segment description :param between_expression: :param get_between_description_f...
python
def generate_between_segment_description( self, between_expression, get_between_description_format, get_single_item_description ): """ Generates the between segment description :param between_expression: :param get_between_description_f...
[ "def", "generate_between_segment_description", "(", "self", ",", "between_expression", ",", "get_between_description_format", ",", "get_single_item_description", ")", ":", "description", "=", "\"\"", "between_segments", "=", "between_expression", ".", "split", "(", "'-'", ...
Generates the between segment description :param between_expression: :param get_between_description_format: :param get_single_item_description: :return: The between segment description
[ "Generates", "the", "between", "segment", "description", ":", "param", "between_expression", ":", ":", "param", "get_between_description_format", ":", ":", "param", "get_single_item_description", ":", ":", "return", ":", "The", "between", "segment", "description" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L486-L509
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.format_time
def format_time( self, hour_expression, minute_expression, second_expression='' ): """Given time parts, will construct a formatted time description Args: hour_expression: Hours part minute_expression: Minutes part second_expression: ...
python
def format_time( self, hour_expression, minute_expression, second_expression='' ): """Given time parts, will construct a formatted time description Args: hour_expression: Hours part minute_expression: Minutes part second_expression: ...
[ "def", "format_time", "(", "self", ",", "hour_expression", ",", "minute_expression", ",", "second_expression", "=", "''", ")", ":", "hour", "=", "int", "(", "hour_expression", ")", "period", "=", "''", "if", "self", ".", "_options", ".", "use_24hour_time_forma...
Given time parts, will construct a formatted time description Args: hour_expression: Hours part minute_expression: Minutes part second_expression: Seconds part Returns: Formatted time description
[ "Given", "time", "parts", "will", "construct", "a", "formatted", "time", "description", "Args", ":", "hour_expression", ":", "Hours", "part", "minute_expression", ":", "Minutes", "part", "second_expression", ":", "Seconds", "part", "Returns", ":", "Formatted", "tim...
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L511-L539
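A minimal standalone sketch of the 12/24-hour formatting this method performs (ignoring seconds and localization, which the real method also handles):

    def format_time(hour_expression, minute_expression, use_24hour=False):
        hour, minute = int(hour_expression), int(minute_expression)
        if use_24hour:
            return "{:02d}:{:02d}".format(hour, minute)
        period = "PM" if hour >= 12 else "AM"
        hour12 = hour - 12 if hour > 12 else (12 if hour == 0 else hour)
        return "{}:{:02d} {}".format(hour12, minute, period)

    format_time("13", "5")        # '1:05 PM'
    format_time("0", "30", True)  # '00:30'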
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.transform_verbosity
def transform_verbosity(self, description, use_verbose_format): """Transforms the verbosity of the expression description by stripping verbosity from original description Args: description: The description to transform use_verbose_format: If True, will leave description as is, if...
python
def transform_verbosity(self, description, use_verbose_format): """Transforms the verbosity of the expression description by stripping verbosity from original description Args: description: The description to transform use_verbose_format: If True, will leave description as is, if...
[ "def", "transform_verbosity", "(", "self", ",", "description", ",", "use_verbose_format", ")", ":", "if", "use_verbose_format", "is", "False", ":", "description", "=", "description", ".", "replace", "(", "_", "(", "\", every minute\"", ")", ",", "''", ")", "de...
Transforms the verbosity of the expression description by stripping verbosity from original description Args: description: The description to transform use_verbose_format: If True, will leave description as is, if False, will strip verbose parts second_expression: Seconds par...
[ "Transforms", "the", "verbosity", "of", "the", "expression", "description", "by", "stripping", "verbosity", "from", "original", "description", "Args", ":", "description", ":", "The", "description", "to", "transform", "use_verbose_format", ":", "If", "True", "will", ...
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L541-L556
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.transform_case
def transform_case(self, description, case_type): """Transforms the case of the expression description, based on options Args: description: The description to transform case_type: The casing type that controls the output casing second_expression: Seconds part ...
python
def transform_case(self, description, case_type): """Transforms the case of the expression description, based on options Args: description: The description to transform case_type: The casing type that controls the output casing second_expression: Seconds part ...
[ "def", "transform_case", "(", "self", ",", "description", ",", "case_type", ")", ":", "if", "case_type", "==", "CasingTypeEnum", ".", "Sentence", ":", "description", "=", "\"{}{}\"", ".", "format", "(", "description", "[", "0", "]", ".", "upper", "(", ")",...
Transforms the case of the expression description, based on options Args: description: The description to transform case_type: The casing type that controls the output casing second_expression: Seconds part Returns: The transformed description with proper ...
[ "Transforms", "the", "case", "of", "the", "expression", "description", "based", "on", "options", "Args", ":", "description", ":", "The", "description", "to", "transform", "case_type", ":", "The", "casing", "type", "that", "controls", "the", "output", "casing", ...
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L558-L576
Salamek/cron-descriptor
cron_descriptor/ExpressionDescriptor.py
ExpressionDescriptor.number_to_day
def number_to_day(self, day_number): """Returns localized day name by its CRON number Args: day_number: Number of a day Returns: Day corresponding to day_number Raises: IndexError: When day_number is not found """ return [ ...
python
def number_to_day(self, day_number): """Returns localized day name by its CRON number Args: day_number: Number of a day Returns: Day corresponding to day_number Raises: IndexError: When day_number is not found """ return [ ...
[ "def", "number_to_day", "(", "self", ",", "day_number", ")", ":", "return", "[", "calendar", ".", "day_name", "[", "6", "]", ",", "calendar", ".", "day_name", "[", "0", "]", ",", "calendar", ".", "day_name", "[", "1", "]", ",", "calendar", ".", "day_...
Returns localized day name by its CRON number Args: day_number: Number of a day Returns: Day corresponding to day_number Raises: IndexError: When day_number is not found
[ "Returns", "localized", "day", "name", "by", "its", "CRON", "number" ]
train
https://github.com/Salamek/cron-descriptor/blob/fafe86b33e190caf205836fa1c719d27c7b408c7/cron_descriptor/ExpressionDescriptor.py#L578-L596
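The token stream above shows the trick: CRON numbers days from Sunday (0) while Python's calendar.day_name starts at Monday (0), so the list is rebuilt with index 6 (Sunday) first. A self-contained sketch:

    import calendar

    def number_to_day(day_number):
        # Reorder calendar.day_name so index 0 is Sunday, matching CRON;
        # an out-of-range number raises IndexError, as documented.
        names = [calendar.day_name[6]] + [calendar.day_name[i] for i in range(6)]
        return names[day_number]

    number_to_day(0)  # 'Sunday'
    number_to_day(1)  # 'Monday'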
ciena/afkak
afkak/consumer.py
Consumer.start
def start(self, start_offset): """ Starts fetching messages from Kafka and delivering them to the :attr:`.processor` function. :param int start_offset: The offset within the partition from which to start fetching. Special values include: :const:`OFFSET_EARLIEST`,...
python
def start(self, start_offset): """ Starts fetching messages from Kafka and delivering them to the :attr:`.processor` function. :param int start_offset: The offset within the partition from which to start fetching. Special values include: :const:`OFFSET_EARLIEST`,...
[ "def", "start", "(", "self", ",", "start_offset", ")", ":", "# Have we been started already, and not stopped?", "if", "self", ".", "_start_d", "is", "not", "None", ":", "raise", "RestartError", "(", "\"Start called on already-started consumer\"", ")", "# Keep track of sta...
Starts fetching messages from Kafka and delivering them to the :attr:`.processor` function. :param int start_offset: The offset within the partition from which to start fetching. Special values include: :const:`OFFSET_EARLIEST`, :const:`OFFSET_LATEST`, and :const:`OF...
[ "Starts", "fetching", "messages", "from", "Kafka", "and", "delivering", "them", "to", "the", ":", "attr", ":", ".", "processor", "function", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L236-L283
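A hedged usage sketch for the consumer described above; the import names and constructor signature are assumed from afkak's public API and may differ between versions:

    from afkak import KafkaClient, Consumer, OFFSET_EARLIEST  # assumed exports

    def processor(consumer, message_list):
        # Called with each fetched block of messages; may return a Deferred.
        for message in message_list:
            print(message)

    client = KafkaClient("localhost:9092")
    consumer = Consumer(client, "my-topic", 0, processor)
    d = consumer.start(OFFSET_EARLIEST)  # fires when the consumer stops or fails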
ciena/afkak
afkak/consumer.py
Consumer.shutdown
def shutdown(self): """Gracefully shutdown the consumer Consumer will complete any outstanding processing, commit its current offsets (if so configured) and stop. Returns deferred which callbacks with a tuple of: (last processed offset, last committed offset) if it was able to ...
python
def shutdown(self): """Gracefully shutdown the consumer Consumer will complete any outstanding processing, commit its current offsets (if so configured) and stop. Returns deferred which callbacks with a tuple of: (last processed offset, last committed offset) if it was able to ...
[ "def", "shutdown", "(", "self", ")", ":", "def", "_handle_shutdown_commit_success", "(", "result", ")", ":", "\"\"\"Handle the result of the commit attempted by shutdown\"\"\"", "self", ".", "_shutdown_d", ",", "d", "=", "None", ",", "self", ".", "_shutdown_d", "self"...
Gracefully shutdown the consumer Consumer will complete any outstanding processing, commit its current offsets (if so configured) and stop. Returns deferred which callbacks with a tuple of: (last processed offset, last committed offset) if it was able to successfully commit, or...
[ "Gracefully", "shutdown", "the", "consumer" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L285-L350
ciena/afkak
afkak/consumer.py
Consumer.stop
def stop(self): """ Stop the consumer and return offset of last processed message. This cancels all outstanding operations. Also, if the deferred returned by `start` hasn't been called, it is called with a tuple consisting of the last processed offset and the last committed off...
python
def stop(self): """ Stop the consumer and return offset of last processed message. This cancels all outstanding operations. Also, if the deferred returned by `start` hasn't been called, it is called with a tuple consisting of the last processed offset and the last committed off...
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "_start_d", "is", "None", ":", "raise", "RestopError", "(", "\"Stop called on non-running consumer\"", ")", "self", ".", "_stopping", "=", "True", "# Keep track of state for debugging", "self", ".", "_state"...
Stop the consumer and return offset of last processed message. This cancels all outstanding operations. Also, if the deferred returned by `start` hasn't been called, it is called with a tuple consisting of the last processed offset and the last committed offset. :raises: :exc:`RestopE...
[ "Stop", "the", "consumer", "and", "return", "offset", "of", "last", "processed", "message", ".", "This", "cancels", "all", "outstanding", "operations", ".", "Also", "if", "the", "deferred", "returned", "by", "start", "hasn", "t", "been", "called", "it", "is"...
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L352-L407
ciena/afkak
afkak/consumer.py
Consumer.commit
def commit(self): """ Commit the offset of the message we last processed if it is different from what we believe is the last offset committed to Kafka. .. note:: It is possible to commit a smaller offset than Kafka has stored. This is by design, so we can reproc...
python
def commit(self): """ Commit the offset of the message we last processed if it is different from what we believe is the last offset committed to Kafka. .. note:: It is possible to commit a smaller offset than Kafka has stored. This is by design, so we can reproc...
[ "def", "commit", "(", "self", ")", ":", "# Can't commit without a consumer_group", "if", "not", "self", ".", "consumer_group", ":", "return", "fail", "(", "Failure", "(", "InvalidConsumerGroupError", "(", "\"Bad Group_id:{0!r}\"", ".", "format", "(", "self", ".", ...
Commit the offset of the message we last processed if it is different from what we believe is the last offset committed to Kafka. .. note:: It is possible to commit a smaller offset than Kafka has stored. This is by design, so we can reprocess a Kafka message stream if ...
[ "Commit", "the", "offset", "of", "the", "message", "we", "last", "processed", "if", "it", "is", "different", "from", "what", "we", "believe", "is", "the", "last", "offset", "committed", "to", "Kafka", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L409-L467
ciena/afkak
afkak/consumer.py
Consumer._auto_commit
def _auto_commit(self, by_count=False): """Check if we should start a new commit operation and commit""" # Check if we are even supposed to do any auto-committing if (self._stopping or self._shuttingdown or (not self._start_d) or (self._last_processed_offset is None) or ...
python
def _auto_commit(self, by_count=False): """Check if we should start a new commit operation and commit""" # Check if we are even supposed to do any auto-committing if (self._stopping or self._shuttingdown or (not self._start_d) or (self._last_processed_offset is None) or ...
[ "def", "_auto_commit", "(", "self", ",", "by_count", "=", "False", ")", ":", "# Check if we are even supposed to do any auto-committing", "if", "(", "self", ".", "_stopping", "or", "self", ".", "_shuttingdown", "or", "(", "not", "self", ".", "_start_d", ")", "or...
Check if we should start a new commit operation and commit
[ "Check", "if", "we", "should", "start", "a", "new", "commit", "operation", "and", "commit" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L475-L498
ciena/afkak
afkak/consumer.py
Consumer._retry_fetch
def _retry_fetch(self, after=None): """ Schedule a delayed :meth:`_do_fetch` call after a failure :param float after: The delay in seconds after which to do the retried fetch. If `None`, our internal :attr:`retry_delay` is used, and adjusted by :const:`REQUES...
python
def _retry_fetch(self, after=None): """ Schedule a delayed :meth:`_do_fetch` call after a failure :param float after: The delay in seconds after which to do the retried fetch. If `None`, our internal :attr:`retry_delay` is used, and adjusted by :const:`REQUES...
[ "def", "_retry_fetch", "(", "self", ",", "after", "=", "None", ")", ":", "# Have we been told to stop or shutdown? Then don't actually retry.", "if", "self", ".", "_stopping", "or", "self", ".", "_shuttingdown", "or", "self", ".", "_start_d", "is", "None", ":", "...
Schedule a delayed :meth:`_do_fetch` call after a failure :param float after: The delay in seconds after which to do the retried fetch. If `None`, our internal :attr:`retry_delay` is used, and adjusted by :const:`REQUEST_RETRY_FACTOR`.
[ "Schedule", "a", "delayed", ":", "meth", ":", "_do_fetch", "call", "after", "a", "failure" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L500-L522
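The retry scheduling described here is a capped multiplicative backoff: each failure multiplies the current delay by REQUEST_RETRY_FACTOR up to a maximum. A sketch with placeholder constants (the factor and cap below are illustrative, not afkak's actual values):

    def next_retry_delay(current, factor=1.2, max_delay=30.0):
        # Grow the delay after each failed fetch, but never past the cap.
        return min(current * factor, max_delay)

    delay = 1.0
    for _ in range(10):
        delay = next_retry_delay(delay)  # 1.2, 1.44, ... capped at 30.0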
ciena/afkak
afkak/consumer.py
Consumer._handle_offset_response
def _handle_offset_response(self, response): """ Handle responses to both OffsetRequest and OffsetFetchRequest, since they are similar enough. :param response: A tuple of a single OffsetFetchResponse or OffsetResponse """ # Got a response, clear our outstandi...
python
def _handle_offset_response(self, response): """ Handle responses to both OffsetRequest and OffsetFetchRequest, since they are similar enough. :param response: A tuple of a single OffsetFetchResponse or OffsetResponse """ # Got a response, clear our outstandi...
[ "def", "_handle_offset_response", "(", "self", ",", "response", ")", ":", "# Got a response, clear our outstanding request deferred", "self", ".", "_request_d", "=", "None", "# Successful request, reset our retry delay, count, etc", "self", ".", "retry_delay", "=", "self", "....
Handle responses to both OffsetRequest and OffsetFetchRequest, since they are similar enough. :param response: A tuple of a single OffsetFetchResponse or OffsetResponse
[ "Handle", "responses", "to", "both", "OffsetRequest", "and", "OffsetFetchRequest", "since", "they", "are", "similar", "enough", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L524-L552
ciena/afkak
afkak/consumer.py
Consumer._handle_offset_error
def _handle_offset_error(self, failure): """ Retry the offset fetch request if appropriate. Once the :attr:`.retry_delay` reaches our :attr:`.retry_max_delay`, we log a warning. This should perhaps be extended to abort sooner on certain errors. """ # outstanding...
python
def _handle_offset_error(self, failure): """ Retry the offset fetch request if appropriate. Once the :attr:`.retry_delay` reaches our :attr:`.retry_max_delay`, we log a warning. This should perhaps be extended to abort sooner on certain errors. """ # outstanding...
[ "def", "_handle_offset_error", "(", "self", ",", "failure", ")", ":", "# outstanding request got errback'd, clear it", "self", ".", "_request_d", "=", "None", "if", "self", ".", "_stopping", "and", "failure", ".", "check", "(", "CancelledError", ")", ":", "# Not r...
Retry the offset fetch request if appropriate. Once the :attr:`.retry_delay` reaches our :attr:`.retry_max_delay`, we log a warning. This should perhaps be extended to abort sooner on certain errors.
[ "Retry", "the", "offset", "fetch", "request", "if", "appropriate", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L554-L587
ciena/afkak
afkak/consumer.py
Consumer._send_commit_request
def _send_commit_request(self, retry_delay=None, attempt=None): """Send a commit request with our last_processed_offset""" # If there's a _commit_call, and it's not active, clear it, it probably # just called us... if self._commit_call and not self._commit_call.active(): self...
python
def _send_commit_request(self, retry_delay=None, attempt=None): """Send a commit request with our last_processed_offset""" # If there's a _commit_call, and it's not active, clear it, it probably # just called us... if self._commit_call and not self._commit_call.active(): self...
[ "def", "_send_commit_request", "(", "self", ",", "retry_delay", "=", "None", ",", "attempt", "=", "None", ")", ":", "# If there's a _commit_call, and it's not active, clear it, it probably", "# just called us...", "if", "self", ".", "_commit_call", "and", "not", "self", ...
Send a commit request with our last_processed_offset
[ "Send", "a", "commit", "request", "with", "our", "last_processed_offset" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L616-L650
ciena/afkak
afkak/consumer.py
Consumer._handle_commit_error
def _handle_commit_error(self, failure, retry_delay, attempt): """ Retry the commit request, depending on failure type Depending on the type of the failure, we retry the commit request with the latest processed offset, or callback/errback self._commit_ds """ # Check if we are st...
python
def _handle_commit_error(self, failure, retry_delay, attempt): """ Retry the commit request, depending on failure type Depending on the type of the failure, we retry the commit request with the latest processed offset, or callback/errback self._commit_ds """ # Check if we are st...
[ "def", "_handle_commit_error", "(", "self", ",", "failure", ",", "retry_delay", ",", "attempt", ")", ":", "# Check if we are stopping and the request was cancelled", "if", "self", ".", "_stopping", "and", "failure", ".", "check", "(", "CancelledError", ")", ":", "# ...
Retry the commit request, depending on failure type Depending on the type of the failure, we retry the commit request with the latest processed offset, or callback/errback self._commit_ds
[ "Retry", "the", "commit", "request", "depending", "on", "failure", "type" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L652-L691
ciena/afkak
afkak/consumer.py
Consumer._handle_processor_error
def _handle_processor_error(self, failure): """Handle a failure in the processing of a block of messages This method is called when the processor func fails while processing a block of messages. Since we can't know how best to handle a processor failure, we just :func:`errback` our :fun...
python
def _handle_processor_error(self, failure): """Handle a failure in the processing of a block of messages This method is called when the processor func fails while processing a block of messages. Since we can't know how best to handle a processor failure, we just :func:`errback` our :fun...
[ "def", "_handle_processor_error", "(", "self", ",", "failure", ")", ":", "# Check if we're stopping/stopped and the errback of the processor", "# deferred is just the cancelling we initiated. If so, we skip", "# notifying via the _start_d deferred, as it will be 'callback'd at the", "# end of...
Handle a failure in the processing of a block of messages This method is called when the processor func fails while processing a block of messages. Since we can't know how best to handle a processor failure, we just :func:`errback` our :func:`start` method's deferred to let our user kno...
[ "Handle", "a", "failure", "in", "the", "processing", "of", "a", "block", "of", "messages" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L697-L711
ciena/afkak
afkak/consumer.py
Consumer._handle_fetch_error
def _handle_fetch_error(self, failure): """A fetch request resulted in an error. Retry after our current delay When a fetch error occurs, we check to see if the Consumer is being stopped, and if so just return, trapping the CancelledError. If not, we check if the Consumer has a non-zero...
python
def _handle_fetch_error(self, failure): """A fetch request resulted in an error. Retry after our current delay When a fetch error occurs, we check to see if the Consumer is being stopped, and if so just return, trapping the CancelledError. If not, we check if the Consumer has a non-zero...
[ "def", "_handle_fetch_error", "(", "self", ",", "failure", ")", ":", "# The _request_d deferred has fired, clear it.", "self", ".", "_request_d", "=", "None", "if", "failure", ".", "check", "(", "OffsetOutOfRangeError", ")", ":", "if", "self", ".", "auto_offset_rese...
A fetch request resulted in an error. Retry after our current delay When a fetch error occurs, we check to see if the Consumer is being stopped, and if so just return, trapping the CancelledError. If not, we check if the Consumer has a non-zero setting for :attr:`request_retry_max_attem...
[ "A", "fetch", "request", "resulted", "in", "an", "error", ".", "Retry", "after", "our", "current", "delay" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L713-L763
ciena/afkak
afkak/consumer.py
Consumer._handle_fetch_response
def _handle_fetch_response(self, responses): """The callback handling the successful response from the fetch request Delivers the message list to the processor, handles per-message errors (ConsumerFetchSizeTooSmall), triggers another fetch request If the processor is still processing t...
python
def _handle_fetch_response(self, responses): """The callback handling the successful response from the fetch request Delivers the message list to the processor, handles per-message errors (ConsumerFetchSizeTooSmall), triggers another fetch request If the processor is still processing t...
[ "def", "_handle_fetch_response", "(", "self", ",", "responses", ")", ":", "# Successful fetch, reset our retry delay", "self", ".", "retry_delay", "=", "self", ".", "retry_init_delay", "self", ".", "_fetch_attempt_count", "=", "1", "# Check to see if we are still processing...
The callback handling the successful response from the fetch request Delivers the message list to the processor, handles per-message errors (ConsumerFetchSizeTooSmall), triggers another fetch request If the processor is still processing the last batch of messages, we defer this process...
[ "The", "callback", "handling", "the", "successful", "response", "from", "the", "fetch", "request" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L765-L856
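One per-message error the callback above handles is ConsumerFetchSizeTooSmall: a message larger than the current fetch buffer. The usual remedy, sketched here with an assumed helper name, is to grow the buffer (typically doubling, up to an optional cap) and refetch at the same offset:

    def grow_fetch_size(current_size, max_size=None):
        # Double the buffer so the oversized message fits on the next fetch.
        new_size = current_size * 2
        if max_size is not None:
            new_size = min(new_size, max_size)
        return new_size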
ciena/afkak
afkak/consumer.py
Consumer._process_messages
def _process_messages(self, messages): """Send messages to the `processor` callback to be processed In the case we have a commit policy, we send messages to the processor in blocks no bigger than auto_commit_every_n (if set). Otherwise, we send the entire message block to be processed. ...
python
def _process_messages(self, messages): """Send messages to the `processor` callback to be processed In the case we have a commit policy, we send messages to the processor in blocks no bigger than auto_commit_every_n (if set). Otherwise, we send the entire message block to be processed. ...
[ "def", "_process_messages", "(", "self", ",", "messages", ")", ":", "# Have we been told to shutdown?", "if", "self", ".", "_shuttingdown", ":", "return", "# Do we have any messages to process?", "if", "not", "messages", ":", "# No, we're done with this block. If we had anoth...
Send messages to the `processor` callback to be processed In the case we have a commit policy, we send messages to the processor in blocks no bigger than auto_commit_every_n (if set). Otherwise, we send the entire message block to be processed.
[ "Send", "messages", "to", "the", "processor", "callback", "to", "be", "processed" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L858-L906
ciena/afkak
afkak/consumer.py
Consumer._do_fetch
def _do_fetch(self): """Send a fetch request if there isn't a request outstanding Sends a fetch request to the Kafka cluster to get messages at the current offset. When the response comes back, if there are messages, it delivers them to the :attr:`processor` callback and initiates ...
python
def _do_fetch(self): """Send a fetch request if there isn't a request outstanding Sends a fetch request to the Kafka cluster to get messages at the current offset. When the response comes back, if there are messages, it delivers them to the :attr:`processor` callback and initiates ...
[ "def", "_do_fetch", "(", "self", ")", ":", "# Check for outstanding request.", "if", "self", ".", "_request_d", ":", "log", ".", "debug", "(", "\"_do_fetch: Outstanding request: %r\"", ",", "self", ".", "_request_d", ")", "return", "# Cleanup our _retry_call, if we have...
Send a fetch request if there isn't a request outstanding Sends a fetch request to the Kafka cluster to get messages at the current offset. When the response comes back, if there are messages, it delivers them to the :attr:`processor` callback and initiates another fetch request. If t...
[ "Send", "a", "fetch", "request", "if", "there", "isn", "t", "a", "request", "outstanding" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L908-L968
ciena/afkak
afkak/consumer.py
Consumer._commit_timer_failed
def _commit_timer_failed(self, fail): """Handle an error in the commit() function Our commit() function called by the LoopingCall failed. Some error probably came back from Kafka and _check_error() raised the exception For now, just log the failure and restart the loop """ ...
python
def _commit_timer_failed(self, fail): """Handle an error in the commit() function Our commit() function called by the LoopingCall failed. Some error probably came back from Kafka and _check_error() raised the exception For now, just log the failure and restart the loop """ ...
[ "def", "_commit_timer_failed", "(", "self", ",", "fail", ")", ":", "log", ".", "warning", "(", "'_commit_timer_failed: uncaught error %r: %s in _auto_commit'", ",", "fail", ",", "fail", ".", "getBriefTraceback", "(", ")", ")", "self", ".", "_commit_looper_d", "=", ...
Handle an error in the commit() function Our commit() function called by the LoopingCall failed. Some error probably came back from Kafka and _check_error() raised the exception For now, just log the failure and restart the loop
[ "Handle", "an", "error", "in", "the", "commit", "()", "function" ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L970-L981
ciena/afkak
afkak/consumer.py
Consumer._commit_timer_stopped
def _commit_timer_stopped(self, lCall): """We're shutting down, clean up our looping call...""" if self._commit_looper is not lCall: log.warning('_commit_timer_stopped with wrong timer:%s not:%s', lCall, self._commit_looper) else: log.debug('_commi...
python
def _commit_timer_stopped(self, lCall): """We're shutting down, clean up our looping call...""" if self._commit_looper is not lCall: log.warning('_commit_timer_stopped with wrong timer:%s not:%s', lCall, self._commit_looper) else: log.debug('_commi...
[ "def", "_commit_timer_stopped", "(", "self", ",", "lCall", ")", ":", "if", "self", ".", "_commit_looper", "is", "not", "lCall", ":", "log", ".", "warning", "(", "'_commit_timer_stopped with wrong timer:%s not:%s'", ",", "lCall", ",", "self", ".", "_commit_looper",...
We're shutting down, clean up our looping call...
[ "We", "re", "shutting", "down", "clean", "up", "our", "looping", "call", "..." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/consumer.py#L983-L992
ciena/afkak
afkak/partitioner.py
pure_murmur2
def pure_murmur2(byte_array, seed=0x9747b28c): """Pure-python Murmur2 implementation. Based on java client, see org.apache.kafka.common.utils.Utils.murmur2 https://github.com/apache/kafka/blob/0.8.2/clients/src/main/java/org/apache/kafka/common/utils/Utils.java#L244 Args: byte_array: bytearray ...
python
def pure_murmur2(byte_array, seed=0x9747b28c): """Pure-python Murmur2 implementation. Based on java client, see org.apache.kafka.common.utils.Utils.murmur2 https://github.com/apache/kafka/blob/0.8.2/clients/src/main/java/org/apache/kafka/common/utils/Utils.java#L244 Args: byte_array: bytearray ...
[ "def", "pure_murmur2", "(", "byte_array", ",", "seed", "=", "0x9747b28c", ")", ":", "# Ensure byte_array arg is a bytearray", "if", "not", "isinstance", "(", "byte_array", ",", "bytearray", ")", ":", "raise", "TypeError", "(", "\"Type: %r of 'byte_array' arg must be 'by...
Pure-python Murmur2 implementation. Based on java client, see org.apache.kafka.common.utils.Utils.murmur2 https://github.com/apache/kafka/blob/0.8.2/clients/src/main/java/org/apache/kafka/common/utils/Utils.java#L244 Args: byte_array: bytearray - Raises TypeError otherwise Returns: MurmurHash2...
[ "Pure", "-", "python", "Murmur2", "implementation", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/partitioner.py#L31-L98
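For reference, a compact pure-Python MurmurHash2 in the same spirit as the entry above, following the Kafka Java client's constants; this is a sketch, not afkak's exact code:

    def murmur2(data, seed=0x9747b28c):
        # 32-bit MurmurHash2 over a bytes-like sequence of integers.
        length = len(data)
        m, r = 0x5bd1e995, 24
        h = seed ^ length
        for i in range(0, length - (length % 4), 4):
            k = data[i] | (data[i + 1] << 8) | (data[i + 2] << 16) | (data[i + 3] << 24)
            k = (k * m) & 0xffffffff
            k ^= k >> r
            k = (k * m) & 0xffffffff
            h = ((h * m) & 0xffffffff) ^ k
        tail = length & ~3  # offset of the trailing 1-3 bytes, if any
        extra = length % 4
        if extra >= 3:
            h ^= data[tail + 2] << 16
        if extra >= 2:
            h ^= data[tail + 1] << 8
        if extra >= 1:
            h ^= data[tail]
            h = (h * m) & 0xffffffff
        h ^= h >> 13
        h = (h * m) & 0xffffffff
        h ^= h >> 15
        return h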
ciena/afkak
afkak/partitioner.py
HashedPartitioner.partition
def partition(self, key, partitions): """ Select a partition based on the hash of the key. :param key: Partition key :type key: text string or UTF-8 `bytes` or `bytearray` :param list partitions: An indexed sequence of partition identifiers. :returns: ...
python
def partition(self, key, partitions): """ Select a partition based on the hash of the key. :param key: Partition key :type key: text string or UTF-8 `bytes` or `bytearray` :param list partitions: An indexed sequence of partition identifiers. :returns: ...
[ "def", "partition", "(", "self", ",", "key", ",", "partitions", ")", ":", "return", "partitions", "[", "(", "self", ".", "_hash", "(", "key", ")", "&", "0x7FFFFFFF", ")", "%", "len", "(", "partitions", ")", "]" ]
Select a partition based on the hash of the key. :param key: Partition key :type key: text string or UTF-8 `bytes` or `bytearray` :param list partitions: An indexed sequence of partition identifiers. :returns: One of the given partition identifiers. The result wi...
[ "Select", "a", "partition", "based", "on", "the", "hash", "of", "the", "key", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/partitioner.py#L196-L208
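Combining the two entries above, partition selection is a one-liner (reusing the murmur2 sketch shown earlier):

    partitions = [0, 1, 2, 3]
    key = b"user-42"
    # The 0x7FFFFFFF mask clears the sign bit so the modulo never sees
    # a negative index.
    chosen = partitions[(murmur2(key) & 0x7FFFFFFF) % len(partitions)]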
ciena/afkak
afkak/codec.py
snappy_encode
def snappy_encode(payload, xerial_compatible=False, xerial_blocksize=32 * 1024): """ Compress the given data with the Snappy algorithm. :param bytes payload: Data to compress. :param bool xerial_compatible: If set then the stream is broken into length-prefixed blocks in ...
python
def snappy_encode(payload, xerial_compatible=False, xerial_blocksize=32 * 1024): """ Compress the given data with the Snappy algorithm. :param bytes payload: Data to compress. :param bool xerial_compatible: If set then the stream is broken into length-prefixed blocks in ...
[ "def", "snappy_encode", "(", "payload", ",", "xerial_compatible", "=", "False", ",", "xerial_blocksize", "=", "32", "*", "1024", ")", ":", "if", "not", "has_snappy", "(", ")", ":", "# FIXME This should be static, not checked every call.", "raise", "NotImplementedError...
Compress the given data with the Snappy algorithm. :param bytes payload: Data to compress. :param bool xerial_compatible: If set then the stream is broken into length-prefixed blocks in a fashion compatible with the xerial snappy library. The format winds up being:: +-----...
[ "Compress", "the", "given", "data", "with", "the", "Snappy", "algorithm", "." ]
train
https://github.com/ciena/afkak/blob/6f5e05ba6f135ea3c29cdb80efda009f7845569a/afkak/codec.py#L69-L114
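The xerial-compatible stream described above is a 16-byte header followed by length-prefixed snappy blocks. A sketch assuming the python-snappy package is available; the header constants follow the xerial convention (magic bytes, then two big-endian ints for version and minimum-compatible version):

    import struct
    import snappy  # python-snappy, assumed installed

    XERIAL_HEADER = b"\x82SNAPPY\x00" + struct.pack("!ii", 1, 1)

    def xerial_encode(payload, blocksize=32 * 1024):
        out = [XERIAL_HEADER]
        for i in range(0, len(payload), blocksize):
            block = snappy.compress(payload[i:i + blocksize])
            out.append(struct.pack("!i", len(block)))  # 4-byte big-endian size
            out.append(block)
        return b"".join(out)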
escaped/django-video-encoding
video_encoding/files.py
VideoFile._get_video_info
def _get_video_info(self): """ Returns basic information about the video as a dictionary. """ if not hasattr(self, '_info_cache'): encoding_backend = get_backend() try: path = os.path.abspath(self.path) except AttributeError: ...
python
def _get_video_info(self): """ Returns basic information about the video as a dictionary. """ if not hasattr(self, '_info_cache'): encoding_backend = get_backend() try: path = os.path.abspath(self.path) except AttributeError: ...
[ "def", "_get_video_info", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_info_cache'", ")", ":", "encoding_backend", "=", "get_backend", "(", ")", "try", ":", "path", "=", "os", ".", "path", ".", "abspath", "(", "self", ".", "path"...
Returns basic information about the video as a dictionary.
[ "Returns", "basic", "information", "about", "the", "video", "as", "a", "dictionary", "." ]
train
https://github.com/escaped/django-video-encoding/blob/50d228dd91aca40acc7f9293808b1e87cb645e5d/video_encoding/files.py#L35-L46
escaped/django-video-encoding
video_encoding/backends/ffmpeg.py
FFmpegBackend.encode
def encode(self, source_path, target_path, params): # NOQA: C901 """ Encodes a video to a specified file. All encoder specific options are passed in using `params`. """ total_time = self.get_media_info(source_path)['duration'] cmds = [self.ffmpeg_path, '-i', source_path...
python
def encode(self, source_path, target_path, params): # NOQA: C901 """ Encodes a video to a specified file. All encoder specific options are passed in using `params`. """ total_time = self.get_media_info(source_path)['duration'] cmds = [self.ffmpeg_path, '-i', source_path...
[ "def", "encode", "(", "self", ",", "source_path", ",", "target_path", ",", "params", ")", ":", "# NOQA: C901", "total_time", "=", "self", ".", "get_media_info", "(", "source_path", ")", "[", "'duration'", "]", "cmds", "=", "[", "self", ".", "ffmpeg_path", ...
Encodes a video to a specified file. All encoder specific options are passed in using `params`.
[ "Encodes", "a", "video", "to", "a", "specified", "file", ".", "All", "encoder", "specific", "options", "are", "passed", "in", "using", "params", "." ]
train
https://github.com/escaped/django-video-encoding/blob/50d228dd91aca40acc7f9293808b1e87cb645e5d/video_encoding/backends/ffmpeg.py#L84-L139
escaped/django-video-encoding
video_encoding/backends/ffmpeg.py
FFmpegBackend.get_media_info
def get_media_info(self, video_path): """ Returns information about the given video as a dict. """ cmds = [self.ffprobe_path, '-i', video_path] cmds.extend(['-print_format', 'json']) cmds.extend(['-show_format', '-show_streams']) process = self._spawn(cmds) ...
python
def get_media_info(self, video_path): """ Returns information about the given video as a dict. """ cmds = [self.ffprobe_path, '-i', video_path] cmds.extend(['-print_format', 'json']) cmds.extend(['-show_format', '-show_streams']) process = self._spawn(cmds) ...
[ "def", "get_media_info", "(", "self", ",", "video_path", ")", ":", "cmds", "=", "[", "self", ".", "ffprobe_path", ",", "'-i'", ",", "video_path", "]", "cmds", ".", "extend", "(", "[", "'-print_format'", ",", "'json'", "]", ")", "cmds", ".", "extend", "...
Returns information about the given video as a dict.
[ "Returns", "information", "about", "the", "given", "video", "as", "a", "dict", "." ]
train
https://github.com/escaped/django-video-encoding/blob/50d228dd91aca40acc7f9293808b1e87cb645e5d/video_encoding/backends/ffmpeg.py#L152-L169
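The probe command assembled above maps to a short subprocess call; a standalone sketch using ffprobe's real -print_format json / -show_format / -show_streams flags:

    import json
    import subprocess

    def get_media_info(video_path, ffprobe_path="ffprobe"):
        cmd = [ffprobe_path, "-i", video_path,
               "-print_format", "json", "-show_format", "-show_streams"]
        result = subprocess.run(cmd, capture_output=True, check=True)
        return json.loads(result.stdout)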
escaped/django-video-encoding
video_encoding/backends/ffmpeg.py
FFmpegBackend.get_thumbnail
def get_thumbnail(self, video_path, at_time=0.5): """ Extracts an image of a video and returns its path. If the requested thumbnail is not within the duration of the video an `InvalidTimeError` is thrown. """ filename = os.path.basename(video_path) filename, __ =...
python
def get_thumbnail(self, video_path, at_time=0.5): """ Extracts an image of a video and returns its path. If the requested thumbnail is not within the duration of the video an `InvalidTimeError` is thrown. """ filename = os.path.basename(video_path) filename, __ =...
[ "def", "get_thumbnail", "(", "self", ",", "video_path", ",", "at_time", "=", "0.5", ")", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "video_path", ")", "filename", ",", "__", "=", "os", ".", "path", ".", "splitext", "(", "filename", ...
Extracts an image of a video and returns its path. If the requested thumbnail is not within the duration of the video an `InvalidTimeError` is thrown.
[ "Extracts", "an", "image", "of", "a", "video", "and", "returns", "its", "path", "." ]
train
https://github.com/escaped/django-video-encoding/blob/50d228dd91aca40acc7f9293808b1e87cb645e5d/video_encoding/backends/ffmpeg.py#L171-L198
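A sketch of the same frame grab with a direct ffmpeg invocation: seek to the requested time, write a single frame, and return the temp file path (the backend additionally validates at_time against the video duration):

    import os
    import subprocess
    import tempfile

    def get_thumbnail(video_path, at_time=0.5, ffmpeg_path="ffmpeg"):
        fd, thumbnail_path = tempfile.mkstemp(suffix=".jpg")
        os.close(fd)
        cmd = [ffmpeg_path, "-ss", str(at_time), "-i", video_path,
               "-vframes", "1", "-y", thumbnail_path]
        subprocess.run(cmd, check=True, capture_output=True)
        return thumbnail_path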
escaped/django-video-encoding
video_encoding/tasks.py
convert_all_videos
def convert_all_videos(app_label, model_name, object_pk): """ Automatically converts all videos of a given instance. """ # get instance Model = apps.get_model(app_label=app_label, model_name=model_name) instance = Model.objects.get(pk=object_pk) # search for `VideoFields` fields = insta...
python
def convert_all_videos(app_label, model_name, object_pk): """ Automatically converts all videos of a given instance. """ # get instance Model = apps.get_model(app_label=app_label, model_name=model_name) instance = Model.objects.get(pk=object_pk) # search for `VideoFields` fields = insta...
[ "def", "convert_all_videos", "(", "app_label", ",", "model_name", ",", "object_pk", ")", ":", "# get instance", "Model", "=", "apps", ".", "get_model", "(", "app_label", "=", "app_label", ",", "model_name", "=", "model_name", ")", "instance", "=", "Model", "."...
Automatically converts all videos of a given instance.
[ "Automatically", "converts", "all", "videos", "of", "a", "given", "instance", "." ]
train
https://github.com/escaped/django-video-encoding/blob/50d228dd91aca40acc7f9293808b1e87cb645e5d/video_encoding/tasks.py#L15-L33
escaped/django-video-encoding
video_encoding/tasks.py
convert_video
def convert_video(fieldfile, force=False): """ Converts a given video file into all defined formats. """ instance = fieldfile.instance field = fieldfile.field filename = os.path.basename(fieldfile.path) source_path = fieldfile.path encoding_backend = get_backend() for options in s...
python
def convert_video(fieldfile, force=False): """ Converts a given video file into all defined formats. """ instance = fieldfile.instance field = fieldfile.field filename = os.path.basename(fieldfile.path) source_path = fieldfile.path encoding_backend = get_backend() for options in s...
[ "def", "convert_video", "(", "fieldfile", ",", "force", "=", "False", ")", ":", "instance", "=", "fieldfile", ".", "instance", "field", "=", "fieldfile", ".", "field", "filename", "=", "os", ".", "path", ".", "basename", "(", "fieldfile", ".", "path", ")...
Converts a given video file into all defined formats.
[ "Converts", "a", "given", "video", "file", "into", "all", "defined", "formats", "." ]
train
https://github.com/escaped/django-video-encoding/blob/50d228dd91aca40acc7f9293808b1e87cb645e5d/video_encoding/tasks.py#L36-L90
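The general shape of that conversion loop, stripped of the Django model plumbing; the options keys 'name', 'extension', and 'params' mirror the settings format this app appears to use and should be treated as assumptions:

    import os

    def convert(source_path, formats, encode, already_exists):
        # Skip formats that were already rendered, encode the rest, and
        # yield (format_name, target_path) for each new file.
        base, _ = os.path.splitext(source_path)
        for options in formats:
            target = "{}_{}.{}".format(base, options["name"], options["extension"])
            if already_exists(target):
                continue
            encode(source_path, target, options["params"])
            yield options["name"], target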
benvanwerkhoven/kernel_tuner
kernel_tuner/strategies/firefly_algorithm.py
tune
def tune(runner, kernel_options, device_options, tuning_options): """ Find the best performing kernel configuration in the parameter space :param runner: A runner from kernel_tuner.runners :type runner: kernel_tuner.runner :param kernel_options: A dictionary with all options for the kernel. :type...
python
def tune(runner, kernel_options, device_options, tuning_options): """ Find the best performing kernel configuration in the parameter space :param runner: A runner from kernel_tuner.runners :type runner: kernel_tuner.runner :param kernel_options: A dictionary with all options for the kernel. :type...
[ "def", "tune", "(", "runner", ",", "kernel_options", ",", "device_options", ",", "tuning_options", ")", ":", "results", "=", "[", "]", "cache", "=", "{", "}", "#scale variables in x because PSO works with velocities to visit different configurations", "tuning_options", "[...
Find the best performing kernel configuration in the parameter space :param runner: A runner from kernel_tuner.runners :type runner: kernel_tuner.runner :param kernel_options: A dictionary with all options for the kernel. :type kernel_options: dict :param device_options: A dictionary with all op...
[ "Find", "the", "best", "performing", "kernel", "configuration", "in", "the", "parameter", "space" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/strategies/firefly_algorithm.py#L8-L90
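Since the body of tune is truncated above, here is a compact sketch of the classic firefly main loop it implements, written in terms of the Firefly helpers shown in the next few records (distance_to, compute_intensity, move_towards). The B0, gamma, and alpha defaults follow the standard algorithm, not necessarily this strategy's exact settings, and bookkeeping such as the results list and cache is omitted.

import numpy as np

def firefly_loop(swarm, cost_func, max_iter=100, B0=1.0, gamma=1.0, alpha=0.2):
    # Rate every firefly's starting position once.
    for fly in swarm:
        fly.compute_intensity(cost_func)
    for _ in range(max_iter):
        for i in swarm:
            for j in swarm:
                if j.intensity > i.intensity:
                    # Attractiveness decays with the squared distance
                    # between the two fireflies: beta = B0 * exp(-gamma*r^2).
                    beta = B0 * np.exp(-gamma * i.distance_to(j) ** 2)
                    i.move_towards(j, beta, alpha)
                    i.compute_intensity(cost_func)
    # Lower kernel time means higher intensity, so return the brightest fly.
    return max(swarm, key=lambda fly: fly.intensity)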
benvanwerkhoven/kernel_tuner
kernel_tuner/strategies/firefly_algorithm.py
Firefly.distance_to
def distance_to(self, other): """Return Euclidean distance between self and other Firefly""" return np.linalg.norm(self.position-other.position)
python
def distance_to(self, other): """Return Euclidean distance between self and other Firefly""" return np.linalg.norm(self.position-other.position)
[ "def", "distance_to", "(", "self", ",", "other", ")", ":", "return", "np", ".", "linalg", ".", "norm", "(", "self", ".", "position", "-", "other", ".", "position", ")" ]
Return Euclidean distance between self and other Firefly
[ "Return", "Euclidean", "distance", "between", "self", "and", "other", "Firefly" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/strategies/firefly_algorithm.py#L102-L104
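A quick worked example of the norm call above, on two hypothetical firefly positions:

import numpy as np

a = np.array([0.0, 3.0])  # position of one firefly
b = np.array([4.0, 0.0])  # position of another
# Euclidean distance: sqrt((0-4)**2 + (3-0)**2) = 5.0
print(np.linalg.norm(a - b))  # 5.0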
benvanwerkhoven/kernel_tuner
kernel_tuner/strategies/firefly_algorithm.py
Firefly.compute_intensity
def compute_intensity(self, _cost_func): """Evaluate cost function and compute intensity at this position""" self.evaluate(_cost_func) self.intensity = 1 / self.time
python
def compute_intensity(self, _cost_func): """Evaluate cost function and compute intensity at this position""" self.evaluate(_cost_func) self.intensity = 1 / self.time
[ "def", "compute_intensity", "(", "self", ",", "_cost_func", ")", ":", "self", ".", "evaluate", "(", "_cost_func", ")", "self", ".", "intensity", "=", "1", "/", "self", ".", "time" ]
Evaluate cost function and compute intensity at this position
[ "Evaluate", "cost", "function", "and", "compute", "intensity", "at", "this", "position" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/strategies/firefly_algorithm.py#L106-L109
benvanwerkhoven/kernel_tuner
kernel_tuner/strategies/firefly_algorithm.py
Firefly.move_towards
def move_towards(self, other, beta, alpha): """Move firefly towards another given beta and alpha values""" self.position += beta * (other.position - self.position) self.position += alpha * (np.random.uniform(-0.5, 0.5, len(self.position))) self.position = np.minimum(self.position, [b[1] ...
python
def move_towards(self, other, beta, alpha): """Move firefly towards another given beta and alpha values""" self.position += beta * (other.position - self.position) self.position += alpha * (np.random.uniform(-0.5, 0.5, len(self.position))) self.position = np.minimum(self.position, [b[1] ...
[ "def", "move_towards", "(", "self", ",", "other", ",", "beta", ",", "alpha", ")", ":", "self", ".", "position", "+=", "beta", "*", "(", "other", ".", "position", "-", "self", ".", "position", ")", "self", ".", "position", "+=", "alpha", "*", "(", "...
Move firefly towards another given beta and alpha values
[ "Move", "firefly", "towards", "another", "given", "beta", "and", "alpha", "values" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/strategies/firefly_algorithm.py#L111-L116
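The clamping line above is cut off mid-expression, so here is a hedged reconstruction of the full update as a standalone function. The upper-bound clamp via np.minimum is visible in the record; the matching lower-bound half, and the layout of bounds as (low, high) pairs, are assumptions.

import numpy as np

def move_towards_sketch(position, other, beta, alpha, bounds):
    # Attraction term pulls this firefly toward the brighter one.
    position = position + beta * (other - position)
    # Random-walk term keeps the swarm exploring.
    position = position + alpha * np.random.uniform(-0.5, 0.5, len(position))
    # Clamp each coordinate into its [low, high] interval.
    position = np.minimum(position, [b[1] for b in bounds])  # upper bounds
    position = np.maximum(position, [b[0] for b in bounds])  # lower bounds (assumed)
    return position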
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
_default_verify_function
def _default_verify_function(instance, answer, result_host, atol, verbose): """default verify function based on numpy.allclose""" #first check if the length is the same if len(instance.arguments) != len(answer): raise TypeError("The length of argument list and provided results do not match.") #...
python
def _default_verify_function(instance, answer, result_host, atol, verbose): """default verify function based on numpy.allclose""" #first check if the length is the same if len(instance.arguments) != len(answer): raise TypeError("The length of argument list and provided results do not match.") #...
[ "def", "_default_verify_function", "(", "instance", ",", "answer", ",", "result_host", ",", "atol", ",", "verbose", ")", ":", "#first check if the length is the same", "if", "len", "(", "instance", ".", "arguments", ")", "!=", "len", "(", "answer", ")", ":", "...
default verify function based on numpy.allclose
[ "default", "verify", "function", "based", "on", "numpy", ".", "allclose" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L278-L345
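A condensed sketch of the allclose-based default described above. The convention that None entries in the answer list mark arguments to skip follows kernel_tuner's documented usage; the TypeError message is taken from the record.

import numpy as np

def verify_sketch(answer, result_host, atol=1e-6):
    # First check if the length is the same, as the function above does.
    if len(answer) != len(result_host):
        raise TypeError("The length of argument list and provided results do not match.")
    # None entries mark input-only arguments that need no verification.
    return all(
        expected is None or np.allclose(expected, actual, atol=atol)
        for expected, actual in zip(answer, result_host)
    )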
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
DeviceInterface.benchmark
def benchmark(self, func, gpu_args, instance, times, verbose): """benchmark the kernel instance""" logging.debug('benchmark ' + instance.name) logging.debug('thread block dimensions x,y,z=%d,%d,%d', *instance.threads) logging.debug('grid dimensions x,y,z=%d,%d,%d', *instance.grid) ...
python
def benchmark(self, func, gpu_args, instance, times, verbose): """benchmark the kernel instance""" logging.debug('benchmark ' + instance.name) logging.debug('thread block dimensions x,y,z=%d,%d,%d', *instance.threads) logging.debug('grid dimensions x,y,z=%d,%d,%d', *instance.grid) ...
[ "def", "benchmark", "(", "self", ",", "func", ",", "gpu_args", ",", "instance", ",", "times", ",", "verbose", ")", ":", "logging", ".", "debug", "(", "'benchmark '", "+", "instance", ".", "name", ")", "logging", ".", "debug", "(", "'thread block dimensions...
benchmark the kernel instance
[ "benchmark", "the", "kernel", "instance" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L68-L91
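The benchmark body is truncated, but its shape is a timing loop. A minimal sketch, assuming a `run_and_time` callable that launches the compiled kernel once and returns its runtime in milliseconds (the real backends time with device events):

import logging

def benchmark_sketch(run_and_time, iterations=7):
    # Launch the kernel `iterations` times and average the runtimes;
    # `run_and_time` is a stand-in for the backend's timed launch.
    logging.debug("benchmark: %d iterations", iterations)
    times = [run_and_time() for _ in range(iterations)]
    return sum(times) / len(times)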
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
DeviceInterface.check_kernel_output
def check_kernel_output(self, func, gpu_args, instance, answer, atol, verify, verbose): """runs the kernel once and checks the result against answer""" logging.debug('check_kernel_output') #if not using custom verify function, check if the length is the same if not verify and len(instan...
python
def check_kernel_output(self, func, gpu_args, instance, answer, atol, verify, verbose): """runs the kernel once and checks the result against answer""" logging.debug('check_kernel_output') #if not using custom verify function, check if the length is the same if not verify and len(instan...
[ "def", "check_kernel_output", "(", "self", ",", "func", ",", "gpu_args", ",", "instance", ",", "answer", ",", "atol", ",", "verify", ",", "verbose", ")", ":", "logging", ".", "debug", "(", "'check_kernel_output'", ")", "#if not using custom verify function, check ...
runs the kernel once and checks the result against answer
[ "runs", "the", "kernel", "once", "and", "checks", "the", "result", "against", "answer" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L93-L129
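A sketch of the single-run verification pass described above, with the device interaction abstracted behind two assumed callables, `run_once` and `copy_back`:

import numpy as np

def check_kernel_output_sketch(run_once, copy_back, answer, atol=1e-6):
    # Launch the kernel exactly once, then pull results to the host.
    run_once()
    result_host = copy_back()
    # Compare every verifiable argument against the reference answer;
    # None entries are input-only and skipped, as in the default verify.
    for expected, actual in zip(answer, result_host):
        if expected is not None and not np.allclose(expected, actual, atol=atol):
            raise RuntimeError("kernel output does not match expected answer")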
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
DeviceInterface.compile_and_benchmark
def compile_and_benchmark(self, gpu_args, params, kernel_options, tuning_options): """ Compile and benchmark a kernel instance based on kernel strings and parameters """ instance_string = util.get_instance_string(params) logging.debug('compile_and_benchmark ' + instance_string) mem_usa...
python
def compile_and_benchmark(self, gpu_args, params, kernel_options, tuning_options): """ Compile and benchmark a kernel instance based on kernel strings and parameters """ instance_string = util.get_instance_string(params) logging.debug('compile_and_benchmark ' + instance_string) mem_usa...
[ "def", "compile_and_benchmark", "(", "self", ",", "gpu_args", ",", "params", ",", "kernel_options", ",", "tuning_options", ")", ":", "instance_string", "=", "util", ".", "get_instance_string", "(", "params", ")", "logging", ".", "debug", "(", "'compile_and_benchma...
Compile and benchmark a kernel instance based on kernel strings and parameters
[ "Compile", "and", "benchmark", "a", "kernel", "instance", "based", "on", "kernel", "strings", "and", "parameters" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L132-L178
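How the two steps compose, as a sketch: a configuration that fails to compile because it exceeds device resources should yield no timing rather than abort the whole search. The callables are placeholders for the class methods in this record's neighbors (benchmark above, compile_kernel next).

def compile_and_benchmark_sketch(compile_kernel, benchmark, gpu_args, instance):
    # A resource-limited compile returns None instead of raising, so the
    # tuner can simply skip this configuration and keep searching.
    func = compile_kernel(instance)
    if func is None:
        return None
    return benchmark(func, gpu_args, instance)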
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
DeviceInterface.compile_kernel
def compile_kernel(self, instance, verbose): """compile the kernel for this specific instance""" logging.debug('compile_kernel ' + instance.name) #compile kernel_string into device func func = None try: func = self.dev.compile(instance.name, instance.kernel_string) ...
python
def compile_kernel(self, instance, verbose): """compile the kernel for this specific instance""" logging.debug('compile_kernel ' + instance.name) #compile kernel_string into device func func = None try: func = self.dev.compile(instance.name, instance.kernel_string) ...
[ "def", "compile_kernel", "(", "self", ",", "instance", ",", "verbose", ")", ":", "logging", ".", "debug", "(", "'compile_kernel '", "+", "instance", ".", "name", ")", "#compile kernel_string into device func", "func", "=", "None", "try", ":", "func", "=", "sel...
compile the kernel for this specific instance
[ "compile", "the", "kernel", "for", "this", "specific", "instance" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L180-L200
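A hedged sketch of the error handling this method's truncated body implies: compilation failures caused by over-allocating device resources are swallowed (returning None, which the compose step above treats as "skip"), while anything else propagates. The matched message substring is an assumption, since real backends raise driver-specific errors.

def compile_kernel_sketch(compile_fn, name, kernel_string, verbose=False):
    # `compile_fn` stands in for the language backend's compile call.
    try:
        return compile_fn(name, kernel_string)
    except Exception as exc:
        # Treat resource exhaustion (e.g. too much shared memory) as a
        # skippable configuration rather than a fatal error (assumed check).
        if "too much" in str(exc):
            if verbose:
                print(f"skipping {name}: exceeds device resources")
            return None
        raise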
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
DeviceInterface.copy_constant_memory_args
def copy_constant_memory_args(self, cmem_args): """adds constant memory arguments to the most recently compiled module, if using CUDA""" if self.lang == "CUDA": self.dev.copy_constant_memory_args(cmem_args) else: raise Exception("Error cannot copy constant memory argument...
python
def copy_constant_memory_args(self, cmem_args): """adds constant memory arguments to the most recently compiled module, if using CUDA""" if self.lang == "CUDA": self.dev.copy_constant_memory_args(cmem_args) else: raise Exception("Error cannot copy constant memory argument...
[ "def", "copy_constant_memory_args", "(", "self", ",", "cmem_args", ")", ":", "if", "self", ".", "lang", "==", "\"CUDA\"", ":", "self", ".", "dev", ".", "copy_constant_memory_args", "(", "cmem_args", ")", "else", ":", "raise", "Exception", "(", "\"Error cannot ...
adds constant memory arguments to the most recently compiled module, if using CUDA
[ "adds", "constant", "memory", "arguments", "to", "the", "most", "recently", "compiled", "module", "if", "using", "CUDA" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L202-L207
benvanwerkhoven/kernel_tuner
kernel_tuner/core.py
DeviceInterface.copy_texture_memory_args
def copy_texture_memory_args(self, texmem_args): """adds texture memory arguments to the most recently compiled module, if using CUDA""" if self.lang == "CUDA": self.dev.copy_texture_memory_args(texmem_args) else: raise Exception("Error cannot copy texture memory argument...
python
def copy_texture_memory_args(self, texmem_args): """adds texture memory arguments to the most recently compiled module, if using CUDA""" if self.lang == "CUDA": self.dev.copy_texture_memory_args(texmem_args) else: raise Exception("Error cannot copy texture memory argument...
[ "def", "copy_texture_memory_args", "(", "self", ",", "texmem_args", ")", ":", "if", "self", ".", "lang", "==", "\"CUDA\"", ":", "self", ".", "dev", ".", "copy_texture_memory_args", "(", "texmem_args", ")", "else", ":", "raise", "Exception", "(", "\"Error canno...
adds texture memory arguments to the most recently compiled module, if using CUDA
[ "adds", "texture", "memory", "arguments", "to", "the", "most", "recently", "compiled", "module", "if", "using", "CUDA" ]
train
https://github.com/benvanwerkhoven/kernel_tuner/blob/cfcb5da5e510db494f8219c22566ab65d5fcbd9f/kernel_tuner/core.py#L209-L214