Column                      Type           Min      Max
Unnamed: 0                  int64          0        10k
repository_name             stringlengths  7        54
func_path_in_repository     stringlengths  5        223
func_name                   stringlengths  1        134
whole_func_string           stringlengths  100      30.3k
language                    stringclasses  1 value
func_code_string            stringlengths  100      30.3k
func_code_tokens            stringlengths  138      33.2k
func_documentation_string   stringlengths  1        15k
func_documentation_tokens   stringlengths  5        5.14k
split_name                  stringclasses  1 value
func_code_url               stringlengths  91       315
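
The summary above lists each column's type and its minimum and maximum string length (stringclasses columns instead report a single distinct value). As a minimal sketch of how such a summary could be reproduced, assuming the records below were loaded into a pandas DataFrame (the two sample rows are copied from the records that follow; pandas itself is an assumption, not part of this dump):

import pandas as pd

# Two sample records copied from the rows below; only a few columns are kept for brevity.
df = pd.DataFrame([
    {
        "repository_name": "ryanmcgrath/twython",
        "func_name": "Twython.request",
        "func_documentation_string": "Return dict of response received from Twitter's API",
        "language": "python",
        "split_name": "train",
    },
    {
        "repository_name": "wdecoster/nanomath",
        "func_name": "get_N50",
        "func_documentation_string": "Calculate read length N50.",
        "language": "python",
        "split_name": "train",
    },
])

# Per-column min/max string length, mirroring the "stringlengths" rows of the schema above.
text_cols = ["repository_name", "func_name", "func_documentation_string"]
lengths = df[text_cols].apply(lambda col: col.str.len())
print(lengths.agg(["min", "max"]))

# Columns summarized as "stringclasses / 1 value" hold a single distinct value each.
print(df["language"].unique())    # ['python']
print(df["split_name"].unique())  # ['train']
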
5,900
ryanmcgrath/twython
twython/api.py
Twython.request
def request(self, endpoint, method='GET', params=None, version='1.1', json_encoded=False): """Return dict of response received from Twitter's API :param endpoint: (required) Full url or Twitter API endpoint (e.g. search/tweets) :type endpoint: string :param meth...
python
def request(self, endpoint, method='GET', params=None, version='1.1', json_encoded=False): """Return dict of response received from Twitter's API :param endpoint: (required) Full url or Twitter API endpoint (e.g. search/tweets) :type endpoint: string :param meth...
['def', 'request', '(', 'self', ',', 'endpoint', ',', 'method', '=', "'GET'", ',', 'params', '=', 'None', ',', 'version', '=', "'1.1'", ',', 'json_encoded', '=', 'False', ')', ':', 'if', 'endpoint', '.', 'startswith', '(', "'http://'", ')', ':', 'raise', 'TwythonError', '(', "'api.twitter.com is restricted to SSL/TLS t...
Return dict of response received from Twitter's API :param endpoint: (required) Full url or Twitter API endpoint (e.g. search/tweets) :type endpoint: string :param method: (optional) Method of accessing data, either GET, POST or DELETE. (default G...
['Return', 'dict', 'of', 'response', 'received', 'from', 'Twitter', 's', 'API']
train
https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L238-L274
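
Each record ends with a func_code_url: a GitHub blob URL whose #Lstart-Lend fragment marks the lines the function spans, as in the twython URL just above. A minimal sketch of how those lines could be fetched follows, assuming network access; the raw.githubusercontent.com rewrite and the requests library are assumptions, not something stated in this dump:

import re
import requests

def fetch_func_lines(func_code_url: str) -> str:
    """Download the line range referenced by a GitHub blob URL like the ones in these records."""
    match = re.match(r"(https://github\.com/.+?)/blob/(.+?)#L(\d+)-L(\d+)$", func_code_url)
    if match is None:
        raise ValueError("unexpected func_code_url format")
    repo, path_and_ref, start, end = match.groups()
    # Blob URLs map to raw.githubusercontent.com by dropping the '/blob' segment.
    raw_url = f"{repo}/{path_and_ref}".replace(
        "https://github.com", "https://raw.githubusercontent.com")
    lines = requests.get(raw_url, timeout=30).text.splitlines()
    return "\n".join(lines[int(start) - 1 : int(end)])

# Example with the URL from row 5,900 above.
print(fetch_func_lines(
    "https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L238-L274"))
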
5,901
inasafe/inasafe
safe/report/impact_report.py
ImpactReport._check_layer_count
def _check_layer_count(self, layer): """Check for the validity of the layer. :param layer: QGIS layer :type layer: qgis.core.QgsVectorLayer :return: """ if layer: if not layer.isValid(): raise ImpactReport.LayerException('Layer is not valid') ...
python
def _check_layer_count(self, layer): """Check for the validity of the layer. :param layer: QGIS layer :type layer: qgis.core.QgsVectorLayer :return: """ if layer: if not layer.isValid(): raise ImpactReport.LayerException('Layer is not valid') ...
['def', '_check_layer_count', '(', 'self', ',', 'layer', ')', ':', 'if', 'layer', ':', 'if', 'not', 'layer', '.', 'isValid', '(', ')', ':', 'raise', 'ImpactReport', '.', 'LayerException', '(', "'Layer is not valid'", ')', 'if', 'isinstance', '(', 'layer', ',', 'QgsRasterLayer', ')', ':', "# can't check feature count of...
Check for the validity of the layer. :param layer: QGIS layer :type layer: qgis.core.QgsVectorLayer :return:
['Check', 'for', 'the', 'validity', 'of', 'the', 'layer', '.']
train
https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/report/impact_report.py#L455-L471
5,902
apache/incubator-superset
superset/connectors/connector_registry.py
ConnectorRegistry.get_eager_datasource
def get_eager_datasource(cls, session, datasource_type, datasource_id): """Returns datasource with columns and metrics.""" datasource_class = ConnectorRegistry.sources[datasource_type] return ( session.query(datasource_class) .options( subqueryload(datasou...
python
def get_eager_datasource(cls, session, datasource_type, datasource_id): """Returns datasource with columns and metrics.""" datasource_class = ConnectorRegistry.sources[datasource_type] return ( session.query(datasource_class) .options( subqueryload(datasou...
['def', 'get_eager_datasource', '(', 'cls', ',', 'session', ',', 'datasource_type', ',', 'datasource_id', ')', ':', 'datasource_class', '=', 'ConnectorRegistry', '.', 'sources', '[', 'datasource_type', ']', 'return', '(', 'session', '.', 'query', '(', 'datasource_class', ')', '.', 'options', '(', 'subqueryload', '(', '...
Returns datasource with columns and metrics.
['Returns', 'datasource', 'with', 'columns', 'and', 'metrics', '.']
train
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/connectors/connector_registry.py#L76-L87
5,903
jasonbot/arcrest
arcrest/utils.py
pythonvaluetotime
def pythonvaluetotime(time_val): "Convert a time or time range from Python datetime to ArcGIS REST server" if time_val is None: return None elif isinstance(time_val, numeric): return str(long(time_val * 1000.0)) elif isinstance(time_val, date): dtlist = [time_val.year, time_val.m...
python
def pythonvaluetotime(time_val): "Convert a time or time range from Python datetime to ArcGIS REST server" if time_val is None: return None elif isinstance(time_val, numeric): return str(long(time_val * 1000.0)) elif isinstance(time_val, date): dtlist = [time_val.year, time_val.m...
['def', 'pythonvaluetotime', '(', 'time_val', ')', ':', 'if', 'time_val', 'is', 'None', ':', 'return', 'None', 'elif', 'isinstance', '(', 'time_val', ',', 'numeric', ')', ':', 'return', 'str', '(', 'long', '(', 'time_val', '*', '1000.0', ')', ')', 'elif', 'isinstance', '(', 'time_val', ',', 'date', ')', ':', 'dtlist', ...
Convert a time or time range from Python datetime to ArcGIS REST server
['Convert', 'a', 'time', 'or', 'time', 'range', 'from', 'Python', 'datetime', 'to', 'ArcGIS', 'REST', 'server']
train
https://github.com/jasonbot/arcrest/blob/b1ba71fd59bb6349415e7879d753d307dbc0da26/arcrest/utils.py#L35-L58
5,904
ramazanpolat/prodict
prodict/__init__.py
Prodict.attr_names
def attr_names(cls) -> List[str]: """ Returns annotated attribute names :return: List[str] """ return [k for k, v in cls.attr_types().items()]
python
def attr_names(cls) -> List[str]: """ Returns annotated attribute names :return: List[str] """ return [k for k, v in cls.attr_types().items()]
['def', 'attr_names', '(', 'cls', ')', '->', 'List', '[', 'str', ']', ':', 'return', '[', 'k', 'for', 'k', ',', 'v', 'in', 'cls', '.', 'attr_types', '(', ')', '.', 'items', '(', ')', ']']
Returns annotated attribute names :return: List[str]
['Returns', 'annotated', 'attribute', 'names', ':', 'return', ':', 'List', '[', 'str', ']']
train
https://github.com/ramazanpolat/prodict/blob/e67e34738af1542f3b6c91c0e838f5be9a84aad4/prodict/__init__.py#L49-L54
5,905
tensorpack/tensorpack
examples/FasterRCNN/utils/generate_anchors.py
generate_anchors
def generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=2**np.arange(3, 6)): """ Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window. """ base_anchor = np.array([1, 1, base_size, base_size], dtype='float32') - 1 ...
python
def generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=2**np.arange(3, 6)): """ Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window. """ base_anchor = np.array([1, 1, base_size, base_size], dtype='float32') - 1 ...
['def', 'generate_anchors', '(', 'base_size', '=', '16', ',', 'ratios', '=', '[', '0.5', ',', '1', ',', '2', ']', ',', 'scales', '=', '2', '**', 'np', '.', 'arange', '(', '3', ',', '6', ')', ')', ':', 'base_anchor', '=', 'np', '.', 'array', '(', '[', '1', ',', '1', ',', 'base_size', ',', 'base_size', ']', ',', 'dtype',...
Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window.
['Generate', 'anchor', '(', 'reference', ')', 'windows', 'by', 'enumerating', 'aspect', 'ratios', 'X', 'scales', 'wrt', 'a', 'reference', '(', '0', '0', '15', '15', ')', 'window', '.']
train
https://github.com/tensorpack/tensorpack/blob/d7a13cb74c9066bc791d7aafc3b744b60ee79a9f/examples/FasterRCNN/utils/generate_anchors.py#L41-L52
5,906
Nachtfeuer/pipeline
spline/matrix.py
Matrix.run_matrix_ordered
def run_matrix_ordered(self, process_data): """ Running pipelines one after the other. Returns dict: with two fields: success True/False and captured output (list of str). """ output = [] for entry in self.matrix: env = entry['env'].copy() ...
python
def run_matrix_ordered(self, process_data): """ Running pipelines one after the other. Returns dict: with two fields: success True/False and captured output (list of str). """ output = [] for entry in self.matrix: env = entry['env'].copy() ...
['def', 'run_matrix_ordered', '(', 'self', ',', 'process_data', ')', ':', 'output', '=', '[', ']', 'for', 'entry', 'in', 'self', '.', 'matrix', ':', 'env', '=', 'entry', '[', "'env'", ']', '.', 'copy', '(', ')', 'env', '.', 'update', '(', '{', "'PIPELINE_MATRIX'", ':', 'entry', '[', "'name'", ']', '}', ')', 'if', 'Matr...
Running pipelines one after the other. Returns dict: with two fields: success True/False and captured output (list of str).
['Running', 'pipelines', 'one', 'after', 'the', 'other', '.']
train
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/matrix.py#L136-L157
5,907
graphql-python/graphql-core-next
graphql/utilities/coerce_value.py
coerce_value
def coerce_value( value: Any, type_: GraphQLInputType, blame_node: Node = None, path: Path = None ) -> CoercedValue: """Coerce a Python value given a GraphQL Type. Returns either a value which is valid for the provided type or a list of encountered coercion errors. """ # A value must be provide...
python
def coerce_value( value: Any, type_: GraphQLInputType, blame_node: Node = None, path: Path = None ) -> CoercedValue: """Coerce a Python value given a GraphQL Type. Returns either a value which is valid for the provided type or a list of encountered coercion errors. """ # A value must be provide...
['def', 'coerce_value', '(', 'value', ':', 'Any', ',', 'type_', ':', 'GraphQLInputType', ',', 'blame_node', ':', 'Node', '=', 'None', ',', 'path', ':', 'Path', '=', 'None', ')', '->', 'CoercedValue', ':', '# A value must be provided if the type is non-null.', 'if', 'is_non_null_type', '(', 'type_', ')', ':', 'if', 'val...
Coerce a Python value given a GraphQL Type. Returns either a value which is valid for the provided type or a list of encountered coercion errors.
['Coerce', 'a', 'Python', 'value', 'given', 'a', 'GraphQL', 'Type', '.']
train
https://github.com/graphql-python/graphql-core-next/blob/073dce3f002f897d40f9348ffd8f107815160540/graphql/utilities/coerce_value.py#L33-L179
5,908
log2timeline/plaso
plaso/parsers/pe.py
PEParser._GetLoadConfigTimestamp
def _GetLoadConfigTimestamp(self, pefile_object): """Retrieves the timestamp from the Load Configuration directory. Args: pefile_object (pefile.PE): pefile object. Returns: int: load configuration timestamps or None if there are none present. """ if not hasattr(pefile_object, 'DIRECTOR...
python
def _GetLoadConfigTimestamp(self, pefile_object): """Retrieves the timestamp from the Load Configuration directory. Args: pefile_object (pefile.PE): pefile object. Returns: int: load configuration timestamps or None if there are none present. """ if not hasattr(pefile_object, 'DIRECTOR...
['def', '_GetLoadConfigTimestamp', '(', 'self', ',', 'pefile_object', ')', ':', 'if', 'not', 'hasattr', '(', 'pefile_object', ',', "'DIRECTORY_ENTRY_LOAD_CONFIG'", ')', ':', 'return', 'None', 'timestamp', '=', 'getattr', '(', 'pefile_object', '.', 'DIRECTORY_ENTRY_LOAD_CONFIG', '.', 'struct', ',', "'TimeDateStamp'", ',...
Retrieves the timestamp from the Load Configuration directory. Args: pefile_object (pefile.PE): pefile object. Returns: int: load configuration timestamps or None if there are none present.
['Retrieves', 'the', 'timestamp', 'from', 'the', 'Load', 'Configuration', 'directory', '.']
train
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/pe.py#L120-L133
5,909
collectiveacuity/labPack
labpack/storage/google/drive.py
driveClient._import
def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs): ''' a helper method for other storage clients to import into appdata :param record_key: string with key for record :param record_data: byte data for body of record :...
python
def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs): ''' a helper method for other storage clients to import into appdata :param record_key: string with key for record :param record_data: byte data for body of record :...
['def', '_import', '(', 'self', ',', 'record_key', ',', 'record_data', ',', 'overwrite', '=', 'True', ',', 'last_modified', '=', '0.0', ',', '*', '*', 'kwargs', ')', ':', 'title', '=', "'%s._import'", '%', 'self', '.', '__class__', '.', '__name__', '# verify permissions', 'if', 'not', 'self', '.', 'permissions_write', ...
a helper method for other storage clients to import into appdata :param record_key: string with key for record :param record_data: byte data for body of record :param overwrite: [optional] boolean to overwrite existing records :param last_modified: [optional] float to record...
['a', 'helper', 'method', 'for', 'other', 'storage', 'clients', 'to', 'import', 'into', 'appdata', ':', 'param', 'record_key', ':', 'string', 'with', 'key', 'for', 'record', ':', 'param', 'record_data', ':', 'byte', 'data', 'for', 'body', 'of', 'record', ':', 'param', 'overwrite', ':', '[', 'optional', ']', 'boolean', ...
train
https://github.com/collectiveacuity/labPack/blob/52949ece35e72e3cc308f54d9ffa6bfbd96805b8/labpack/storage/google/drive.py#L394-L527
5,910
numenta/htmresearch
htmresearch/algorithms/multiconnections.py
Multiconnections.setPermanences
def setPermanences(self, segments, presynapticCellsBySource, permanence): """ Set the permanence of a specific set of synapses. Any synapses that don't exist will be initialized. Any existing synapses will be overwritten. Conceptually, this method takes a list of [segment, presynapticCell] pairs an...
python
def setPermanences(self, segments, presynapticCellsBySource, permanence): """ Set the permanence of a specific set of synapses. Any synapses that don't exist will be initialized. Any existing synapses will be overwritten. Conceptually, this method takes a list of [segment, presynapticCell] pairs an...
['def', 'setPermanences', '(', 'self', ',', 'segments', ',', 'presynapticCellsBySource', ',', 'permanence', ')', ':', 'permanences', '=', 'np', '.', 'repeat', '(', 'np', '.', 'float32', '(', 'permanence', ')', ',', 'len', '(', 'segments', ')', ')', 'for', 'source', ',', 'connections', 'in', 'self', '.', 'connectionsByS...
Set the permanence of a specific set of synapses. Any synapses that don't exist will be initialized. Any existing synapses will be overwritten. Conceptually, this method takes a list of [segment, presynapticCell] pairs and initializes their permanence. For each segment, one synapse is added (although o...
['Set', 'the', 'permanence', 'of', 'a', 'specific', 'set', 'of', 'synapses', '.', 'Any', 'synapses', 'that', 'don', 't', 'exist', 'will', 'be', 'initialized', '.', 'Any', 'existing', 'synapses', 'will', 'be', 'overwritten', '.']
train
https://github.com/numenta/htmresearch/blob/70c096b09a577ea0432c3f3bfff4442d4871b7aa/htmresearch/algorithms/multiconnections.py#L109-L137
5,911
DLR-RM/RAFCON
source/rafcon/gui/controllers/utils/tree_view_controller.py
TreeViewController.update_selection_sm_prior_condition
def update_selection_sm_prior_condition(self, state_row_iter, selected_model_list, sm_selected_model_list): """State machine prior update of tree selection for one tree model row""" selected_path = self.tree_store.get_path(state_row_iter) tree_model_row = self.tree_store[selected_path] m...
python
def update_selection_sm_prior_condition(self, state_row_iter, selected_model_list, sm_selected_model_list): """State machine prior update of tree selection for one tree model row""" selected_path = self.tree_store.get_path(state_row_iter) tree_model_row = self.tree_store[selected_path] m...
['def', 'update_selection_sm_prior_condition', '(', 'self', ',', 'state_row_iter', ',', 'selected_model_list', ',', 'sm_selected_model_list', ')', ':', 'selected_path', '=', 'self', '.', 'tree_store', '.', 'get_path', '(', 'state_row_iter', ')', 'tree_model_row', '=', 'self', '.', 'tree_store', '[', 'selected_path', ']...
State machine prior update of tree selection for one tree model row
['State', 'machine', 'prior', 'update', 'of', 'tree', 'selection', 'for', 'one', 'tree', 'model', 'row']
train
https://github.com/DLR-RM/RAFCON/blob/24942ef1a904531f49ab8830a1dbb604441be498/source/rafcon/gui/controllers/utils/tree_view_controller.py#L775-L785
5,912
grycap/cpyutils
iputils.py
ip2hex
def ip2hex(ip): ''' Converts an ip to a hex value that can be used with a hex bit mask ''' parts = ip.split(".") if len(parts) != 4: return None ipv = 0 for part in parts: try: p = int(part) if p < 0 or p > 255: return None ipv = (ipv << 8) + p ...
python
def ip2hex(ip): ''' Converts an ip to a hex value that can be used with a hex bit mask ''' parts = ip.split(".") if len(parts) != 4: return None ipv = 0 for part in parts: try: p = int(part) if p < 0 or p > 255: return None ipv = (ipv << 8) + p ...
['def', 'ip2hex', '(', 'ip', ')', ':', 'parts', '=', 'ip', '.', 'split', '(', '"."', ')', 'if', 'len', '(', 'parts', ')', '!=', '4', ':', 'return', 'None', 'ipv', '=', '0', 'for', 'part', 'in', 'parts', ':', 'try', ':', 'p', '=', 'int', '(', 'part', ')', 'if', 'p', '<', '0', 'or', 'p', '>', '255', ':', 'return', 'None'...
Converts an ip to a hex value that can be used with a hex bit mask
['Converts', 'an', 'ip', 'to', 'a', 'hex', 'value', 'that', 'can', 'be', 'used', 'with', 'a', 'hex', 'bit', 'mask']
train
https://github.com/grycap/cpyutils/blob/fa966fc6d2ae1e1e799e19941561aa79b617f1b1/iputils.py#L19-L33
5,913
pygridtools/gridmap
examples/manual.py
compute_factorial
def compute_factorial(n): """ computes factorial of n """ sleep_walk(10) ret = 1 for i in range(n): ret = ret * (i + 1) return ret
python
def compute_factorial(n): """ computes factorial of n """ sleep_walk(10) ret = 1 for i in range(n): ret = ret * (i + 1) return ret
['def', 'compute_factorial', '(', 'n', ')', ':', 'sleep_walk', '(', '10', ')', 'ret', '=', '1', 'for', 'i', 'in', 'range', '(', 'n', ')', ':', 'ret', '=', 'ret', '*', '(', 'i', '+', '1', ')', 'return', 'ret']
computes factorial of n
['computes', 'factorial', 'of', 'n']
train
https://github.com/pygridtools/gridmap/blob/be4fb1478ab8d19fa3acddecdf1a5d8bd3789127/examples/manual.py#L49-L57
5,914
geertj/gruvi
lib/gruvi/logging.py
ContextLogger.thread_info
def thread_info(self): """Return a string identifying the current thread and fiber.""" tid = threading.current_thread().name if tid == 'MainThread': tid = 'Main' current = fibers.current() fid = getattr(current, 'name') if current.parent else 'Root' return '{}...
python
def thread_info(self): """Return a string identifying the current thread and fiber.""" tid = threading.current_thread().name if tid == 'MainThread': tid = 'Main' current = fibers.current() fid = getattr(current, 'name') if current.parent else 'Root' return '{}...
['def', 'thread_info', '(', 'self', ')', ':', 'tid', '=', 'threading', '.', 'current_thread', '(', ')', '.', 'name', 'if', 'tid', '==', "'MainThread'", ':', 'tid', '=', "'Main'", 'current', '=', 'fibers', '.', 'current', '(', ')', 'fid', '=', 'getattr', '(', 'current', ',', "'name'", ')', 'if', 'current', '.', 'parent'...
Return a string identifying the current thread and fiber.
['Return', 'a', 'string', 'identifying', 'the', 'current', 'thread', 'and', 'fiber', '.']
train
https://github.com/geertj/gruvi/blob/1d77ca439600b6ea7a19aa1ee85dca0f3be3f3f8/lib/gruvi/logging.py#L79-L86
5,915
lsbardel/python-stdnet
stdnet/odm/struct.py
Set.difference_update
def difference_update(self, values): '''Remove an iterable of *values* from the set.''' d = self.value_pickler.dumps return self.cache.remove(tuple((d(v) for v in values)))
python
def difference_update(self, values): '''Remove an iterable of *values* from the set.''' d = self.value_pickler.dumps return self.cache.remove(tuple((d(v) for v in values)))
['def', 'difference_update', '(', 'self', ',', 'values', ')', ':', 'd', '=', 'self', '.', 'value_pickler', '.', 'dumps', 'return', 'self', '.', 'cache', '.', 'remove', '(', 'tuple', '(', '(', 'd', '(', 'v', ')', 'for', 'v', 'in', 'values', ')', ')', ')']
Remove an iterable of *values* from the set.
['Remove', 'an', 'iterable', 'of', '*', 'values', '*', 'from', 'the', 'set', '.']
train
https://github.com/lsbardel/python-stdnet/blob/78db5320bdedc3f28c5e4f38cda13a4469e35db7/stdnet/odm/struct.py#L630-L633
5,916
tradenity/python-sdk
tradenity/resources/country.py
Country.create_country
def create_country(cls, country, **kwargs): """Create Country Create a new Country This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_country(country, async=True) >>> result = thre...
python
def create_country(cls, country, **kwargs): """Create Country Create a new Country This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_country(country, async=True) >>> result = thre...
['def', 'create_country', '(', 'cls', ',', 'country', ',', '*', '*', 'kwargs', ')', ':', 'kwargs', '[', "'_return_http_data_only'", ']', '=', 'True', 'if', 'kwargs', '.', 'get', '(', "'async'", ')', ':', 'return', 'cls', '.', '_create_country_with_http_info', '(', 'country', ',', '*', '*', 'kwargs', ')', 'else', ':', '...
Create Country Create a new Country This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_country(country, async=True) >>> result = thread.get() :param async bool :param Coun...
['Create', 'Country']
train
https://github.com/tradenity/python-sdk/blob/d13fbe23f4d6ff22554c6d8d2deaf209371adaf1/tradenity/resources/country.py#L401-L421
5,917
mitsei/dlkit
dlkit/handcar/learning/sessions.py
ObjectiveAdminSession.get_objective_form_for_create
def get_objective_form_for_create(self, objective_record_types=None): """Gets the objective form for creating new objectives. A new form should be requested for each create transaction. arg: objectiveRecordTypes (osid.type.Type): array of objective record types return:...
python
def get_objective_form_for_create(self, objective_record_types=None): """Gets the objective form for creating new objectives. A new form should be requested for each create transaction. arg: objectiveRecordTypes (osid.type.Type): array of objective record types return:...
['def', 'get_objective_form_for_create', '(', 'self', ',', 'objective_record_types', '=', 'None', ')', ':', 'if', 'objective_record_types', 'is', 'None', ':', 'pass', '# Still need to deal with the record_types argument', 'objective_form', '=', 'objects', '.', 'ObjectiveForm', '(', ')', 'self', '.', '_forms', '[', 'obj...
Gets the objective form for creating new objectives. A new form should be requested for each create transaction. arg: objectiveRecordTypes (osid.type.Type): array of objective record types return: (osid.learning.ObjectiveForm) - the objective form raise: NullArgument ...
['Gets', 'the', 'objective', 'form', 'for', 'creating', 'new', 'objectives', '.', 'A', 'new', 'form', 'should', 'be', 'requested', 'for', 'each', 'create', 'transaction', '.', 'arg', ':', 'objectiveRecordTypes', '(', 'osid', '.', 'type', '.', 'Type', ')', ':', 'array', 'of', 'objective', 'record', 'types', 'return', ':...
train
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/handcar/learning/sessions.py#L753-L771
5,918
astrocatalogs/astrocats
astrocats/catalog/analysis.py
Analysis._file_nums_str
def _file_nums_str(self, n_all, n_type, n_ign): """Construct a string showing the number of different file types. Returns ------- f_str : str """ # 'other' is the difference between all and named n_oth = n_all - np.sum(n_type) f_str = "{} Files".format(n...
python
def _file_nums_str(self, n_all, n_type, n_ign): """Construct a string showing the number of different file types. Returns ------- f_str : str """ # 'other' is the difference between all and named n_oth = n_all - np.sum(n_type) f_str = "{} Files".format(n...
['def', '_file_nums_str', '(', 'self', ',', 'n_all', ',', 'n_type', ',', 'n_ign', ')', ':', "# 'other' is the difference between all and named", 'n_oth', '=', 'n_all', '-', 'np', '.', 'sum', '(', 'n_type', ')', 'f_str', '=', '"{} Files"', '.', 'format', '(', 'n_all', ')', '+', '" ("', 'if', 'len', '(', 'n_type', ')', '...
Construct a string showing the number of different file types. Returns ------- f_str : str
['Construct', 'a', 'string', 'showing', 'the', 'number', 'of', 'different', 'file', 'types', '.']
train
https://github.com/astrocatalogs/astrocats/blob/11abc3131c6366ecd23964369e55ff264add7805/astrocats/catalog/analysis.py#L131-L147
5,919
mathandy/svgpathtools
svgpathtools/smoothing.py
kinks
def kinks(path, tol=1e-8): """returns indices of segments that start on a non-differentiable joint.""" kink_list = [] for idx in range(len(path)): if idx == 0 and not path.isclosed(): continue try: u = path[(idx - 1) % len(path)].unit_tangent(1) v = path[i...
python
def kinks(path, tol=1e-8): """returns indices of segments that start on a non-differentiable joint.""" kink_list = [] for idx in range(len(path)): if idx == 0 and not path.isclosed(): continue try: u = path[(idx - 1) % len(path)].unit_tangent(1) v = path[i...
['def', 'kinks', '(', 'path', ',', 'tol', '=', '1e-8', ')', ':', 'kink_list', '=', '[', ']', 'for', 'idx', 'in', 'range', '(', 'len', '(', 'path', ')', ')', ':', 'if', 'idx', '==', '0', 'and', 'not', 'path', '.', 'isclosed', '(', ')', ':', 'continue', 'try', ':', 'u', '=', 'path', '[', '(', 'idx', '-', '1', ')', '%', '...
returns indices of segments that start on a non-differentiable joint.
['returns', 'indices', 'of', 'segments', 'that', 'start', 'on', 'a', 'non', '-', 'differentiable', 'joint', '.']
train
https://github.com/mathandy/svgpathtools/blob/fd7348a1dfd88b65ea61da02325c6605aedf8c4f/svgpathtools/smoothing.py#L23-L39
5,920
etingof/pysnmp
pysnmp/smi/rfc1902.py
ObjectType.resolveWithMib
def resolveWithMib(self, mibViewController): """Perform MIB variable ID and associated value conversion. Parameters ---------- mibViewController : :py:class:`~pysnmp.smi.view.MibViewController` class instance representing MIB browsing functionality. Returns ...
python
def resolveWithMib(self, mibViewController): """Perform MIB variable ID and associated value conversion. Parameters ---------- mibViewController : :py:class:`~pysnmp.smi.view.MibViewController` class instance representing MIB browsing functionality. Returns ...
['def', 'resolveWithMib', '(', 'self', ',', 'mibViewController', ')', ':', 'if', 'self', '.', '_state', '&', 'self', '.', 'ST_CLEAM', ':', 'return', 'self', 'self', '.', '_args', '[', '0', ']', '.', 'resolveWithMib', '(', 'mibViewController', ')', 'MibScalar', ',', 'MibTableColumn', '=', 'mibViewController', '.', 'mibB...
Perform MIB variable ID and associated value conversion. Parameters ---------- mibViewController : :py:class:`~pysnmp.smi.view.MibViewController` class instance representing MIB browsing functionality. Returns ------- : :py:class:`~pysnmp.smi.rfc1902.ObjectT...
['Perform', 'MIB', 'variable', 'ID', 'and', 'associated', 'value', 'conversion', '.']
train
https://github.com/etingof/pysnmp/blob/cde062dd42f67dfd2d7686286a322d40e9c3a4b7/pysnmp/smi/rfc1902.py#L911-L993
5,921
SBRG/ssbio
ssbio/protein/sequence/seqprop.py
SeqProp.get_biopython_pepstats
def get_biopython_pepstats(self, clean_seq=False): """Run Biopython's built in ProteinAnalysis module and store statistics in the ``annotations`` attribute.""" if self.seq: if clean_seq: # TODO: can make this a property of the SeqProp class seq = self.seq_str.replace('X', '...
python
def get_biopython_pepstats(self, clean_seq=False): """Run Biopython's built in ProteinAnalysis module and store statistics in the ``annotations`` attribute.""" if self.seq: if clean_seq: # TODO: can make this a property of the SeqProp class seq = self.seq_str.replace('X', '...
['def', 'get_biopython_pepstats', '(', 'self', ',', 'clean_seq', '=', 'False', ')', ':', 'if', 'self', '.', 'seq', ':', 'if', 'clean_seq', ':', '# TODO: can make this a property of the SeqProp class', 'seq', '=', 'self', '.', 'seq_str', '.', 'replace', '(', "'X'", ',', "''", ')', '.', 'replace', '(', "'U'", ',', "''", ...
Run Biopython's built in ProteinAnalysis module and store statistics in the ``annotations`` attribute.
['Run', 'Biopython', 's', 'built', 'in', 'ProteinAnalysis', 'module', 'and', 'store', 'statistics', 'in', 'the', 'annotations', 'attribute', '.']
train
https://github.com/SBRG/ssbio/blob/e9449e64ffc1a1f5ad07e5849aa12a650095f8a2/ssbio/protein/sequence/seqprop.py#L660-L679
5,922
pgmpy/pgmpy
pgmpy/inference/ExactInference.py
VariableElimination.query
def query(self, variables, evidence=None, elimination_order=None, joint=True): """ Parameters ---------- variables: list list of variables for which you want to compute the probability evidence: dict a dict key, value pair as {var: state_of_var_observed} ...
python
def query(self, variables, evidence=None, elimination_order=None, joint=True): """ Parameters ---------- variables: list list of variables for which you want to compute the probability evidence: dict a dict key, value pair as {var: state_of_var_observed} ...
['def', 'query', '(', 'self', ',', 'variables', ',', 'evidence', '=', 'None', ',', 'elimination_order', '=', 'None', ',', 'joint', '=', 'True', ')', ':', 'return', 'self', '.', '_variable_elimination', '(', 'variables', ',', "'marginalize'", ',', 'evidence', '=', 'evidence', ',', 'elimination_order', '=', 'elimination_...
Parameters ---------- variables: list list of variables for which you want to compute the probability evidence: dict a dict key, value pair as {var: state_of_var_observed} None if no evidence elimination_order: list order of variable elim...
['Parameters', '----------', 'variables', ':', 'list', 'list', 'of', 'variables', 'for', 'which', 'you', 'want', 'to', 'compute', 'the', 'probability']
train
https://github.com/pgmpy/pgmpy/blob/9381a66aba3c3871d3ccd00672b148d17d63239e/pgmpy/inference/ExactInference.py#L109-L143
5,923
probcomp/crosscat
src/utils/unionfind.py
union
def union(a, b): """Assert equality of two nodes a and b so find(a) is find(b).""" a = find(a) b = find(b) if a is not b: if a.rank < b.rank: a.parent = b elif b.rank < a.rank: b.parent = a else: b.parent = a a.rank += 1
python
def union(a, b): """Assert equality of two nodes a and b so find(a) is find(b).""" a = find(a) b = find(b) if a is not b: if a.rank < b.rank: a.parent = b elif b.rank < a.rank: b.parent = a else: b.parent = a a.rank += 1
['def', 'union', '(', 'a', ',', 'b', ')', ':', 'a', '=', 'find', '(', 'a', ')', 'b', '=', 'find', '(', 'b', ')', 'if', 'a', 'is', 'not', 'b', ':', 'if', 'a', '.', 'rank', '<', 'b', '.', 'rank', ':', 'a', '.', 'parent', '=', 'b', 'elif', 'b', '.', 'rank', '<', 'a', '.', 'rank', ':', 'b', '.', 'parent', '=', 'a', 'else',...
Assert equality of two nodes a and b so find(a) is find(b).
['Assert', 'equality', 'of', 'two', 'nodes', 'a', 'and', 'b', 'so', 'find', '(', 'a', ')', 'is', 'find', '(', 'b', ')', '.']
train
https://github.com/probcomp/crosscat/blob/4a05bddb06a45f3b7b3e05e095720f16257d1535/src/utils/unionfind.py#L45-L56
5,924
jbloomlab/phydms
phydmslib/models.py
GammaDistributedModel.branchScale
def branchScale(self): """See docs for `Model` abstract base class.""" bscales = [m.branchScale for m in self._models] return (self.catweights * bscales).sum()
python
def branchScale(self): """See docs for `Model` abstract base class.""" bscales = [m.branchScale for m in self._models] return (self.catweights * bscales).sum()
['def', 'branchScale', '(', 'self', ')', ':', 'bscales', '=', '[', 'm', '.', 'branchScale', 'for', 'm', 'in', 'self', '.', '_models', ']', 'return', '(', 'self', '.', 'catweights', '*', 'bscales', ')', '.', 'sum', '(', ')']
See docs for `Model` abstract base class.
['See', 'docs', 'for', 'Model', 'abstract', 'base', 'class', '.']
train
https://github.com/jbloomlab/phydms/blob/9cdebc10bafbe543c552d79486c7f950780ed3c0/phydmslib/models.py#L2226-L2229
5,925
secdev/scapy
scapy/arch/bpf/supersocket.py
bpf_select
def bpf_select(fds_list, timeout=None): """A call to recv() can return several frames. This functions hides the fact that some frames are read from the internal buffer.""" # Check file descriptors types bpf_scks_buffered = list() select_fds = list() for tmp_fd in fds_list: # Specif...
python
def bpf_select(fds_list, timeout=None): """A call to recv() can return several frames. This functions hides the fact that some frames are read from the internal buffer.""" # Check file descriptors types bpf_scks_buffered = list() select_fds = list() for tmp_fd in fds_list: # Specif...
['def', 'bpf_select', '(', 'fds_list', ',', 'timeout', '=', 'None', ')', ':', '# Check file descriptors types', 'bpf_scks_buffered', '=', 'list', '(', ')', 'select_fds', '=', 'list', '(', ')', 'for', 'tmp_fd', 'in', 'fds_list', ':', '# Specific BPF sockets: get buffers status', 'if', 'isBPFSocket', '(', 'tmp_fd', ')', ...
A call to recv() can return several frames. This functions hides the fact that some frames are read from the internal buffer.
['A', 'call', 'to', 'recv', '()', 'can', 'return', 'several', 'frames', '.', 'This', 'functions', 'hides', 'the', 'fact', 'that', 'some', 'frames', 'are', 'read', 'from', 'the', 'internal', 'buffer', '.']
train
https://github.com/secdev/scapy/blob/3ffe757c184017dd46464593a8f80f85abc1e79a/scapy/arch/bpf/supersocket.py#L369-L394
5,926
spulec/moto
scripts/scaffold.py
initialize_service
def initialize_service(service, operation, api_protocol): """create lib and test dirs if not exist """ lib_dir = get_lib_dir(service) test_dir = get_test_dir(service) print_progress('Initializing service', service, 'green') client = boto3.client(service) service_class = client.__class__.__...
python
def initialize_service(service, operation, api_protocol): """create lib and test dirs if not exist """ lib_dir = get_lib_dir(service) test_dir = get_test_dir(service) print_progress('Initializing service', service, 'green') client = boto3.client(service) service_class = client.__class__.__...
['def', 'initialize_service', '(', 'service', ',', 'operation', ',', 'api_protocol', ')', ':', 'lib_dir', '=', 'get_lib_dir', '(', 'service', ')', 'test_dir', '=', 'get_test_dir', '(', 'service', ')', 'print_progress', '(', "'Initializing service'", ',', 'service', ',', "'green'", ')', 'client', '=', 'boto3', '.', 'cli...
create lib and test dirs if not exist
['create', 'lib', 'and', 'test', 'dirs', 'if', 'not', 'exist']
train
https://github.com/spulec/moto/blob/4a286c4bc288933bb023396e2784a6fdbb966bc9/scripts/scaffold.py#L167-L216
5,927
Fantomas42/django-blog-zinnia
zinnia/templatetags/zinnia.py
get_recent_linkbacks
def get_recent_linkbacks(number=5, template='zinnia/tags/linkbacks_recent.html'): """ Return the most recent linkbacks. """ entry_published_pks = map(smart_text, Entry.published.values_list('id', flat=True)) content_type = ContentType.objects.ge...
python
def get_recent_linkbacks(number=5, template='zinnia/tags/linkbacks_recent.html'): """ Return the most recent linkbacks. """ entry_published_pks = map(smart_text, Entry.published.values_list('id', flat=True)) content_type = ContentType.objects.ge...
['def', 'get_recent_linkbacks', '(', 'number', '=', '5', ',', 'template', '=', "'zinnia/tags/linkbacks_recent.html'", ')', ':', 'entry_published_pks', '=', 'map', '(', 'smart_text', ',', 'Entry', '.', 'published', '.', 'values_list', '(', "'id'", ',', 'flat', '=', 'True', ')', ')', 'content_type', '=', 'ContentType', '...
Return the most recent linkbacks.
['Return', 'the', 'most', 'recent', 'linkbacks', '.']
train
https://github.com/Fantomas42/django-blog-zinnia/blob/b4949304b104a8e1a7a7a0773cbfd024313c3a15/zinnia/templatetags/zinnia.py#L245-L263
5,928
gautammishra/lyft-rides-python-sdk
lyft_rides/request.py
Request._build_headers
def _build_headers(self, method, auth_session): """Create headers for the request. Parameters method (str) HTTP method (e.g. 'POST'). auth_session (Session) The Session object containing OAuth 2.0 credentials. Returns headers (d...
python
def _build_headers(self, method, auth_session): """Create headers for the request. Parameters method (str) HTTP method (e.g. 'POST'). auth_session (Session) The Session object containing OAuth 2.0 credentials. Returns headers (d...
['def', '_build_headers', '(', 'self', ',', 'method', ',', 'auth_session', ')', ':', 'token_type', '=', 'auth_session', '.', 'token_type', 'token', '=', 'auth_session', '.', 'oauth2credential', '.', 'access_token', 'if', 'not', 'self', '.', '_authorization_headers_valid', '(', 'token_type', ',', 'token', ')', ':', 'mes...
Create headers for the request. Parameters method (str) HTTP method (e.g. 'POST'). auth_session (Session) The Session object containing OAuth 2.0 credentials. Returns headers (dict) Dictionary of access headers to attach...
['Create', 'headers', 'for', 'the', 'request', '.', 'Parameters', 'method', '(', 'str', ')', 'HTTP', 'method', '(', 'e', '.', 'g', '.', 'POST', ')', '.', 'auth_session', '(', 'Session', ')', 'The', 'Session', 'object', 'containing', 'OAuth', '2', '.', '0', 'credentials', '.', 'Returns', 'headers', '(', 'dict', ')', 'Di...
train
https://github.com/gautammishra/lyft-rides-python-sdk/blob/b6d96a0fceaf7dc3425153c418a8e25c57803431/lyft_rides/request.py#L131-L160
5,929
llllllllll/codetransformer
codetransformer/decompiler/_343.py
make_if_statement
def make_if_statement(instr, queue, stack, context): """ Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE. """ test_expr = make_expr(stack) if isinstance(instr, instrs.POP_JUMP_IF_TRUE): test_expr = ast.UnaryOp(op=ast.Not(), operand=test_expr) first_block = popwhile(op....
python
def make_if_statement(instr, queue, stack, context): """ Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE. """ test_expr = make_expr(stack) if isinstance(instr, instrs.POP_JUMP_IF_TRUE): test_expr = ast.UnaryOp(op=ast.Not(), operand=test_expr) first_block = popwhile(op....
['def', 'make_if_statement', '(', 'instr', ',', 'queue', ',', 'stack', ',', 'context', ')', ':', 'test_expr', '=', 'make_expr', '(', 'stack', ')', 'if', 'isinstance', '(', 'instr', ',', 'instrs', '.', 'POP_JUMP_IF_TRUE', ')', ':', 'test_expr', '=', 'ast', '.', 'UnaryOp', '(', 'op', '=', 'ast', '.', 'Not', '(', ')', ','...
Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE.
['Make', 'an', 'ast', '.', 'If', 'block', 'from', 'a', 'POP_JUMP_IF_TRUE', 'or', 'POP_JUMP_IF_FALSE', '.']
train
https://github.com/llllllllll/codetransformer/blob/c5f551e915df45adc7da7e0b1b635f0cc6a1bb27/codetransformer/decompiler/_343.py#L182-L211
5,930
wdecoster/nanomath
nanomath/nanomath.py
get_N50
def get_N50(readlengths): """Calculate read length N50. Based on https://github.com/PapenfussLab/Mungo/blob/master/bin/fasta_stats.py """ return readlengths[np.where(np.cumsum(readlengths) >= 0.5 * np.sum(readlengths))[0][0]]
python
def get_N50(readlengths): """Calculate read length N50. Based on https://github.com/PapenfussLab/Mungo/blob/master/bin/fasta_stats.py """ return readlengths[np.where(np.cumsum(readlengths) >= 0.5 * np.sum(readlengths))[0][0]]
['def', 'get_N50', '(', 'readlengths', ')', ':', 'return', 'readlengths', '[', 'np', '.', 'where', '(', 'np', '.', 'cumsum', '(', 'readlengths', ')', '>=', '0.5', '*', 'np', '.', 'sum', '(', 'readlengths', ')', ')', '[', '0', ']', '[', '0', ']', ']']
Calculate read length N50. Based on https://github.com/PapenfussLab/Mungo/blob/master/bin/fasta_stats.py
['Calculate', 'read', 'length', 'N50', '.']
train
https://github.com/wdecoster/nanomath/blob/38ede9f957d5c53e2ba3648641e4f23e93b49132/nanomath/nanomath.py#L54-L59
5,931
networks-lab/metaknowledge
metaknowledge/graphHelpers.py
getNodeDegrees
def getNodeDegrees(grph, weightString = "weight", strictMode = False, returnType = int, edgeType = 'bi'): """ Retunrs a dictionary of nodes to their degrees, the degree is determined by adding the weight of edge with the weight being the string weightString that gives the name of the attribute of each edge con...
python
def getNodeDegrees(grph, weightString = "weight", strictMode = False, returnType = int, edgeType = 'bi'): """ Retunrs a dictionary of nodes to their degrees, the degree is determined by adding the weight of edge with the weight being the string weightString that gives the name of the attribute of each edge con...
['def', 'getNodeDegrees', '(', 'grph', ',', 'weightString', '=', '"weight"', ',', 'strictMode', '=', 'False', ',', 'returnType', '=', 'int', ',', 'edgeType', '=', "'bi'", ')', ':', 'ndsDict', '=', '{', '}', 'for', 'nd', 'in', 'grph', '.', 'nodes', '(', ')', ':', 'ndsDict', '[', 'nd', ']', '=', 'returnType', '(', '0', '...
Retunrs a dictionary of nodes to their degrees, the degree is determined by adding the weight of edge with the weight being the string weightString that gives the name of the attribute of each edge containng thier weight. The Weights are then converted to the type returnType. If weightString is give as False instead ea...
['Retunrs', 'a', 'dictionary', 'of', 'nodes', 'to', 'their', 'degrees', 'the', 'degree', 'is', 'determined', 'by', 'adding', 'the', 'weight', 'of', 'edge', 'with', 'the', 'weight', 'being', 'the', 'string', 'weightString', 'that', 'gives', 'the', 'name', 'of', 'the', 'attribute', 'of', 'each', 'edge', 'containng', 'thi...
train
https://github.com/networks-lab/metaknowledge/blob/8162bf95e66bb6f9916081338e6e2a6132faff75/metaknowledge/graphHelpers.py#L457-L486
5,932
kevinpt/hdlparse
hdlparse/verilog_parser.py
VerilogExtractor.extract_objects_from_source
def extract_objects_from_source(self, text, type_filter=None): '''Extract object declarations from a text buffer Args: text (str): Source code to parse type_filter (class, optional): Object class to filter results Returns: List of parsed objects. ''' objects = parse_verilog(text) ...
python
def extract_objects_from_source(self, text, type_filter=None): '''Extract object declarations from a text buffer Args: text (str): Source code to parse type_filter (class, optional): Object class to filter results Returns: List of parsed objects. ''' objects = parse_verilog(text) ...
['def', 'extract_objects_from_source', '(', 'self', ',', 'text', ',', 'type_filter', '=', 'None', ')', ':', 'objects', '=', 'parse_verilog', '(', 'text', ')', 'if', 'type_filter', ':', 'objects', '=', '[', 'o', 'for', 'o', 'in', 'objects', 'if', 'isinstance', '(', 'o', ',', 'type_filter', ')', ']', 'return', 'objects']
Extract object declarations from a text buffer Args: text (str): Source code to parse type_filter (class, optional): Object class to filter results Returns: List of parsed objects.
['Extract', 'object', 'declarations', 'from', 'a', 'text', 'buffer']
train
https://github.com/kevinpt/hdlparse/blob/be7cdab08a8c18815cc4504003ce9ca7fff41022/hdlparse/verilog_parser.py#L249-L263
5,933
thespacedoctor/rockAtlas
rockAtlas/bookkeeping/bookkeeper.py
bookkeeper.import_new_atlas_pointings
def import_new_atlas_pointings( self, recent=False): """ *Import any new ATLAS pointings from the atlas3/atlas4 databases into the ``atlas_exposures`` table of the Atlas Movers database* **Key Arguments:** - ``recent`` -- only sync the most recent 2 weeks of ...
python
def import_new_atlas_pointings( self, recent=False): """ *Import any new ATLAS pointings from the atlas3/atlas4 databases into the ``atlas_exposures`` table of the Atlas Movers database* **Key Arguments:** - ``recent`` -- only sync the most recent 2 weeks of ...
['def', 'import_new_atlas_pointings', '(', 'self', ',', 'recent', '=', 'False', ')', ':', 'self', '.', 'log', '.', 'info', '(', "'starting the ``import_new_atlas_pointings`` method'", ')', 'if', 'recent', ':', 'mjd', '=', 'mjdnow', '(', 'log', '=', 'self', '.', 'log', ')', '.', 'get_mjd', '(', ')', 'recent', '=', 'mjd'...
*Import any new ATLAS pointings from the atlas3/atlas4 databases into the ``atlas_exposures`` table of the Atlas Movers database* **Key Arguments:** - ``recent`` -- only sync the most recent 2 weeks of data (speeds things up) **Return:** - None **Usage:** ...
['*', 'Import', 'any', 'new', 'ATLAS', 'pointings', 'from', 'the', 'atlas3', '/', 'atlas4', 'databases', 'into', 'the', 'atlas_exposures', 'table', 'of', 'the', 'Atlas', 'Movers', 'database', '*']
train
https://github.com/thespacedoctor/rockAtlas/blob/062ecaa95ab547efda535aa33165944f13c621de/rockAtlas/bookkeeping/bookkeeper.py#L108-L231
5,934
manns/pyspread
pyspread/src/gui/_grid.py
GridEventHandlers.OnInsertCols
def OnInsertCols(self, event): """Inserts the maximum of 1 and the number of selected columns""" bbox = self.grid.selection.get_bbox() if bbox is None or bbox[1][1] is None: # Insert rows at cursor ins_point = self.grid.actions.cursor[1] - 1 no_cols = 1 ...
python
def OnInsertCols(self, event): """Inserts the maximum of 1 and the number of selected columns""" bbox = self.grid.selection.get_bbox() if bbox is None or bbox[1][1] is None: # Insert rows at cursor ins_point = self.grid.actions.cursor[1] - 1 no_cols = 1 ...
['def', 'OnInsertCols', '(', 'self', ',', 'event', ')', ':', 'bbox', '=', 'self', '.', 'grid', '.', 'selection', '.', 'get_bbox', '(', ')', 'if', 'bbox', 'is', 'None', 'or', 'bbox', '[', '1', ']', '[', '1', ']', 'is', 'None', ':', '# Insert rows at cursor', 'ins_point', '=', 'self', '.', 'grid', '.', 'actions', '.', 'c...
Inserts the maximum of 1 and the number of selected columns
['Inserts', 'the', 'maximum', 'of', '1', 'and', 'the', 'number', 'of', 'selected', 'columns']
train
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid.py#L1346-L1368
5,935
timdiels/pytil
pytil/path.py
hash
def hash(path, hash_function=hashlib.sha512): # @ReservedAssignment ''' Hash file or directory. Parameters ---------- path : ~pathlib.Path File or directory to hash. hash_function : ~typing.Callable[[], hash object] Function which creates a hashlib hash object when called. Defa...
python
def hash(path, hash_function=hashlib.sha512): # @ReservedAssignment ''' Hash file or directory. Parameters ---------- path : ~pathlib.Path File or directory to hash. hash_function : ~typing.Callable[[], hash object] Function which creates a hashlib hash object when called. Defa...
['def', 'hash', '(', 'path', ',', 'hash_function', '=', 'hashlib', '.', 'sha512', ')', ':', '# @ReservedAssignment', 'hash_', '=', 'hash_function', '(', ')', 'if', 'path', '.', 'is_dir', '(', ')', ':', 'for', 'directory', ',', 'directories', ',', 'files', 'in', 'os', '.', 'walk', '(', 'str', '(', 'path', ')', ',', 'top...
Hash file or directory. Parameters ---------- path : ~pathlib.Path File or directory to hash. hash_function : ~typing.Callable[[], hash object] Function which creates a hashlib hash object when called. Defaults to ``hashlib.sha512``. Returns ------- hash object ...
['Hash', 'file', 'or', 'directory', '.']
train
https://github.com/timdiels/pytil/blob/086a3f8d52caecdd9d1c9f66c8d8a6d38667b00b/pytil/path.py#L188-L243
5,936
SUNCAT-Center/CatHub
cathub/ase_tools/__init__.py
update_ase
def update_ase(db_file, identity, stdout, **key_value_pairs): """Connect to ASE db""" db_ase = ase.db.connect(db_file) _normalize_key_value_pairs_inplace(key_value_pairs) count = db_ase.update(identity, **key_value_pairs) stdout.write(' Updating {0} key value pairs in ASE db row id = {1}\n' ...
python
def update_ase(db_file, identity, stdout, **key_value_pairs): """Connect to ASE db""" db_ase = ase.db.connect(db_file) _normalize_key_value_pairs_inplace(key_value_pairs) count = db_ase.update(identity, **key_value_pairs) stdout.write(' Updating {0} key value pairs in ASE db row id = {1}\n' ...
['def', 'update_ase', '(', 'db_file', ',', 'identity', ',', 'stdout', ',', '*', '*', 'key_value_pairs', ')', ':', 'db_ase', '=', 'ase', '.', 'db', '.', 'connect', '(', 'db_file', ')', '_normalize_key_value_pairs_inplace', '(', 'key_value_pairs', ')', 'count', '=', 'db_ase', '.', 'update', '(', 'identity', ',', '*', '*'...
Connect to ASE db
['Connect', 'to', 'ASE', 'db']
train
https://github.com/SUNCAT-Center/CatHub/blob/324625d1d8e740673f139658b2de4c9e1059739e/cathub/ase_tools/__init__.py#L360-L368
5,937
senaite/senaite.core
bika/lims/api/__init__.py
get_parent
def get_parent(brain_or_object, catalog_search=False): """Locate the parent object of the content/catalog brain The `catalog_search` switch uses the `portal_catalog` to do a search return a brain instead of the full parent object. However, if the search returned no results, it falls back to return the ...
python
def get_parent(brain_or_object, catalog_search=False): """Locate the parent object of the content/catalog brain The `catalog_search` switch uses the `portal_catalog` to do a search return a brain instead of the full parent object. However, if the search returned no results, it falls back to return the ...
['def', 'get_parent', '(', 'brain_or_object', ',', 'catalog_search', '=', 'False', ')', ':', 'if', 'is_portal', '(', 'brain_or_object', ')', ':', 'return', 'get_portal', '(', ')', '# Do a catalog search and return the brain', 'if', 'catalog_search', ':', 'parent_path', '=', 'get_parent_path', '(', 'brain_or_object', ')...
Locate the parent object of the content/catalog brain The `catalog_search` switch uses the `portal_catalog` to do a search return a brain instead of the full parent object. However, if the search returned no results, it falls back to return the full parent object. :param brain_or_object: A single cata...
['Locate', 'the', 'parent', 'object', 'of', 'the', 'content', '/', 'catalog', 'brain']
train
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/api/__init__.py#L561-L602
5,938
napalm-automation/napalm
napalm/ios/ios.py
IOSDriver._discard_config
def _discard_config(self): """Set candidate_cfg to current running-config. Erase the merge_cfg file.""" discard_candidate = "copy running-config {}".format( self._gen_full_path(self.candidate_cfg) ) discard_merge = "copy null: {}".format(self._gen_full_path(self.merge_cfg)) ...
python
def _discard_config(self): """Set candidate_cfg to current running-config. Erase the merge_cfg file.""" discard_candidate = "copy running-config {}".format( self._gen_full_path(self.candidate_cfg) ) discard_merge = "copy null: {}".format(self._gen_full_path(self.merge_cfg)) ...
['def', '_discard_config', '(', 'self', ')', ':', 'discard_candidate', '=', '"copy running-config {}"', '.', 'format', '(', 'self', '.', '_gen_full_path', '(', 'self', '.', 'candidate_cfg', ')', ')', 'discard_merge', '=', '"copy null: {}"', '.', 'format', '(', 'self', '.', '_gen_full_path', '(', 'self', '.', 'merge_cfg...
Set candidate_cfg to current running-config. Erase the merge_cfg file.
['Set', 'candidate_cfg', 'to', 'current', 'running', '-', 'config', '.', 'Erase', 'the', 'merge_cfg', 'file', '.']
train
https://github.com/napalm-automation/napalm/blob/c11ae8bb5ce395698704a0051cdf8d144fbb150d/napalm/ios/ios.py#L544-L551
5,939
programa-stic/barf-project
barf/arch/x86/parser.py
parse_instruction
def parse_instruction(string, location, tokens): """Parse an x86 instruction. """ prefix_str = tokens.get("prefix", None) mnemonic_str = tokens.get("mnemonic") operands = [op for op in tokens.get("operands", [])] infer_operands_size(operands) # Quick hack: Capstone returns rep instead of r...
python
def parse_instruction(string, location, tokens): """Parse an x86 instruction. """ prefix_str = tokens.get("prefix", None) mnemonic_str = tokens.get("mnemonic") operands = [op for op in tokens.get("operands", [])] infer_operands_size(operands) # Quick hack: Capstone returns rep instead of r...
['def', 'parse_instruction', '(', 'string', ',', 'location', ',', 'tokens', ')', ':', 'prefix_str', '=', 'tokens', '.', 'get', '(', '"prefix"', ',', 'None', ')', 'mnemonic_str', '=', 'tokens', '.', 'get', '(', '"mnemonic"', ')', 'operands', '=', '[', 'op', 'for', 'op', 'in', 'tokens', '.', 'get', '(', '"operands"', ','...
Parse an x86 instruction.
['Parse', 'an', 'x86', 'instruction', '.']
train
https://github.com/programa-stic/barf-project/blob/18ed9e5eace55f7bf6015ec57f037c364099021c/barf/arch/x86/parser.py#L134-L155
5,940
datamachine/twx
twx/twx.py
Peer.send_message
def send_message(self, text: str, reply: int=None, link_preview: bool=None, on_success: callable=None, reply_markup: botapi.ReplyMarkup=None): """ Send message to this peer. :param text: Text to send. :param reply: Message object or message_id to reply to. :p...
python
def send_message(self, text: str, reply: int=None, link_preview: bool=None, on_success: callable=None, reply_markup: botapi.ReplyMarkup=None): """ Send message to this peer. :param text: Text to send. :param reply: Message object or message_id to reply to. :p...
['def', 'send_message', '(', 'self', ',', 'text', ':', 'str', ',', 'reply', ':', 'int', '=', 'None', ',', 'link_preview', ':', 'bool', '=', 'None', ',', 'on_success', ':', 'callable', '=', 'None', ',', 'reply_markup', ':', 'botapi', '.', 'ReplyMarkup', '=', 'None', ')', ':', 'self', '.', 'twx', '.', 'send_message', '('...
Send message to this peer. :param text: Text to send. :param reply: Message object or message_id to reply to. :param link_preview: Whether or not to show the link preview for this message :param on_success: Callback to call when call is complete. :type reply: int or Message
['Send', 'message', 'to', 'this', 'peer', '.', ':', 'param', 'text', ':', 'Text', 'to', 'send', '.', ':', 'param', 'reply', ':', 'Message', 'object', 'or', 'message_id', 'to', 'reply', 'to', '.', ':', 'param', 'link_preview', ':', 'Whether', 'or', 'not', 'to', 'show', 'the', 'link', 'preview', 'for', 'this', 'message',...
train
https://github.com/datamachine/twx/blob/d9633f12f3647b1e54ba87b70b39df3b7e02b4eb/twx/twx.py#L78-L90
5,941
googleapis/google-cloud-python
logging/google/cloud/logging/_gapic.py
_parse_log_entry
def _parse_log_entry(entry_pb): """Special helper to parse ``LogEntry`` protobuf into a dictionary. The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This can be problematic if the type URL in the payload isn't in the ``google.protobuf`` registry. To help with parsing unregistered types, ...
python
def _parse_log_entry(entry_pb): """Special helper to parse ``LogEntry`` protobuf into a dictionary. The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This can be problematic if the type URL in the payload isn't in the ``google.protobuf`` registry. To help with parsing unregistered types, ...
['def', '_parse_log_entry', '(', 'entry_pb', ')', ':', 'try', ':', 'return', 'MessageToDict', '(', 'entry_pb', ')', 'except', 'TypeError', ':', 'if', 'entry_pb', '.', 'HasField', '(', '"proto_payload"', ')', ':', 'proto_payload', '=', 'entry_pb', '.', 'proto_payload', 'entry_pb', '.', 'ClearField', '(', '"proto_payload...
Special helper to parse ``LogEntry`` protobuf into a dictionary. The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This can be problematic if the type URL in the payload isn't in the ``google.protobuf`` registry. To help with parsing unregistered types, this function will remove ``proto_p...
['Special', 'helper', 'to', 'parse', 'LogEntry', 'protobuf', 'into', 'a', 'dictionary', '.']
train
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/_gapic.py#L421-L447
5,942
couchbase/couchbase-python-client
couchbase/bucket.py
Bucket.append_items
def append_items(self, items, **kwargs): """ Method to append data to multiple :class:`~.Item` objects. This method differs from the normal :meth:`append_multi` in that each `Item`'s `value` field is updated with the appended data upon successful completion of the operation. ...
python
def append_items(self, items, **kwargs): """ Method to append data to multiple :class:`~.Item` objects. This method differs from the normal :meth:`append_multi` in that each `Item`'s `value` field is updated with the appended data upon successful completion of the operation. ...
['def', 'append_items', '(', 'self', ',', 'items', ',', '*', '*', 'kwargs', ')', ':', 'rv', '=', 'self', '.', 'append_multi', '(', 'items', ',', '*', '*', 'kwargs', ')', "# Assume this is an 'ItemOptionDict'", 'for', 'k', ',', 'v', 'in', 'items', '.', 'dict', '.', 'items', '(', ')', ':', 'if', 'k', '.', 'success', ':',...
Method to append data to multiple :class:`~.Item` objects. This method differs from the normal :meth:`append_multi` in that each `Item`'s `value` field is updated with the appended data upon successful completion of the operation. :param items: The item dictionary. The value for each ...
['Method', 'to', 'append', 'data', 'to', 'multiple', ':', 'class', ':', '~', '.', 'Item', 'objects', '.']
train
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/bucket.py#L1597-L1622
5,943
odlgroup/odl
odl/discr/discretization.py
DiscretizedSpace._lincomb
def _lincomb(self, a, x1, b, x2, out): """Raw linear combination.""" self.tspace._lincomb(a, x1.tensor, b, x2.tensor, out.tensor)
python
def _lincomb(self, a, x1, b, x2, out): """Raw linear combination.""" self.tspace._lincomb(a, x1.tensor, b, x2.tensor, out.tensor)
['def', '_lincomb', '(', 'self', ',', 'a', ',', 'x1', ',', 'b', ',', 'x2', ',', 'out', ')', ':', 'self', '.', 'tspace', '.', '_lincomb', '(', 'a', ',', 'x1', '.', 'tensor', ',', 'b', ',', 'x2', '.', 'tensor', ',', 'out', '.', 'tensor', ')']
Raw linear combination.
['Raw', 'linear', 'combination', '.']
train
https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/discr/discretization.py#L253-L255
5,944
merll/docker-map
dockermap/map/input.py
SharedHostVolumesList.get_type_item
def get_type_item(self, value): """ Converts the input to a ``SharedVolume`` or ``HostVolume`` tuple for a host bind. Input can be a single string, a list or tuple, or a single-entry dictionary. Single values are assumed to be volume aliases for read-write access. Tuples or lists with tw...
python
def get_type_item(self, value): """ Converts the input to a ``SharedVolume`` or ``HostVolume`` tuple for a host bind. Input can be a single string, a list or tuple, or a single-entry dictionary. Single values are assumed to be volume aliases for read-write access. Tuples or lists with tw...
['def', 'get_type_item', '(', 'self', ',', 'value', ')', ':', 'if', 'isinstance', '(', 'value', ',', '(', 'HostVolume', ',', 'SharedVolume', ')', ')', ':', 'return', 'value', 'elif', 'isinstance', '(', 'value', ',', 'six', '.', 'string_types', ')', ':', 'return', 'SharedVolume', '(', 'value', ',', 'False', ')', 'elif',...
Converts the input to a ``SharedVolume`` or ``HostVolume`` tuple for a host bind. Input can be a single string, a list or tuple, or a single-entry dictionary. Single values are assumed to be volume aliases for read-write access. Tuples or lists with two elements, can be ``(alias, read-only indic...
['Converts', 'the', 'input', 'to', 'a', 'SharedVolume', 'or', 'HostVolume', 'tuple', 'for', 'a', 'host', 'bind', '.', 'Input', 'can', 'be', 'a', 'single', 'string', 'a', 'list', 'or', 'tuple', 'or', 'a', 'single', '-', 'entry', 'dictionary', '.', 'Single', 'values', 'are', 'assumed', 'to', 'be', 'volume', 'aliases', 'f...
train
https://github.com/merll/docker-map/blob/e14fe86a6ff5c33d121eb2f9157e9359cb80dd02/dockermap/map/input.py#L394-L428
5,945
i3visio/osrframework
osrframework/thirdparties/pipl_com/lib/fields.py
DateRange.from_dict
def from_dict(d): """Transform the dict to a DateRange object.""" start = d.get('start') end = d.get('end') if not (start and end): raise ValueError('DateRange must have both start and end') start = str_to_date(start) end = str_to_date(end) ret...
python
def from_dict(d): """Transform the dict to a DateRange object.""" start = d.get('start') end = d.get('end') if not (start and end): raise ValueError('DateRange must have both start and end') start = str_to_date(start) end = str_to_date(end) ret...
['def', 'from_dict', '(', 'd', ')', ':', 'start', '=', 'd', '.', 'get', '(', "'start'", ')', 'end', '=', 'd', '.', 'get', '(', "'end'", ')', 'if', 'not', '(', 'start', 'and', 'end', ')', ':', 'raise', 'ValueError', '(', "'DateRange must have both start and end'", ')', 'start', '=', 'str_to_date', '(', 'start', ')', 'en...
Transform the dict to a DateRange object.
['Transform', 'the', 'dict', 'to', 'a', 'DateRange', 'object', '.']
train
https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L854-L862
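Illustration (editor's sketch, not part of the dataset row above): the from_dict contract described in the osrframework/pipl DateRange docstring, made self-contained. The helper name str_to_date and the ISO "YYYY-MM-DD" format are assumptions for the example; the real parser may accept other formats.

import datetime

def str_to_date(value):
    # Assumed format: ISO "YYYY-MM-DD"; the original helper may be more lenient.
    return datetime.datetime.strptime(value, '%Y-%m-%d').date()

def date_range_from_dict(d):
    # Both 'start' and 'end' are required, as the documented ValueError states.
    start = d.get('start')
    end = d.get('end')
    if not (start and end):
        raise ValueError('DateRange must have both start and end')
    return (str_to_date(start), str_to_date(end))

# date_range_from_dict({'start': '1990-01-01', 'end': '1995-12-31'})
# -> (datetime.date(1990, 1, 1), datetime.date(1995, 12, 31))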
5,946
tensorflow/tensor2tensor
tensor2tensor/data_generators/video_utils.py
display_video_hooks
def display_video_hooks(hook_args): """Hooks to display videos at decode time.""" predictions = hook_args.predictions max_outputs = hook_args.decode_hparams.max_display_outputs max_decodes = hook_args.decode_hparams.max_display_decodes with tf.Graph().as_default(): _, best_decodes = video_metrics.compute...
python
def display_video_hooks(hook_args): """Hooks to display videos at decode time.""" predictions = hook_args.predictions max_outputs = hook_args.decode_hparams.max_display_outputs max_decodes = hook_args.decode_hparams.max_display_decodes with tf.Graph().as_default(): _, best_decodes = video_metrics.compute...
['def', 'display_video_hooks', '(', 'hook_args', ')', ':', 'predictions', '=', 'hook_args', '.', 'predictions', 'max_outputs', '=', 'hook_args', '.', 'decode_hparams', '.', 'max_display_outputs', 'max_decodes', '=', 'hook_args', '.', 'decode_hparams', '.', 'max_display_decodes', 'with', 'tf', '.', 'Graph', '(', ')', '....
Hooks to display videos at decode time.
['Hooks', 'to', 'display', 'videos', 'at', 'decode', 'time', '.']
train
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/data_generators/video_utils.py#L165-L206
5,947
bspaans/python-mingus
mingus/midi/midi_file_in.py
MidiFile.parse_track_header
def parse_track_header(self, fp): """Return the size of the track chunk.""" # Check the header try: h = fp.read(4) self.bytes_read += 4 except: raise IOError("Couldn't read track header from file. Byte %d." % self.bytes_read) ...
python
def parse_track_header(self, fp): """Return the size of the track chunk.""" # Check the header try: h = fp.read(4) self.bytes_read += 4 except: raise IOError("Couldn't read track header from file. Byte %d." % self.bytes_read) ...
['def', 'parse_track_header', '(', 'self', ',', 'fp', ')', ':', '# Check the header', 'try', ':', 'h', '=', 'fp', '.', 'read', '(', '4', ')', 'self', '.', 'bytes_read', '+=', '4', 'except', ':', 'raise', 'IOError', '(', '"Couldn\'t read track header from file. Byte %d."', '%', 'self', '.', 'bytes_read', ')', 'if', 'h',...
Return the size of the track chunk.
['Return', 'the', 'size', 'of', 'the', 'track', 'chunk', '.']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/midi/midi_file_in.py#L322-L342
5,948
datacamp/protowhat
protowhat/checks/check_funcs.py
has_code
def has_code( state, text, incorrect_msg="Check the {ast_path}. The checker expected to find {text}.", fixed=False, ): """Test whether the student code contains text. Args: state: State instance describing student and solution code. Can be omitted if used with Ex(). text : text ...
python
def has_code( state, text, incorrect_msg="Check the {ast_path}. The checker expected to find {text}.", fixed=False, ): """Test whether the student code contains text. Args: state: State instance describing student and solution code. Can be omitted if used with Ex(). text : text ...
['def', 'has_code', '(', 'state', ',', 'text', ',', 'incorrect_msg', '=', '"Check the {ast_path}. The checker expected to find {text}."', ',', 'fixed', '=', 'False', ',', ')', ':', 'stu_ast', '=', 'state', '.', 'student_ast', 'stu_code', '=', 'state', '.', 'student_code', '# fallback on using complete student code if n...
Test whether the student code contains text. Args: state: State instance describing student and solution code. Can be omitted if used with Ex(). text : text that student code must contain. Can be a regex pattern or a simple string. incorrect_msg: feedback message if text is not in student c...
['Test', 'whether', 'the', 'student', 'code', 'contains', 'text', '.']
train
https://github.com/datacamp/protowhat/blob/a392b4e51e07a2e50e7b7f6ad918b3f5cbb63edc/protowhat/checks/check_funcs.py#L172-L243
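Illustration (added by the editor, not from the row): the fixed-string versus regex check that the has_code documentation describes, reduced to the standard library. Function and parameter names here are made up; protowhat's real signature takes a State object.

import re

def contains_text(student_code, text, fixed=False):
    # fixed=True -> plain substring test; fixed=False -> treat `text` as a regex.
    if fixed:
        return text in student_code
    return re.search(text, student_code) is not None

# contains_text('SELECT * FROM artists', r'SELECT\s+\*') -> True
# contains_text('SELECT * FROM artists', 'artists', fixed=True) -> True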
5,949
pletzer/pnumpy
src/pnGhostedDistArray.py
gmdaArray
def gmdaArray(arry, dtype, mask=None, numGhosts=1): """ ghosted distributed array constructor @param arry numpy-like array @param numGhosts the number of ghosts (>= 0) """ a = numpy.array(arry, dtype) res = GhostedMaskedDistArray(a.shape, a.dtype) res.mask = mask res.setNumberOfGhost...
python
def gmdaArray(arry, dtype, mask=None, numGhosts=1): """ ghosted distributed array constructor @param arry numpy-like array @param numGhosts the number of ghosts (>= 0) """ a = numpy.array(arry, dtype) res = GhostedMaskedDistArray(a.shape, a.dtype) res.mask = mask res.setNumberOfGhost...
['def', 'gmdaArray', '(', 'arry', ',', 'dtype', ',', 'mask', '=', 'None', ',', 'numGhosts', '=', '1', ')', ':', 'a', '=', 'numpy', '.', 'array', '(', 'arry', ',', 'dtype', ')', 'res', '=', 'GhostedMaskedDistArray', '(', 'a', '.', 'shape', ',', 'a', '.', 'dtype', ')', 'res', '.', 'mask', '=', 'mask', 'res', '.', 'setNum...
ghosted distributed array constructor @param arry numpy-like array @param numGhosts the number of ghosts (>= 0)
['ghosted', 'distributed', 'array', 'constructor']
train
https://github.com/pletzer/pnumpy/blob/9e6d308be94a42637466b91ab1a7b4d64b4c29ae/src/pnGhostedDistArray.py#L147-L158
5,950
bxlab/bx-python
lib/bx_extras/pstat.py
abut
def abut (source,*args): """ Like the |Stat abut command. It concatenates two lists side-by-side and returns the result. '2D' lists are also accomodated for either argument (source or addon). CAUTION: If one list is shorter, it will be repeated until it is as long as the longest list. If this behavior is not d...
python
def abut (source,*args): """ Like the |Stat abut command. It concatenates two lists side-by-side and returns the result. '2D' lists are also accomodated for either argument (source or addon). CAUTION: If one list is shorter, it will be repeated until it is as long as the longest list. If this behavior is not d...
['def', 'abut', '(', 'source', ',', '*', 'args', ')', ':', 'if', 'type', '(', 'source', ')', 'not', 'in', '[', 'ListType', ',', 'TupleType', ']', ':', 'source', '=', '[', 'source', ']', 'for', 'addon', 'in', 'args', ':', 'if', 'type', '(', 'addon', ')', 'not', 'in', '[', 'ListType', ',', 'TupleType', ']', ':', 'addon',...
Like the |Stat abut command. It concatenates two lists side-by-side and returns the result. '2D' lists are also accomodated for either argument (source or addon). CAUTION: If one list is shorter, it will be repeated until it is as long as the longest list. If this behavior is not desired, use pstat.simpleabut(). ...
['Like', 'the', '|Stat', 'abut', 'command', '.', 'It', 'concatenates', 'two', 'lists', 'side', '-', 'by', '-', 'side', 'and', 'returns', 'the', 'result', '.', '2D', 'lists', 'are', 'also', 'accomodated', 'for', 'either', 'argument', '(', 'source', 'or', 'addon', ')', '.', 'CAUTION', ':', 'If', 'one', 'list', 'is', 'sho...
train
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/pstat.py#L122-L166
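Illustration (editor's sketch, not the bx-python code): the "shorter list is repeated" behaviour warned about in the abut docstring, shown for the simple two-flat-list case.

def simple_abut(source, addon):
    # Concatenate two flat lists side-by-side; the shorter list is recycled
    # until it is as long as the longer one -- the behaviour the CAUTION warns about.
    length = max(len(source), len(addon))
    return [[source[i % len(source)], addon[i % len(addon)]] for i in range(length)]

# simple_abut([1, 2, 3, 4], ['a', 'b']) -> [[1, 'a'], [2, 'b'], [3, 'a'], [4, 'b']]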
5,951
steenzout/python-serialization-json
steenzout/serialization/json/encoders.py
as_date
def as_date(dat): """Return the RFC3339 UTC string representation of the given date and time. Args: dat (:py:class:`datetime.date`): the object/type to be serialized. Raises: TypeError: when ``o`` is not an instance of ``datetime.date``. Returns: (str) JSON seriali...
python
def as_date(dat): """Return the RFC3339 UTC string representation of the given date and time. Args: dat (:py:class:`datetime.date`): the object/type to be serialized. Raises: TypeError: when ``o`` is not an instance of ``datetime.date``. Returns: (str) JSON seriali...
['def', 'as_date', '(', 'dat', ')', ':', 'LOGGER', '.', 'debug', '(', "'as_date(%s)'", ',', 'dat', ')', 'return', 'strict_rfc3339', '.', 'timestamp_to_rfc3339_utcoffset', '(', 'calendar', '.', 'timegm', '(', 'dat', '.', 'timetuple', '(', ')', ')', ')']
Return the RFC3339 UTC string representation of the given date and time. Args: dat (:py:class:`datetime.date`): the object/type to be serialized. Raises: TypeError: when ``o`` is not an instance of ``datetime.date``. Returns: (str) JSON serializable type for the given ...
['Return', 'the', 'RFC3339', 'UTC', 'string', 'representation', 'of', 'the', 'given', 'date', 'and', 'time', '.']
train
https://github.com/steenzout/python-serialization-json/blob/583568e14cc02ba0bf711f56b8a0a3ad142c696d/steenzout/serialization/json/encoders.py#L64-L80
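Illustration (editor's sketch): a standard-library approximation of the documented date -> RFC3339 UTC conversion. The original row relies on strict_rfc3339 together with calendar.timegm; here only calendar and datetime are used, and sub-second precision is ignored.

import calendar
import datetime

def as_rfc3339_utc(dat):
    if not isinstance(dat, datetime.date):
        raise TypeError('expected a datetime.date instance')
    # timegm treats the timetuple as UTC, matching the documented behaviour.
    timestamp = calendar.timegm(dat.timetuple())
    return datetime.datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')

# as_rfc3339_utc(datetime.date(2016, 1, 1)) -> '2016-01-01T00:00:00Z'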
5,952
manns/pyspread
pyspread/src/actions/_grid_actions.py
FindActions.find
def find(self, gridpos, find_string, flags, search_result=True): """Return next position of event_find_string in MainGrid Parameters: ----------- gridpos: 3-tuple of Integer \tPosition at which the search starts find_string: String \tString to find in grid ...
python
def find(self, gridpos, find_string, flags, search_result=True): """Return next position of event_find_string in MainGrid Parameters: ----------- gridpos: 3-tuple of Integer \tPosition at which the search starts find_string: String \tString to find in grid ...
['def', 'find', '(', 'self', ',', 'gridpos', ',', 'find_string', ',', 'flags', ',', 'search_result', '=', 'True', ')', ':', 'findfunc', '=', 'self', '.', 'grid', '.', 'code_array', '.', 'findnextmatch', 'if', '"DOWN"', 'in', 'flags', ':', 'if', 'gridpos', '[', '0', ']', '<', 'self', '.', 'grid', '.', 'code_array', '.',...
Return next position of event_find_string in MainGrid Parameters: ----------- gridpos: 3-tuple of Integer \tPosition at which the search starts find_string: String \tString to find in grid flags: List of strings \tSearch flag out of \t["UP" xor "D...
['Return', 'next', 'position', 'of', 'event_find_string', 'in', 'MainGrid']
train
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/actions/_grid_actions.py#L1750-L1788
5,953
mitsei/dlkit
dlkit/records/assessment/mecqbank/mecqbank_base_records.py
MecQBankBaseMixin._init_map
def _init_map(self): """stub""" SimpleDifficultyItemFormRecord._init_map(self) SourceItemFormRecord._init_map(self) PDFPreviewFormRecord._init_map(self) PublishedFormRecord._init_map(self) ProvenanceFormRecord._init_map(self) super(MecQBankBaseMixin, self)._init_m...
python
def _init_map(self): """stub""" SimpleDifficultyItemFormRecord._init_map(self) SourceItemFormRecord._init_map(self) PDFPreviewFormRecord._init_map(self) PublishedFormRecord._init_map(self) ProvenanceFormRecord._init_map(self) super(MecQBankBaseMixin, self)._init_m...
['def', '_init_map', '(', 'self', ')', ':', 'SimpleDifficultyItemFormRecord', '.', '_init_map', '(', 'self', ')', 'SourceItemFormRecord', '.', '_init_map', '(', 'self', ')', 'PDFPreviewFormRecord', '.', '_init_map', '(', 'self', ')', 'PublishedFormRecord', '.', '_init_map', '(', 'self', ')', 'ProvenanceFormRecord', '.'...
stub
['stub']
train
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/records/assessment/mecqbank/mecqbank_base_records.py#L440-L447
5,954
OLC-Bioinformatics/sipprverse
genesippr_validation.py
ReadPrep.link_reads
def link_reads(self, analysistype): """ Create folders with relative symlinks to the desired simulated/sampled reads. These folders will contain all the reads created for each sample, and will be processed with GeneSippr and COWBAT pipelines :param analysistype: Current analysis type. Wi...
python
def link_reads(self, analysistype): """ Create folders with relative symlinks to the desired simulated/sampled reads. These folders will contain all the reads created for each sample, and will be processed with GeneSippr and COWBAT pipelines :param analysistype: Current analysis type. Wi...
['def', 'link_reads', '(', 'self', ',', 'analysistype', ')', ':', 'logging', '.', 'info', '(', "'Linking {at} reads'", '.', 'format', '(', 'at', '=', 'analysistype', ')', ')', 'for', 'sample', 'in', 'self', '.', 'metadata', ':', '# Create the output directories', 'genesippr_dir', '=', 'os', '.', 'path', '.', 'join', '(...
Create folders with relative symlinks to the desired simulated/sampled reads. These folders will contain all the reads created for each sample, and will be processed with GeneSippr and COWBAT pipelines :param analysistype: Current analysis type. Will either be 'simulated' or 'sampled'
['Create', 'folders', 'with', 'relative', 'symlinks', 'to', 'the', 'desired', 'simulated', '/', 'sampled', 'reads', '.', 'These', 'folders', 'will', 'contain', 'all', 'the', 'reads', 'created', 'for', 'each', 'sample', 'and', 'will', 'be', 'processed', 'with', 'GeneSippr', 'and', 'COWBAT', 'pipelines', ':', 'param', 'a...
train
https://github.com/OLC-Bioinformatics/sipprverse/blob/d4f10cdf8e1a39dac0953db61c21c97efc6006de/genesippr_validation.py#L537-L584
5,955
KrishnaswamyLab/graphtools
graphtools/graphs.py
LandmarkGraph.extend_to_data
def extend_to_data(self, data, **kwargs): """Build transition matrix from new data to the graph Creates a transition matrix such that `Y` can be approximated by a linear combination of landmarks. Any transformation of the landmarks can be trivially applied to `Y` by performing ...
python
def extend_to_data(self, data, **kwargs): """Build transition matrix from new data to the graph Creates a transition matrix such that `Y` can be approximated by a linear combination of landmarks. Any transformation of the landmarks can be trivially applied to `Y` by performing ...
['def', 'extend_to_data', '(', 'self', ',', 'data', ',', '*', '*', 'kwargs', ')', ':', 'kernel', '=', 'self', '.', 'build_kernel_to_data', '(', 'data', ',', '*', '*', 'kwargs', ')', 'if', 'sparse', '.', 'issparse', '(', 'kernel', ')', ':', 'pnm', '=', 'sparse', '.', 'hstack', '(', '[', 'sparse', '.', 'csr_matrix', '(',...
Build transition matrix from new data to the graph Creates a transition matrix such that `Y` can be approximated by a linear combination of landmarks. Any transformation of the landmarks can be trivially applied to `Y` by performing `transform_Y = transitions.dot(transform)` ...
['Build', 'transition', 'matrix', 'from', 'new', 'data', 'to', 'the', 'graph']
train
https://github.com/KrishnaswamyLab/graphtools/blob/44685352be7df2005d44722903092207967457f2/graphtools/graphs.py#L637-L671
5,956
Numigi/gitoo
src/core.py
parse_url
def parse_url(url): """ Parse the given url and update it with environment value if required. :param basestring url: :rtype: basestring :raise: KeyError if environment variable is needed but not found. """ # the url has to be a unicode by pystache's design, but the unicode concept has been rewa...
python
def parse_url(url): """ Parse the given url and update it with environment value if required. :param basestring url: :rtype: basestring :raise: KeyError if environment variable is needed but not found. """ # the url has to be a unicode by pystache's design, but the unicode concept has been rewa...
['def', 'parse_url', '(', 'url', ')', ':', "# the url has to be a unicode by pystache's design, but the unicode concept has been rewamped in py3", '# we use a try except to make the code compatible with py2 and py3', 'try', ':', 'url', '=', 'unicode', '(', 'url', ')', 'except', 'NameError', ':', 'url', '=', 'url', 'par...
Parse the given url and update it with environment value if required. :param basestring url: :rtype: basestring :raise: KeyError if environment variable is needed but not found.
['Parse', 'the', 'given', 'url', 'and', 'update', 'it', 'with', 'environment', 'value', 'if', 'required', '.']
train
https://github.com/Numigi/gitoo/blob/0921f5fb8a948021760bb0373a40f9fbe8a4a2e5/src/core.py#L242-L259
5,957
pycontribs/pyrax
pyrax/cloudblockstorage.py
CloudBlockStorageVolume.detach
def detach(self): """ Detaches this volume from any device it may be attached to. If it is not attached, nothing happens. """ attachments = self.attachments if not attachments: # Not attached; no error needed, just return return # A volume ...
python
def detach(self): """ Detaches this volume from any device it may be attached to. If it is not attached, nothing happens. """ attachments = self.attachments if not attachments: # Not attached; no error needed, just return return # A volume ...
['def', 'detach', '(', 'self', ')', ':', 'attachments', '=', 'self', '.', 'attachments', 'if', 'not', 'attachments', ':', '# Not attached; no error needed, just return', 'return', '# A volume can only be attached to one device at a time, but for some', '# reason this is a list instead of a singular value', 'att', '=', ...
Detaches this volume from any device it may be attached to. If it is not attached, nothing happens.
['Detaches', 'this', 'volume', 'from', 'any', 'device', 'it', 'may', 'be', 'attached', 'to', '.', 'If', 'it', 'is', 'not', 'attached', 'nothing', 'happens', '.']
train
https://github.com/pycontribs/pyrax/blob/9ddfd5064b3a292d7337906f3b2d5dce95b50b99/pyrax/cloudblockstorage.py#L167-L184
5,958
gem/oq-engine
openquake/server/db/actions.py
get_log_slice
def get_log_slice(db, job_id, start, stop): """ Get a slice of the calculation log as a JSON list of rows :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param start: start of the slice :param stop: end of the slice (the last ele...
python
def get_log_slice(db, job_id, start, stop): """ Get a slice of the calculation log as a JSON list of rows :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param start: start of the slice :param stop: end of the slice (the last ele...
['def', 'get_log_slice', '(', 'db', ',', 'job_id', ',', 'start', ',', 'stop', ')', ':', 'start', '=', 'int', '(', 'start', ')', 'stop', '=', 'int', '(', 'stop', ')', 'limit', '=', '-', '1', 'if', 'stop', '==', '0', 'else', 'stop', '-', 'start', 'logs', '=', 'db', '(', "'SELECT * FROM log WHERE job_id=?x '", "'ORDER BY ...
Get a slice of the calculation log as a JSON list of rows :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: a job ID :param start: start of the slice :param stop: end of the slice (the last element is excluded)
['Get', 'a', 'slice', 'of', 'the', 'calculation', 'log', 'as', 'a', 'JSON', 'list', 'of', 'rows']
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/server/db/actions.py#L588-L610
5,959
hellosign/hellosign-python-sdk
hellosign_sdk/resource/signature_request.py
SignatureRequest.find_response_component
def find_response_component(self, api_id=None, signature_id=None): ''' Find one or many repsonse components. Args: api_id (str): Api id associated with the component(s) to be retrieved. signature_id (str): Signature id associated with the component(s)...
python
def find_response_component(self, api_id=None, signature_id=None): ''' Find one or many repsonse components. Args: api_id (str): Api id associated with the component(s) to be retrieved. signature_id (str): Signature id associated with the component(s)...
['def', 'find_response_component', '(', 'self', ',', 'api_id', '=', 'None', ',', 'signature_id', '=', 'None', ')', ':', 'if', 'not', 'api_id', 'and', 'not', 'signature_id', ':', 'raise', 'ValueError', '(', "'At least one of api_id and signature_id is required'", ')', 'components', '=', 'list', '(', ')', 'if', 'self', '...
Find one or many repsonse components. Args: api_id (str): Api id associated with the component(s) to be retrieved. signature_id (str): Signature id associated with the component(s) to be retrieved. Returns: A list of dictionaries ...
['Find', 'one', 'or', 'many', 'repsonse', 'components', '.']
train
https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/resource/signature_request.py#L114-L137
5,960
SFDO-Tooling/CumulusCI
cumulusci/core/keychain/BaseProjectKeychain.py
BaseProjectKeychain.set_service
def set_service(self, name, service_config, project=False): """ Store a ServiceConfig in the keychain """ if not self.project_config.services or name not in self.project_config.services: self._raise_service_not_valid(name) self._validate_service(name, service_config) self._se...
python
def set_service(self, name, service_config, project=False): """ Store a ServiceConfig in the keychain """ if not self.project_config.services or name not in self.project_config.services: self._raise_service_not_valid(name) self._validate_service(name, service_config) self._se...
['def', 'set_service', '(', 'self', ',', 'name', ',', 'service_config', ',', 'project', '=', 'False', ')', ':', 'if', 'not', 'self', '.', 'project_config', '.', 'services', 'or', 'name', 'not', 'in', 'self', '.', 'project_config', '.', 'services', ':', 'self', '.', '_raise_service_not_valid', '(', 'name', ')', 'self', ...
Store a ServiceConfig in the keychain
['Store', 'a', 'ServiceConfig', 'in', 'the', 'keychain']
train
https://github.com/SFDO-Tooling/CumulusCI/blob/e19047921ca771a297e045f22f0bb201651bb6f7/cumulusci/core/keychain/BaseProjectKeychain.py#L184-L190
5,961
senaite/senaite.core
bika/lims/exportimport/instruments/thermoscientific/multiskan/__init__.py
ThermoScientificMultiskanCSVParser.parse_data
def parse_data(self, sline): """This function builds the addRawResults dictionary using the header values of the labels section as sample Ids. """ if sline[0] == '': return 0 for idx, label in enumerate(self._labels_values[sline[0]]): if label != '': ...
python
def parse_data(self, sline): """This function builds the addRawResults dictionary using the header values of the labels section as sample Ids. """ if sline[0] == '': return 0 for idx, label in enumerate(self._labels_values[sline[0]]): if label != '': ...
['def', 'parse_data', '(', 'self', ',', 'sline', ')', ':', 'if', 'sline', '[', '0', ']', '==', "''", ':', 'return', '0', 'for', 'idx', ',', 'label', 'in', 'enumerate', '(', 'self', '.', '_labels_values', '[', 'sline', '[', '0', ']', ']', ')', ':', 'if', 'label', '!=', "''", ':', 'self', '.', '_addRawResult', '(', 'labe...
This function builds the addRawResults dictionary using the header values of the labels section as sample Ids.
['This', 'function', 'builds', 'the', 'addRawResults', 'dictionary', 'using', 'the', 'header', 'values', 'of', 'the', 'labels', 'section', 'as', 'sample', 'Ids', '.']
train
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/exportimport/instruments/thermoscientific/multiskan/__init__.py#L63-L72
5,962
sdss/tree
python/tree/tree.py
Tree.branch_out
def branch_out(self, limb=None): ''' Set the individual section branches This adds the various sections of the config file into the tree environment for access later. Optically can specify a specific branch. This does not yet load them into the os environment. Parameters: ...
python
def branch_out(self, limb=None): ''' Set the individual section branches This adds the various sections of the config file into the tree environment for access later. Optically can specify a specific branch. This does not yet load them into the os environment. Parameters: ...
['def', 'branch_out', '(', 'self', ',', 'limb', '=', 'None', ')', ':', '# Filter on sections', 'if', 'not', 'limb', ':', 'limbs', '=', 'self', '.', '_cfg', '.', 'sections', '(', ')', 'else', ':', '# we must have the general always + secton', 'limb', '=', 'limb', 'if', 'isinstance', '(', 'limb', ',', 'list', ')', 'else'...
Set the individual section branches This adds the various sections of the config file into the tree environment for access later. Optically can specify a specific branch. This does not yet load them into the os environment. Parameters: limb (str/list): The ...
['Set', 'the', 'individual', 'section', 'branches']
train
https://github.com/sdss/tree/blob/f61fe0876c138ccb61874912d4b8590dadfa835c/python/tree/tree.py#L137-L172
5,963
lreis2415/PyGeoC
pygeoc/postTauDEM.py
StreamnetUtil.serialize_streamnet
def serialize_streamnet(streamnet_file, output_reach_file): """Eliminate reach with zero length and return the reach ID map. Args: streamnet_file: original stream net ESRI shapefile output_reach_file: serialized stream net, ESRI shapefile Returns: id pairs {o...
python
def serialize_streamnet(streamnet_file, output_reach_file): """Eliminate reach with zero length and return the reach ID map. Args: streamnet_file: original stream net ESRI shapefile output_reach_file: serialized stream net, ESRI shapefile Returns: id pairs {o...
['def', 'serialize_streamnet', '(', 'streamnet_file', ',', 'output_reach_file', ')', ':', 'FileClass', '.', 'copy_files', '(', 'streamnet_file', ',', 'output_reach_file', ')', 'ds_reach', '=', 'ogr_Open', '(', 'output_reach_file', ',', 'update', '=', 'True', ')', 'layer_reach', '=', 'ds_reach', '.', 'GetLayer', '(', '0...
Eliminate reach with zero length and return the reach ID map. Args: streamnet_file: original stream net ESRI shapefile output_reach_file: serialized stream net, ESRI shapefile Returns: id pairs {origin: newly assigned}
['Eliminate', 'reach', 'with', 'zero', 'length', 'and', 'return', 'the', 'reach', 'ID', 'map', '.', 'Args', ':', 'streamnet_file', ':', 'original', 'stream', 'net', 'ESRI', 'shapefile', 'output_reach_file', ':', 'serialized', 'stream', 'net', 'ESRI', 'shapefile']
train
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/postTauDEM.py#L199-L266
5,964
turicas/rows
rows/plugins/txt.py
import_from_txt
def import_from_txt( filename_or_fobj, encoding="utf-8", frame_style=FRAME_SENTINEL, *args, **kwargs ): """Return a rows.Table created from imported TXT file.""" # TODO: (maybe) # enable parsing of non-fixed-width-columns # with old algorithm - that would just split columns # at the vertical se...
python
def import_from_txt( filename_or_fobj, encoding="utf-8", frame_style=FRAME_SENTINEL, *args, **kwargs ): """Return a rows.Table created from imported TXT file.""" # TODO: (maybe) # enable parsing of non-fixed-width-columns # with old algorithm - that would just split columns # at the vertical se...
['def', 'import_from_txt', '(', 'filename_or_fobj', ',', 'encoding', '=', '"utf-8"', ',', 'frame_style', '=', 'FRAME_SENTINEL', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', '# TODO: (maybe)', '# enable parsing of non-fixed-width-columns', '# with old algorithm - that would just split columns', '# at the vertica...
Return a rows.Table created from imported TXT file.
['Return', 'a', 'rows', '.', 'Table', 'created', 'from', 'imported', 'TXT', 'file', '.']
train
https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/plugins/txt.py#L130-L179
5,965
OLC-Bioinformatics/sipprverse
pointsippr/pointsippr.py
PointSippr.run_pointfinder
def run_pointfinder(self): """ Run PointFinder on the FASTA sequences extracted from the raw reads """ logging.info('Running PointFinder on FASTA files') for i in range(len(self.runmetadata.samples)): # Start threads threads = Thread(target=self.pointfinde...
python
def run_pointfinder(self): """ Run PointFinder on the FASTA sequences extracted from the raw reads """ logging.info('Running PointFinder on FASTA files') for i in range(len(self.runmetadata.samples)): # Start threads threads = Thread(target=self.pointfinde...
['def', 'run_pointfinder', '(', 'self', ')', ':', 'logging', '.', 'info', '(', "'Running PointFinder on FASTA files'", ')', 'for', 'i', 'in', 'range', '(', 'len', '(', 'self', '.', 'runmetadata', '.', 'samples', ')', ')', ':', '# Start threads', 'threads', '=', 'Thread', '(', 'target', '=', 'self', '.', 'pointfinder_th...
Run PointFinder on the FASTA sequences extracted from the raw reads
['Run', 'PointFinder', 'on', 'the', 'FASTA', 'sequences', 'extracted', 'from', 'the', 'raw', 'reads']
train
https://github.com/OLC-Bioinformatics/sipprverse/blob/d4f10cdf8e1a39dac0953db61c21c97efc6006de/pointsippr/pointsippr.py#L66-L99
5,966
hydpy-dev/hydpy
hydpy/core/timetools.py
Date.from_cfunits
def from_cfunits(cls, units) -> 'Date': """Return a |Date| object representing the reference date of the given `units` string agreeing with the NetCDF-CF conventions. The following example string is taken from the `Time Coordinate`_ chapter of the NetCDF-CF conventions documentation (mo...
python
def from_cfunits(cls, units) -> 'Date': """Return a |Date| object representing the reference date of the given `units` string agreeing with the NetCDF-CF conventions. The following example string is taken from the `Time Coordinate`_ chapter of the NetCDF-CF conventions documentation (mo...
['def', 'from_cfunits', '(', 'cls', ',', 'units', ')', '->', "'Date'", ':', 'try', ':', 'string', '=', 'units', '[', 'units', '.', 'find', '(', "'since'", ')', '+', '6', ':', ']', 'idx', '=', 'string', '.', 'find', '(', "'.'", ')', 'if', 'idx', '!=', '-', '1', ':', 'jdx', '=', 'None', 'for', 'jdx', ',', 'char', 'in', '...
Return a |Date| object representing the reference date of the given `units` string agreeing with the NetCDF-CF conventions. The following example string is taken from the `Time Coordinate`_ chapter of the NetCDF-CF conventions documentation (modified). Note that the first entry (the uni...
['Return', 'a', '|Date|', 'object', 'representing', 'the', 'reference', 'date', 'of', 'the', 'given', 'units', 'string', 'agreeing', 'with', 'the', 'NetCDF', '-', 'CF', 'conventions', '.']
train
https://github.com/hydpy-dev/hydpy/blob/1bc6a82cf30786521d86b36e27900c6717d3348d/hydpy/core/timetools.py#L301-L361
5,967
junaruga/rpm-py-installer
install.py
Downloader.download_and_expand
def download_and_expand(self): """Download and expand RPM Python binding.""" top_dir_name = None if self.git_branch: # Download a source by git clone. top_dir_name = self._download_and_expand_by_git() else: # Download a source from the arcihve URL. ...
python
def download_and_expand(self): """Download and expand RPM Python binding.""" top_dir_name = None if self.git_branch: # Download a source by git clone. top_dir_name = self._download_and_expand_by_git() else: # Download a source from the arcihve URL. ...
['def', 'download_and_expand', '(', 'self', ')', ':', 'top_dir_name', '=', 'None', 'if', 'self', '.', 'git_branch', ':', '# Download a source by git clone.', 'top_dir_name', '=', 'self', '.', '_download_and_expand_by_git', '(', ')', 'else', ':', '# Download a source from the arcihve URL.', '# Downloading the compressed...
Download and expand RPM Python binding.
['Download', 'and', 'expand', 'RPM', 'Python', 'binding', '.']
train
https://github.com/junaruga/rpm-py-installer/blob/12f45feb0ba533dec8d0d16ef1e9b7fb8cfbd4ed/install.py#L412-L428
5,968
ausaki/subfinder
subfinder/utils.py
rm_subtitles
def rm_subtitles(path): """ delete all subtitles in path recursively """ sub_exts = ['ass', 'srt', 'sub'] count = 0 for root, dirs, files in os.walk(path): for f in files: _, ext = os.path.splitext(f) ext = ext[1:] if ext in sub_exts: p = o...
python
def rm_subtitles(path): """ delete all subtitles in path recursively """ sub_exts = ['ass', 'srt', 'sub'] count = 0 for root, dirs, files in os.walk(path): for f in files: _, ext = os.path.splitext(f) ext = ext[1:] if ext in sub_exts: p = o...
['def', 'rm_subtitles', '(', 'path', ')', ':', 'sub_exts', '=', '[', "'ass'", ',', "'srt'", ',', "'sub'", ']', 'count', '=', '0', 'for', 'root', ',', 'dirs', ',', 'files', 'in', 'os', '.', 'walk', '(', 'path', ')', ':', 'for', 'f', 'in', 'files', ':', '_', ',', 'ext', '=', 'os', '.', 'path', '.', 'splitext', '(', 'f', ...
delete all subtitles in path recursively
['delete', 'all', 'subtitles', 'in', 'path', 'recursively']
train
https://github.com/ausaki/subfinder/blob/b74b79214f618c603a551b9334ebb110ccf9684c/subfinder/utils.py#L11-L25
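Illustration (editor's sketch of the documented behaviour -- it really deletes files, so treat it as an example only). The extension list and the count return value are taken from the row above.

import os

def remove_subtitles(path, sub_exts=('ass', 'srt', 'sub')):
    # Walk `path` recursively and delete every file whose extension is in sub_exts.
    count = 0
    for root, dirs, files in os.walk(path):
        for name in files:
            ext = os.path.splitext(name)[1].lstrip('.').lower()
            if ext in sub_exts:
                os.remove(os.path.join(root, name))
                count += 1
    return count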
5,969
SheffieldML/GPyOpt
GPyOpt/optimization/optimizer.py
OptimizationWithContext.f_nc
def f_nc(self,x): ''' Wrapper of *f*: takes an input x with size of the noncontext dimensions expands it and evaluates the entire function. ''' x = np.atleast_2d(x) xx = self.context_manager._expand_vector(x) if x.shape[0] == 1: return self.f(xx)[0] ...
python
def f_nc(self,x): ''' Wrapper of *f*: takes an input x with size of the noncontext dimensions expands it and evaluates the entire function. ''' x = np.atleast_2d(x) xx = self.context_manager._expand_vector(x) if x.shape[0] == 1: return self.f(xx)[0] ...
['def', 'f_nc', '(', 'self', ',', 'x', ')', ':', 'x', '=', 'np', '.', 'atleast_2d', '(', 'x', ')', 'xx', '=', 'self', '.', 'context_manager', '.', '_expand_vector', '(', 'x', ')', 'if', 'x', '.', 'shape', '[', '0', ']', '==', '1', ':', 'return', 'self', '.', 'f', '(', 'xx', ')', '[', '0', ']', 'else', ':', 'return', 's...
Wrapper of *f*: takes an input x with size of the noncontext dimensions expands it and evaluates the entire function.
['Wrapper', 'of', '*', 'f', '*', ':', 'takes', 'an', 'input', 'x', 'with', 'size', 'of', 'the', 'noncontext', 'dimensions', 'expands', 'it', 'and', 'evaluates', 'the', 'entire', 'function', '.']
train
https://github.com/SheffieldML/GPyOpt/blob/255539dc5927819ca701e44fe3d76cd4864222fa/GPyOpt/optimization/optimizer.py#L203-L213
5,970
google-research/batch-ppo
agents/tools/wrappers.py
ConvertTo32Bit._convert_observ
def _convert_observ(self, observ): """Convert the observation to 32 bits. Args: observ: Numpy observation. Raises: ValueError: Observation contains infinite values. Returns: Numpy observation with 32-bit data type. """ if not np.isfinite(observ).all(): raise ValueError...
python
def _convert_observ(self, observ): """Convert the observation to 32 bits. Args: observ: Numpy observation. Raises: ValueError: Observation contains infinite values. Returns: Numpy observation with 32-bit data type. """ if not np.isfinite(observ).all(): raise ValueError...
['def', '_convert_observ', '(', 'self', ',', 'observ', ')', ':', 'if', 'not', 'np', '.', 'isfinite', '(', 'observ', ')', '.', 'all', '(', ')', ':', 'raise', 'ValueError', '(', "'Infinite observation encountered.'", ')', 'if', 'observ', '.', 'dtype', '==', 'np', '.', 'float64', ':', 'return', 'observ', '.', 'astype', '(...
Convert the observation to 32 bits. Args: observ: Numpy observation. Raises: ValueError: Observation contains infinite values. Returns: Numpy observation with 32-bit data type.
['Convert', 'the', 'observation', 'to', '32', 'bits', '.']
train
https://github.com/google-research/batch-ppo/blob/3d09705977bae4e7c3eb20339a3b384d2a5531e4/agents/tools/wrappers.py#L530-L548
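Illustration (editor's sketch): the documented 32-bit downcast with the infinity check, written as a plain NumPy function outside the wrapper class.

import numpy as np

def to_32_bit(observ):
    # Reject non-finite observations, then downcast 64-bit arrays to 32 bits.
    if not np.isfinite(observ).all():
        raise ValueError('Infinite observation encountered.')
    if observ.dtype == np.float64:
        return observ.astype(np.float32)
    if observ.dtype == np.int64:
        return observ.astype(np.int32)
    return observ

# to_32_bit(np.zeros(3)).dtype -> dtype('float32')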
5,971
apple/turicreate
src/unity/python/turicreate/data_structures/sframe.py
SFrame.add_column
def add_column(self, data, column_name="", inplace=False): """ Returns an SFrame with a new column. The number of elements in the data given must match the length of every other column of the SFrame. If no name is given, a default name is chosen. If inplace == False (default) th...
python
def add_column(self, data, column_name="", inplace=False): """ Returns an SFrame with a new column. The number of elements in the data given must match the length of every other column of the SFrame. If no name is given, a default name is chosen. If inplace == False (default) th...
['def', 'add_column', '(', 'self', ',', 'data', ',', 'column_name', '=', '""', ',', 'inplace', '=', 'False', ')', ':', '# Check type for pandas dataframe or SArray?', 'if', 'not', 'isinstance', '(', 'data', ',', 'SArray', ')', ':', 'if', 'isinstance', '(', 'data', ',', '_Iterable', ')', ':', 'data', '=', 'SArray', '(',...
Returns an SFrame with a new column. The number of elements in the data given must match the length of every other column of the SFrame. If no name is given, a default name is chosen. If inplace == False (default) this operation does not modify the current SFrame, returning a new SFrame...
['Returns', 'an', 'SFrame', 'with', 'a', 'new', 'column', '.', 'The', 'number', 'of', 'elements', 'in', 'the', 'data', 'given', 'must', 'match', 'the', 'length', 'of', 'every', 'other', 'column', 'of', 'the', 'SFrame', '.', 'If', 'no', 'name', 'is', 'given', 'a', 'default', 'name', 'is', 'chosen', '.']
train
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/unity/python/turicreate/data_structures/sframe.py#L3139-L3212
5,972
google/transitfeed
merge.py
DataSetMerger._MergeOptional
def _MergeOptional(self, a, b): """Tries to merge two values which may be None. If both values are not None, they are required to be the same and the merge is trivial. If one of the values is None and the other is not None, the merge results in the one which is not None. If both are None, the merge ...
python
def _MergeOptional(self, a, b): """Tries to merge two values which may be None. If both values are not None, they are required to be the same and the merge is trivial. If one of the values is None and the other is not None, the merge results in the one which is not None. If both are None, the merge ...
['def', '_MergeOptional', '(', 'self', ',', 'a', ',', 'b', ')', ':', 'if', 'a', 'and', 'b', ':', 'if', 'a', '!=', 'b', ':', 'raise', 'MergeError', '(', '"values must be identical if both specified "', '"(\'%s\' vs \'%s\')"', '%', '(', 'transitfeed', '.', 'EncodeUnicode', '(', 'a', ')', ',', 'transitfeed', '.', 'EncodeU...
Tries to merge two values which may be None. If both values are not None, they are required to be the same and the merge is trivial. If one of the values is None and the other is not None, the merge results in the one which is not None. If both are None, the merge results in None. Args: a: T...
['Tries', 'to', 'merge', 'two', 'values', 'which', 'may', 'be', 'None', '.']
train
https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/merge.py#L435-L458
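Illustration (editor's sketch): the three-case merge rule from the docstring as a free function. ValueError stands in for transitfeed's MergeError here.

def merge_optional(a, b):
    # Both set: they must be identical.  One set: keep it.  Both None: None.
    if a is not None and b is not None:
        if a != b:
            raise ValueError('values must be identical if both specified (%r vs %r)' % (a, b))
        return a
    return a if a is not None else b

# merge_optional('agency', None) -> 'agency'
# merge_optional(None, None) -> None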
5,973
KelSolaar/Umbra
umbra/components/factory/script_editor/editor_status.py
EditorStatus.__Languages_comboBox_set_default_view_state
def __Languages_comboBox_set_default_view_state(self): """ Sets the **Languages_comboBox** Widget default View state. """ if not self.__container.has_editor_tab(): return editor = self.__container.get_current_editor() index = self.Languages_comboBox.findText...
python
def __Languages_comboBox_set_default_view_state(self): """ Sets the **Languages_comboBox** Widget default View state. """ if not self.__container.has_editor_tab(): return editor = self.__container.get_current_editor() index = self.Languages_comboBox.findText...
['def', '__Languages_comboBox_set_default_view_state', '(', 'self', ')', ':', 'if', 'not', 'self', '.', '__container', '.', 'has_editor_tab', '(', ')', ':', 'return', 'editor', '=', 'self', '.', '__container', '.', 'get_current_editor', '(', ')', 'index', '=', 'self', '.', 'Languages_comboBox', '.', 'findText', '(', 'e...
Sets the **Languages_comboBox** Widget default View state.
['Sets', 'the', '**', 'Languages_comboBox', '**', 'Widget', 'default', 'View', 'state', '.']
train
https://github.com/KelSolaar/Umbra/blob/66f45f08d9d723787f1191989f8b0dda84b412ce/umbra/components/factory/script_editor/editor_status.py#L152-L163
5,974
pkgw/pwkit
pwkit/sherpa.py
make_fixed_temp_multi_apec
def make_fixed_temp_multi_apec(kTs, name_template='apec%d', norm=None): """Create a model summing multiple APEC components at fixed temperatures. *kTs* An iterable of temperatures for the components, in keV. *name_template* = 'apec%d' A template to use for the names of each component; it is str...
python
def make_fixed_temp_multi_apec(kTs, name_template='apec%d', norm=None): """Create a model summing multiple APEC components at fixed temperatures. *kTs* An iterable of temperatures for the components, in keV. *name_template* = 'apec%d' A template to use for the names of each component; it is str...
['def', 'make_fixed_temp_multi_apec', '(', 'kTs', ',', 'name_template', '=', "'apec%d'", ',', 'norm', '=', 'None', ')', ':', 'total_model', '=', 'None', 'sub_models', '=', '[', ']', 'for', 'i', ',', 'kT', 'in', 'enumerate', '(', 'kTs', ')', ':', 'component', '=', 'ui', '.', 'xsapec', '(', 'name_template', '%', 'i', ')'...
Create a model summing multiple APEC components at fixed temperatures. *kTs* An iterable of temperatures for the components, in keV. *name_template* = 'apec%d' A template to use for the names of each component; it is string-formatted with the 0-based component number as an argument. *norm...
['Create', 'a', 'model', 'summing', 'multiple', 'APEC', 'components', 'at', 'fixed', 'temperatures', '.']
train
https://github.com/pkgw/pwkit/blob/d40957a1c3d2ea34e7ceac2267ee9635135f2793/pwkit/sherpa.py#L102-L140
5,975
instaloader/instaloader
instaloader/instaloader.py
Instaloader.save_location
def save_location(self, filename: str, location: PostLocation, mtime: datetime) -> None: """Save post location name and Google Maps link.""" filename += '_location.txt' location_string = (location.name + "\n" + "https://maps.google.com/maps?q={0},{1}&ll={0},{1}\n".form...
python
def save_location(self, filename: str, location: PostLocation, mtime: datetime) -> None: """Save post location name and Google Maps link.""" filename += '_location.txt' location_string = (location.name + "\n" + "https://maps.google.com/maps?q={0},{1}&ll={0},{1}\n".form...
['def', 'save_location', '(', 'self', ',', 'filename', ':', 'str', ',', 'location', ':', 'PostLocation', ',', 'mtime', ':', 'datetime', ')', '->', 'None', ':', 'filename', '+=', "'_location.txt'", 'location_string', '=', '(', 'location', '.', 'name', '+', '"\\n"', '+', '"https://maps.google.com/maps?q={0},{1}&ll={0},{1...
Save post location name and Google Maps link.
['Save', 'post', 'location', 'name', 'and', 'Google', 'Maps', 'link', '.']
train
https://github.com/instaloader/instaloader/blob/87d877e650cd8020b04b8b51be120599a441fd5b/instaloader/instaloader.py#L323-L332
5,976
lincolnloop/salmon
salmon/metrics/models.py
Metric.whisper_filename
def whisper_filename(self): """Build a file path to the Whisper database""" source_name = self.source_id and self.source.name or '' return get_valid_filename("{0}__{1}.wsp".format(source_name, self.name))
python
def whisper_filename(self): """Build a file path to the Whisper database""" source_name = self.source_id and self.source.name or '' return get_valid_filename("{0}__{1}.wsp".format(source_name, self.name))
['def', 'whisper_filename', '(', 'self', ')', ':', 'source_name', '=', 'self', '.', 'source_id', 'and', 'self', '.', 'source', '.', 'name', 'or', "''", 'return', 'get_valid_filename', '(', '"{0}__{1}.wsp"', '.', 'format', '(', 'source_name', ',', 'self', '.', 'name', ')', ')']
Build a file path to the Whisper database
['Build', 'a', 'file', 'path', 'to', 'the', 'Whisper', 'database']
train
https://github.com/lincolnloop/salmon/blob/62a965ad9716707ea1db4afb5d9646766f29b64b/salmon/metrics/models.py#L69-L73
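Illustration (editor's sketch): the "<source>__<metric>.wsp" naming scheme from the row above, with a small re-based stand-in for Django's get_valid_filename so the example has no Django dependency.

import re

def valid_filename(name):
    # Rough stand-in for django.utils.text.get_valid_filename:
    # spaces become underscores, anything outside [-\w.] is dropped.
    name = name.strip().replace(' ', '_')
    return re.sub(r'(?u)[^-\w.]', '', name)

def whisper_filename(source_name, metric_name):
    return valid_filename('{0}__{1}.wsp'.format(source_name or '', metric_name))

# whisper_filename('web server 1', 'load') -> 'web_server_1__load.wsp'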
5,977
acsone/bobtemplates.odoo
bobtemplates/odoo/hooks.py
_insert_manifest_item
def _insert_manifest_item(configurator, key, item): """ Insert an item in the list of an existing manifest key """ with _open_manifest(configurator) as f: manifest = f.read() if item in ast.literal_eval(manifest).get(key, []): return pattern = """(["']{}["']:\\s*\\[)""".format(key) r...
python
def _insert_manifest_item(configurator, key, item): """ Insert an item in the list of an existing manifest key """ with _open_manifest(configurator) as f: manifest = f.read() if item in ast.literal_eval(manifest).get(key, []): return pattern = """(["']{}["']:\\s*\\[)""".format(key) r...
['def', '_insert_manifest_item', '(', 'configurator', ',', 'key', ',', 'item', ')', ':', 'with', '_open_manifest', '(', 'configurator', ')', 'as', 'f', ':', 'manifest', '=', 'f', '.', 'read', '(', ')', 'if', 'item', 'in', 'ast', '.', 'literal_eval', '(', 'manifest', ')', '.', 'get', '(', 'key', ',', '[', ']', ')', ':',...
Insert an item in the list of an existing manifest key
['Insert', 'an', 'item', 'in', 'the', 'list', 'of', 'an', 'existing', 'manifest', 'key']
train
https://github.com/acsone/bobtemplates.odoo/blob/6e8c3cb12747d8b5af5a9821f995f285251e4d4d/bobtemplates/odoo/hooks.py#L58-L68
5,978
DataKitchen/DKCloudCommand
DKCloudCommand/cli/__main__.py
recipe_create
def recipe_create(backend, kitchen, name): """ Create a new Recipe """ err_str, use_kitchen = Backend.get_kitchen_from_user(kitchen) if use_kitchen is None: raise click.ClickException(err_str) click.secho("%s - Creating Recipe %s for Kitchen '%s'" % (get_datetime(), name, use_kitchen), f...
python
def recipe_create(backend, kitchen, name): """ Create a new Recipe """ err_str, use_kitchen = Backend.get_kitchen_from_user(kitchen) if use_kitchen is None: raise click.ClickException(err_str) click.secho("%s - Creating Recipe %s for Kitchen '%s'" % (get_datetime(), name, use_kitchen), f...
['def', 'recipe_create', '(', 'backend', ',', 'kitchen', ',', 'name', ')', ':', 'err_str', ',', 'use_kitchen', '=', 'Backend', '.', 'get_kitchen_from_user', '(', 'kitchen', ')', 'if', 'use_kitchen', 'is', 'None', ':', 'raise', 'click', '.', 'ClickException', '(', 'err_str', ')', 'click', '.', 'secho', '(', '"%s - Creat...
Create a new Recipe
['Create', 'a', 'new', 'Recipe']
train
https://github.com/DataKitchen/DKCloudCommand/blob/1cf9cb08ab02f063eef6b5c4b327af142991daa3/DKCloudCommand/cli/__main__.py#L432-L440
5,979
mete0r/hypua2jamo
src/hypua2jamo/decoder.py
_uptrace
def _uptrace(nodelist, node): ''' 노드를 상향 추적한다. 현 노드로부터 조상 노드들을 차례로 순회하며 반환한다. 루트 노드는 제외한다. ''' if node.parent_index is None: return parent = nodelist[node.parent_index] for x in _uptrace(nodelist, parent): yield x yield node
python
def _uptrace(nodelist, node): ''' 노드를 상향 추적한다. 현 노드로부터 조상 노드들을 차례로 순회하며 반환한다. 루트 노드는 제외한다. ''' if node.parent_index is None: return parent = nodelist[node.parent_index] for x in _uptrace(nodelist, parent): yield x yield node
['def', '_uptrace', '(', 'nodelist', ',', 'node', ')', ':', 'if', 'node', '.', 'parent_index', 'is', 'None', ':', 'return', 'parent', '=', 'nodelist', '[', 'node', '.', 'parent_index', ']', 'for', 'x', 'in', '_uptrace', '(', 'nodelist', ',', 'parent', ')', ':', 'yield', 'x', 'yield', 'node']
노드를 상향 추적한다. 현 노드로부터 조상 노드들을 차례로 순회하며 반환한다. 루트 노드는 제외한다.
['노드를', '상향', '추적한다', '.']
train
https://github.com/mete0r/hypua2jamo/blob/caceb33a26c27645703d659a82bb1152deef1469/src/hypua2jamo/decoder.py#L262-L275
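The docstring in this row is Korean; roughly: "trace the node upward -- starting from the current node, walk its ancestor nodes in order and yield them, excluding the root node." An editor's usage sketch follows; the Node type is invented for the example (the real nodes only need a parent_index attribute).

from collections import namedtuple

Node = namedtuple('Node', 'name parent_index')

def uptrace(nodelist, node):
    # Yield the ancestors of `node` top-down (root excluded), then `node` itself.
    if node.parent_index is None:
        return
    parent = nodelist[node.parent_index]
    for ancestor in uptrace(nodelist, parent):
        yield ancestor
    yield node

nodes = [Node('root', None), Node('a', 0), Node('b', 1)]
# list(uptrace(nodes, nodes[2])) -> [Node(name='a', parent_index=0), Node(name='b', parent_index=1)]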
5,980
DallasMorningNews/django-datafreezer
datafreezer/views.py
parse_csv_headers
def parse_csv_headers(dataset_id): """Return the first row of a CSV as a list of headers.""" data = Dataset.objects.get(pk=dataset_id) with open(data.dataset_file.path, 'r') as datasetFile: csvReader = reader(datasetFile, delimiter=',', quotechar='"') headers = next(csvReader) # prin...
python
def parse_csv_headers(dataset_id): """Return the first row of a CSV as a list of headers.""" data = Dataset.objects.get(pk=dataset_id) with open(data.dataset_file.path, 'r') as datasetFile: csvReader = reader(datasetFile, delimiter=',', quotechar='"') headers = next(csvReader) # prin...
['def', 'parse_csv_headers', '(', 'dataset_id', ')', ':', 'data', '=', 'Dataset', '.', 'objects', '.', 'get', '(', 'pk', '=', 'dataset_id', ')', 'with', 'open', '(', 'data', '.', 'dataset_file', '.', 'path', ',', "'r'", ')', 'as', 'datasetFile', ':', 'csvReader', '=', 'reader', '(', 'datasetFile', ',', 'delimiter', '='...
Return the first row of a CSV as a list of headers.
['Return', 'the', 'first', 'row', 'of', 'a', 'CSV', 'as', 'a', 'list', 'of', 'headers', '.']
train
https://github.com/DallasMorningNews/django-datafreezer/blob/982dcf2015c80a280f1a093e32977cb71d4ea7aa/datafreezer/views.py#L253-L260
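Illustration (editor's sketch): stripped of the Django model lookup, the documented behaviour is simply reading the first CSV row.

import csv

def csv_headers(path):
    # Return the first row of a CSV file as a list of header names.
    with open(path, 'r', newline='') as handle:
        return next(csv.reader(handle, delimiter=',', quotechar='"'))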
5,981
gitpython-developers/GitPython
git/repo/base.py
Repo.config_reader
def config_reader(self, config_level=None): """ :return: GitConfigParser allowing to read the full git configuration, but not to write it The configuration will include values from the system, user and repository configuration files. :param config_level: ...
python
def config_reader(self, config_level=None): """ :return: GitConfigParser allowing to read the full git configuration, but not to write it The configuration will include values from the system, user and repository configuration files. :param config_level: ...
['def', 'config_reader', '(', 'self', ',', 'config_level', '=', 'None', ')', ':', 'files', '=', 'None', 'if', 'config_level', 'is', 'None', ':', 'files', '=', '[', 'self', '.', '_get_config_path', '(', 'f', ')', 'for', 'f', 'in', 'self', '.', 'config_level', ']', 'else', ':', 'files', '=', '[', 'self', '.', '_get_confi...
:return: GitConfigParser allowing to read the full git configuration, but not to write it The configuration will include values from the system, user and repository configuration files. :param config_level: For possible values, see config_writer method ...
[':', 'return', ':', 'GitConfigParser', 'allowing', 'to', 'read', 'the', 'full', 'git', 'configuration', 'but', 'not', 'to', 'write', 'it']
train
https://github.com/gitpython-developers/GitPython/blob/1f66e25c25cde2423917ee18c4704fff83b837d1/git/repo/base.py#L438-L457
5,982
ymyzk/python-gyazo
gyazo/image.py
Image.to_dict
def to_dict(self): """Return a dict representation of this instance""" data = {} if self.created_at: data['created_at'] = self.created_at.strftime( '%Y-%m-%dT%H:%M:%S%z') if self.image_id: data['image_id'] = self.image_id if self.permalink...
python
def to_dict(self): """Return a dict representation of this instance""" data = {} if self.created_at: data['created_at'] = self.created_at.strftime( '%Y-%m-%dT%H:%M:%S%z') if self.image_id: data['image_id'] = self.image_id if self.permalink...
['def', 'to_dict', '(', 'self', ')', ':', 'data', '=', '{', '}', 'if', 'self', '.', 'created_at', ':', 'data', '[', "'created_at'", ']', '=', 'self', '.', 'created_at', '.', 'strftime', '(', "'%Y-%m-%dT%H:%M:%S%z'", ')', 'if', 'self', '.', 'image_id', ':', 'data', '[', "'image_id'", ']', '=', 'self', '.', 'image_id', '...
Return a dict representation of this instance
['Return', 'a', 'dict', 'representation', 'of', 'this', 'instance']
train
https://github.com/ymyzk/python-gyazo/blob/52893118899ed308ff75245b55f73d745c98ed1d/gyazo/image.py#L111-L129
5,983
wmayner/pyphi
pyphi/distance.py
klm
def klm(p, q): """Compute the KLM divergence.""" p, q = flatten(p), flatten(q) return max(abs(p * np.nan_to_num(np.log(p / q))))
python
def klm(p, q): """Compute the KLM divergence.""" p, q = flatten(p), flatten(q) return max(abs(p * np.nan_to_num(np.log(p / q))))
['def', 'klm', '(', 'p', ',', 'q', ')', ':', 'p', ',', 'q', '=', 'flatten', '(', 'p', ')', ',', 'flatten', '(', 'q', ')', 'return', 'max', '(', 'abs', '(', 'p', '*', 'np', '.', 'nan_to_num', '(', 'np', '.', 'log', '(', 'p', '/', 'q', ')', ')', ')', ')']
Compute the KLM divergence.
['Compute', 'the', 'KLM', 'divergence', '.']
train
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/distance.py#L236-L239
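Illustration (editor's restatement of the one-liner above), assuming p and q are same-shaped arrays and q > 0 wherever p > 0:

import numpy as np

def klm(p, q):
    # Max absolute pointwise term p * log(p / q).  nan_to_num clips the
    # log(0) = -inf case to a finite value, so zero entries of p contribute
    # 0 * finite = 0 instead of nan.
    p = np.asarray(p, dtype=float).ravel()
    q = np.asarray(q, dtype=float).ravel()
    return np.max(np.abs(p * np.nan_to_num(np.log(p / q))))

# klm([0.5, 0.5], [0.9, 0.1]) == max(|0.5*ln(0.5/0.9)|, |0.5*ln(0.5/0.1)|) ~= 0.805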
5,984
meowklaski/custom_inherit
custom_inherit/_doc_parse_tools/numpy_parse_tools.py
parse_numpy_doc
def parse_numpy_doc(doc): """ Extract the text from the various sections of a numpy-formatted docstring. Parameters ---------- doc: Union[str, None] Returns ------- OrderedDict[str, Union[None,str]] The extracted numpy-styled docstring sections.""" ...
python
def parse_numpy_doc(doc): """ Extract the text from the various sections of a numpy-formatted docstring. Parameters ---------- doc: Union[str, None] Returns ------- OrderedDict[str, Union[None,str]] The extracted numpy-styled docstring sections.""" ...
['def', 'parse_numpy_doc', '(', 'doc', ')', ':', 'doc_sections', '=', 'OrderedDict', '(', '[', '(', '"Short Summary"', ',', 'None', ')', ',', '(', '"Deprecation Warning"', ',', 'None', ')', ',', '(', '"Attributes"', ',', 'None', ')', ',', '(', '"Extended Summary"', ',', 'None', ')', ',', '(', '"Parameters"', ',', 'None...
Extract the text from the various sections of a numpy-formatted docstring. Parameters ---------- doc: Union[str, None] Returns ------- OrderedDict[str, Union[None,str]] The extracted numpy-styled docstring sections.
['Extract', 'the', 'text', 'from', 'the', 'various', 'sections', 'of', 'a', 'numpy', '-', 'formatted', 'docstring', '.']
train
https://github.com/meowklaski/custom_inherit/blob/13bce675e246d84e21bcd7658e0a4fbf25db4adc/custom_inherit/_doc_parse_tools/numpy_parse_tools.py#L8-L56
5,985
codeforamerica/epa_python
epa/pcs/pcs.py
PCS.permit_event
def permit_event(self, column=None, value=None, **kwargs): """ A permit event tracks the lifecycle of a permit from issuance to expiration. Examples include 'Application Received' and 'Permit Issued', etc. >>> PCS().permit_event('event_actual_date', '16-MAR-04') """ ...
python
def permit_event(self, column=None, value=None, **kwargs): """ A permit event tracks the lifecycle of a permit from issuance to expiration. Examples include 'Application Received' and 'Permit Issued', etc. >>> PCS().permit_event('event_actual_date', '16-MAR-04') """ ...
['def', 'permit_event', '(', 'self', ',', 'column', '=', 'None', ',', 'value', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'return', 'self', '.', '_resolve_call', '(', "'PCS_PERMIT_EVENT'", ',', 'column', ',', 'value', ',', '*', '*', 'kwargs', ')']
A permit event tracks the lifecycle of a permit from issuance to expiration. Examples include 'Application Received' and 'Permit Issued', etc. >>> PCS().permit_event('event_actual_date', '16-MAR-04')
['A', 'permit', 'event', 'tracks', 'the', 'lifecycle', 'of', 'a', 'permit', 'from', 'issuance', 'to', 'expiration', '.', 'Examples', 'include', 'Application', 'Received', 'and', 'Permit', 'Issued', 'etc', '.']
train
https://github.com/codeforamerica/epa_python/blob/62a53da62936bea8daa487a01a52b973e9062b2c/epa/pcs/pcs.py#L144-L152
5,986
robotools/fontParts
Lib/fontParts/base/bPoint.py
BaseBPoint._set_anchor
def _set_anchor(self, value): """ Subclasses may override this method. """ pX, pY = self.anchor x, y = value dX = x - pX dY = y - pY self.moveBy((dX, dY))
python
def _set_anchor(self, value): """ Subclasses may override this method. """ pX, pY = self.anchor x, y = value dX = x - pX dY = y - pY self.moveBy((dX, dY))
['def', '_set_anchor', '(', 'self', ',', 'value', ')', ':', 'pX', ',', 'pY', '=', 'self', '.', 'anchor', 'x', ',', 'y', '=', 'value', 'dX', '=', 'x', '-', 'pX', 'dY', '=', 'y', '-', 'pY', 'self', '.', 'moveBy', '(', '(', 'dX', ',', 'dY', ')', ')']
Subclasses may override this method.
['Subclasses', 'may', 'override', 'this', 'method', '.']
train
https://github.com/robotools/fontParts/blob/d2ff106fe95f9d566161d936a645157626568712/Lib/fontParts/base/bPoint.py#L157-L165
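The _set_anchor record moves the whole bPoint by the offset between the old and new anchor rather than assigning coordinates directly. A tiny standalone illustration of that delta computation (not the fontParts API):

def anchor_delta(current, target):
    # Offset that a moveBy()-style call would receive when re-anchoring.
    (px, py), (x, y) = current, target
    return (x - px, y - py)

# anchor_delta((10, 20), (13, 18)) -> (3, -2)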
5,987
marten-de-vries/Flask-WebSub
flask_websub/hub/tasks.py
send_change_notification
def send_change_notification(hub, topic_url, updated_content=None): """7. Content Distribution""" if updated_content: body = base64.b64decode(updated_content['content']) else: body, updated_content = get_new_content(hub.config, topic_url) b64_body = updated_content['content'] heade...
python
def send_change_notification(hub, topic_url, updated_content=None): """7. Content Distribution""" if updated_content: body = base64.b64decode(updated_content['content']) else: body, updated_content = get_new_content(hub.config, topic_url) b64_body = updated_content['content'] heade...
['def', 'send_change_notification', '(', 'hub', ',', 'topic_url', ',', 'updated_content', '=', 'None', ')', ':', 'if', 'updated_content', ':', 'body', '=', 'base64', '.', 'b64decode', '(', 'updated_content', '[', "'content'", ']', ')', 'else', ':', 'body', ',', 'updated_content', '=', 'get_new_content', '(', 'hub', '.'...
7. Content Distribution
['7', '.', 'Content', 'Distribution']
train
https://github.com/marten-de-vries/Flask-WebSub/blob/422d5b597245554c47e881483f99cae7c57a81ba/flask_websub/hub/tasks.py#L18-L34
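The send_change_notification record implements section 7 of the WebSub spec. A hedged sketch of what distributing content to a single subscriber typically looks like; the use of requests, the header names, and the URLs are illustrative assumptions, not necessarily flask_websub's internals:

import hashlib, hmac
import requests  # assumed HTTP client for this sketch only

def notify_subscriber(callback_url, body, secret=None,
                      hub_url="https://hub.example.org",      # hypothetical
                      topic_url="https://example.org/feed"):  # hypothetical
    # POST the updated topic content (body is raw bytes) to the subscriber's
    # callback, advertising hub and topic via a Link header; if the
    # subscription registered a secret, sign the body so the subscriber can
    # verify authenticity.
    headers = {"Link": '<{}>; rel="hub", <{}>; rel="self"'.format(hub_url, topic_url)}
    if secret:
        digest = hmac.new(secret.encode(), body, hashlib.sha512).hexdigest()
        headers["X-Hub-Signature"] = "sha512=" + digest
    return requests.post(callback_url, data=body, headers=headers)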
5,988
locationlabs/mockredis
mockredis/client.py
MockRedis._get_zset
def _get_zset(self, name, operation, create=False): """ Get (and maybe create) a sorted set by name. """ return self._get_by_type(name, operation, create, b'zset', SortedSet(), return_default=False)
python
def _get_zset(self, name, operation, create=False): """ Get (and maybe create) a sorted set by name. """ return self._get_by_type(name, operation, create, b'zset', SortedSet(), return_default=False)
['def', '_get_zset', '(', 'self', ',', 'name', ',', 'operation', ',', 'create', '=', 'False', ')', ':', 'return', 'self', '.', '_get_by_type', '(', 'name', ',', 'operation', ',', 'create', ',', "b'zset'", ',', 'SortedSet', '(', ')', ',', 'return_default', '=', 'False', ')']
Get (and maybe create) a sorted set by name.
['Get', '(', 'and', 'maybe', 'create', ')', 'a', 'sorted', 'set', 'by', 'name', '.']
train
https://github.com/locationlabs/mockredis/blob/fd4e3117066ff0c24e86ebca007853a8092e3254/mockredis/client.py#L1459-L1463
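The _get_zset record delegates to a generic "get (and maybe create) a container of the expected type" helper. A standalone sketch of that pattern (the names are illustrative, not the mockredis API):

def get_typed(store, types, name, expected, factory, create=False):
    # Return the existing container if the stored type matches; complain on a
    # type mismatch; lazily create an empty container when asked to.
    if name in store:
        if types[name] != expected:
            raise TypeError("operation against a key holding the wrong kind of value")
        return store[name]
    if create:
        store[name], types[name] = factory(), expected
        return store[name]
    return None

# store, types = {}, {}
# zset = get_typed(store, types, "scores", b"zset", dict, create=True)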
5,989
AlexMathew/scrapple
scrapple/commands/web.py
WebCommand.execute_command
def execute_command(self): """ The web command runs the Scrapple web interface through a simple \ `Flask <http://flask.pocoo.org>`_ app. When the execute_command() method is called from the \ :ref:`runCLI() <implementation-cli>` function, it starts of two simultaneous \ ...
python
def execute_command(self): """ The web command runs the Scrapple web interface through a simple \ `Flask <http://flask.pocoo.org>`_ app. When the execute_command() method is called from the \ :ref:`runCLI() <implementation-cli>` function, it starts of two simultaneous \ ...
['def', 'execute_command', '(', 'self', ')', ':', 'print', '(', 'Back', '.', 'GREEN', '+', 'Fore', '.', 'BLACK', '+', '"Scrapple Web Interface"', ')', 'print', '(', 'Back', '.', 'RESET', '+', 'Fore', '.', 'RESET', ')', 'p1', '=', 'Process', '(', 'target', '=', 'self', '.', 'run_flask', ')', 'p2', '=', 'Process', '(', '...
The web command runs the Scrapple web interface through a simple \ `Flask <http://flask.pocoo.org>`_ app. When the execute_command() method is called from the \ :ref:`runCLI() <implementation-cli>` function, it starts off two simultaneous \ processes : - Calls the run_flask() ...
['The', 'web', 'command', 'runs', 'the', 'Scrapple', 'web', 'interface', 'through', 'a', 'simple', '\\', 'Flask', '<http', ':', '//', 'flask', '.', 'pocoo', '.', 'org', '>', '_', 'app', '.']
train
https://github.com/AlexMathew/scrapple/blob/eeb604601b155d6cc7e035855ff4d3f48f8bed74/scrapple/commands/web.py#L39-L67
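The Scrapple web command starts the Flask server and a second task as two concurrent processes. A minimal sketch of that launch pattern with multiprocessing (the two targets here are placeholders for the run_flask-style callables in the code above):

from multiprocessing import Process

def launch(serve_app, open_interface):
    # Start both tasks concurrently and wait for them, mirroring the
    # two-Process pattern visible in the tokenised code above.
    p1, p2 = Process(target=serve_app), Process(target=open_interface)
    p1.start()
    p2.start()
    p1.join()
    p2.join()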
5,990
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.getUserInfo
def getUserInfo(self): """ Query the GET /hsapi/userInfo/ REST end point of the HydroShare server. :raises: HydroShareHTTPException to signal an HTTP error :return: A JSON object representing user info, for example: { "username": "username", "first_name...
python
def getUserInfo(self): """ Query the GET /hsapi/userInfo/ REST end point of the HydroShare server. :raises: HydroShareHTTPException to signal an HTTP error :return: A JSON object representing user info, for example: { "username": "username", "first_name...
['def', 'getUserInfo', '(', 'self', ')', ':', 'url', '=', '"{url_base}/userInfo/"', '.', 'format', '(', 'url_base', '=', 'self', '.', 'url_base', ')', 'r', '=', 'self', '.', '_request', '(', "'GET'", ',', 'url', ')', 'if', 'r', '.', 'status_code', '!=', '200', ':', 'raise', 'HydroShareHTTPException', '(', '(', 'url', '...
Query the GET /hsapi/userInfo/ REST end point of the HydroShare server. :raises: HydroShareHTTPException to signal an HTTP error :return: A JSON object representing user info, for example: { "username": "username", "first_name": "First", "last_name": "Last"...
['Query', 'the', 'GET', '/', 'hsapi', '/', 'userInfo', '/', 'REST', 'end', 'point', 'of', 'the', 'HydroShare', 'server', '.']
train
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L1180-L1201
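The getUserInfo record documents a simple "GET, fail on non-200, return JSON" contract. A hedged standalone equivalent using requests; the real client goes through its own _request() wrapper and HydroShareHTTPException rather than the generic pieces used here:

import requests  # assumption for the sketch

def get_user_info(url_base):
    # GET {url_base}/userInfo/, raise on a non-200 status, return decoded JSON.
    url = "{url_base}/userInfo/".format(url_base=url_base)
    r = requests.get(url)
    if r.status_code != 200:
        raise RuntimeError("GET {} failed with HTTP {}".format(url, r.status_code))
    return r.json()  # e.g. {"username": "...", "first_name": "...", "last_name": "..."}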
5,991
Syndace/python-x3dh
x3dh/state.py
State.getSharedSecretPassive
def getSharedSecretPassive( self, passive_exchange_data, allow_no_otpk = False, keep_otpk = False ): """ Do the key exchange, as the passive party. This involves retrieving data about the key exchange from the active party. :param passive_exchange_dat...
python
def getSharedSecretPassive( self, passive_exchange_data, allow_no_otpk = False, keep_otpk = False ): """ Do the key exchange, as the passive party. This involves retrieving data about the key exchange from the active party. :param passive_exchange_dat...
['def', 'getSharedSecretPassive', '(', 'self', ',', 'passive_exchange_data', ',', 'allow_no_otpk', '=', 'False', ',', 'keep_otpk', '=', 'False', ')', ':', 'self', '.', '__checkSPKTimestamp', '(', ')', 'other_ik', '=', 'self', '.', '__KeyPair', '(', 'pub', '=', 'passive_exchange_data', '[', '"ik"', ']', ')', 'other_ek',...
Do the key exchange, as the passive party. This involves retrieving data about the key exchange from the active party. :param passive_exchange_data: A structure generated by the active party, which contains data required to complete the key exchange. See the "to_other" part of t...
['Do', 'the', 'key', 'exchange', 'as', 'the', 'passive', 'party', '.', 'This', 'involves', 'retrieving', 'data', 'about', 'the', 'key', 'exchange', 'from', 'the', 'active', 'party', '.']
train
https://github.com/Syndace/python-x3dh/blob/a6cec1ae858121b88bef1b178f5cda5e43d5c391/x3dh/state.py#L440-L545
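For context on what getSharedSecretPassive computes: in the X3DH specification, the passive party B combines four Diffie-Hellman outputs over its identity key IK_B, signed prekey SPK_B and one-time prekey OPK_B with the active party A's identity key IK_A and ephemeral key EK_A (the "ik" and "ek" fields of the exchange data above), and derives

    SK = KDF( DH(IK_A, SPK_B) || DH(EK_A, IK_B) || DH(EK_A, SPK_B) || DH(EK_A, OPK_B) )

with the last term omitted when no one-time prekey was used (the allow_no_otpk flag above). Whether this file follows the published ordering exactly is not visible in the truncated snippet.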
5,992
PyGithub/PyGithub
github/AuthenticatedUser.py
AuthenticatedUser.create_repo
def create_repo(self, name, description=github.GithubObject.NotSet, homepage=github.GithubObject.NotSet, private=github.GithubObject.NotSet, has_issues=github.GithubObject.NotSet, has_wiki=github.GithubObject.NotSet, has_downloads=github.GithubObject.NotSet, h...
python
def create_repo(self, name, description=github.GithubObject.NotSet, homepage=github.GithubObject.NotSet, private=github.GithubObject.NotSet, has_issues=github.GithubObject.NotSet, has_wiki=github.GithubObject.NotSet, has_downloads=github.GithubObject.NotSet, h...
['def', 'create_repo', '(', 'self', ',', 'name', ',', 'description', '=', 'github', '.', 'GithubObject', '.', 'NotSet', ',', 'homepage', '=', 'github', '.', 'GithubObject', '.', 'NotSet', ',', 'private', '=', 'github', '.', 'GithubObject', '.', 'NotSet', ',', 'has_issues', '=', 'github', '.', 'GithubObject', '.', 'NotS...
:calls: `POST /user/repos <http://developer.github.com/v3/repos>`_ :param name: string :param description: string :param homepage: string :param private: bool :param has_issues: bool :param has_wiki: bool :param has_downloads: bool :param has_projects: boo...
[':', 'calls', ':', 'POST', '/', 'user', '/', 'repos', '<http', ':', '//', 'developer', '.', 'github', '.', 'com', '/', 'v3', '/', 'repos', '>', '_', ':', 'param', 'name', ':', 'string', ':', 'param', 'description', ':', 'string', ':', 'param', 'homepage', ':', 'string', ':', 'param', 'private', ':', 'bool', ':', 'para...
train
https://github.com/PyGithub/PyGithub/blob/f716df86bbe7dc276c6596699fa9712b61ef974c/github/AuthenticatedUser.py#L529-L601
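The create_repo record corresponds to POST /user/repos. A short usage sketch for the authenticated-user case; the token string is a placeholder, and only parameters listed in the docstring above are assumed to exist:

from github import Github

g = Github("<personal-access-token>")  # placeholder credential
me = g.get_user()                      # AuthenticatedUser
repo = me.create_repo("demo-repo",
                      description="created via the REST endpoint above",
                      private=True,
                      has_issues=True)
print(repo.full_name)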
5,993
pyamg/pyamg
pyamg/krylov/_gmres_mgs.py
gmres_mgs
def gmres_mgs(A, b, x0=None, tol=1e-5, restrt=None, maxiter=None, xtype=None, M=None, callback=None, residuals=None, reorth=False): """Generalized Minimum Residual Method (GMRES) based on MGS. GMRES iteratively refines the initial solution guess to the system Ax = b Modified Gram-Schmidt ...
python
def gmres_mgs(A, b, x0=None, tol=1e-5, restrt=None, maxiter=None, xtype=None, M=None, callback=None, residuals=None, reorth=False): """Generalized Minimum Residual Method (GMRES) based on MGS. GMRES iteratively refines the initial solution guess to the system Ax = b Modified Gram-Schmidt ...
['def', 'gmres_mgs', '(', 'A', ',', 'b', ',', 'x0', '=', 'None', ',', 'tol', '=', '1e-5', ',', 'restrt', '=', 'None', ',', 'maxiter', '=', 'None', ',', 'xtype', '=', 'None', ',', 'M', '=', 'None', ',', 'callback', '=', 'None', ',', 'residuals', '=', 'None', ',', 'reorth', '=', 'False', ')', ':', '# Convert inputs to li...
Generalized Minimum Residual Method (GMRES) based on MGS. GMRES iteratively refines the initial solution guess to the system Ax = b Modified Gram-Schmidt version Parameters ---------- A : array, matrix, sparse matrix, LinearOperator n x n, linear system to solve b : array, matrix ...
['Generalized', 'Minimum', 'Residual', 'Method', '(', 'GMRES', ')', 'based', 'on', 'MGS', '.']
train
https://github.com/pyamg/pyamg/blob/89dc54aa27e278f65d2f54bdaf16ab97d7768fa6/pyamg/krylov/_gmres_mgs.py#L41-L365
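As background for the gmres_mgs record above: GMRES builds an orthonormal Krylov basis V_k for span{r_0, A r_0, ..., A^(k-1) r_0} (here via Arnoldi with modified Gram-Schmidt) and at step k picks the iterate that minimises the residual over that subspace,

    y_k = argmin_y || beta * e_1 - Hbar_k y ||_2,    x_k = x_0 + V_k y_k,

where Hbar_k is the (k+1) x k upper-Hessenberg matrix produced by Arnoldi and beta = ||r_0||_2 is the initial residual norm. The restrt parameter in the signature bounds k, and hence the memory held in V_k, by restarting from the current iterate.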
5,994
quantopian/zipline
zipline/data/history_loader.py
HistoryLoader.history
def history(self, assets, dts, field, is_perspective_after): """ A window of pricing data with adjustments applied assuming that the end of the window is the day before the current simulation time. Parameters ---------- assets : iterable of Assets The assets ...
python
def history(self, assets, dts, field, is_perspective_after): """ A window of pricing data with adjustments applied assuming that the end of the window is the day before the current simulation time. Parameters ---------- assets : iterable of Assets The assets ...
['def', 'history', '(', 'self', ',', 'assets', ',', 'dts', ',', 'field', ',', 'is_perspective_after', ')', ':', 'block', '=', 'self', '.', '_ensure_sliding_windows', '(', 'assets', ',', 'dts', ',', 'field', ',', 'is_perspective_after', ')', 'end_ix', '=', 'self', '.', '_calendar', '.', 'searchsorted', '(', 'dts', '[', ...
A window of pricing data with adjustments applied assuming that the end of the window is the day before the current simulation time. Parameters ---------- assets : iterable of Assets The assets in the window. dts : iterable of datetime64-like The datetime...
['A', 'window', 'of', 'pricing', 'data', 'with', 'adjustments', 'applied', 'assuming', 'that', 'the', 'end', 'of', 'the', 'window', 'is', 'the', 'day', 'before', 'the', 'current', 'simulation', 'time', '.']
train
https://github.com/quantopian/zipline/blob/77ad15e6dc4c1cbcdc133653bac8a63fc704f7fe/zipline/data/history_loader.py#L471-L555
5,995
pybel/pybel
src/pybel/manager/cache_manager.py
NamespaceManager.get_namespace_by_url
def get_namespace_by_url(self, url: str) -> Optional[Namespace]: """Look up a namespace by url.""" return self.session.query(Namespace).filter(Namespace.url == url).one_or_none()
python
def get_namespace_by_url(self, url: str) -> Optional[Namespace]: """Look up a namespace by url.""" return self.session.query(Namespace).filter(Namespace.url == url).one_or_none()
['def', 'get_namespace_by_url', '(', 'self', ',', 'url', ':', 'str', ')', '->', 'Optional', '[', 'Namespace', ']', ':', 'return', 'self', '.', 'session', '.', 'query', '(', 'Namespace', ')', '.', 'filter', '(', 'Namespace', '.', 'url', '==', 'url', ')', '.', 'one_or_none', '(', ')']
Look up a namespace by url.
['Look', 'up', 'a', 'namespace', 'by', 'url', '.']
train
https://github.com/pybel/pybel/blob/c8a7a1bdae4c475fa2a8c77f3a9a5f6d79556ca0/src/pybel/manager/cache_manager.py#L149-L151
5,996
amaas-fintech/amaas-core-sdk-python
amaascore/config.py
ConfigFactory.lookup
def lookup(self, section, name): """Lookup config value.""" value = os.environ.get('AMAAS_{}'.format(name.upper())) if value: return value try: value = self.file_config.get(section, name) except ConfigParserError: pass else: ...
python
def lookup(self, section, name): """Lookup config value.""" value = os.environ.get('AMAAS_{}'.format(name.upper())) if value: return value try: value = self.file_config.get(section, name) except ConfigParserError: pass else: ...
['def', 'lookup', '(', 'self', ',', 'section', ',', 'name', ')', ':', 'value', '=', 'os', '.', 'environ', '.', 'get', '(', "'AMAAS_{}'", '.', 'format', '(', 'name', '.', 'upper', '(', ')', ')', ')', 'if', 'value', ':', 'return', 'value', 'try', ':', 'value', '=', 'self', '.', 'file_config', '.', 'get', '(', 'section', ...
Lookup config value.
['Lookup', 'config', 'value', '.']
train
https://github.com/amaas-fintech/amaas-core-sdk-python/blob/347b71f8e776b2dde582b015e31b4802d91e8040/amaascore/config.py#L86-L100
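The ConfigFactory.lookup record resolves a setting from the environment first and a config file second. A self-contained sketch of that precedence; the AMAAS_ prefix comes from the code shown above, while the helper name is illustrative:

import os
from configparser import ConfigParser, Error as ConfigParserError

def lookup_setting(file_config, section, name, prefix="AMAAS"):
    # Environment variable PREFIX_NAME wins; otherwise fall back to the
    # [section] entry of the parsed config file, or None if neither exists.
    value = os.environ.get("{}_{}".format(prefix, name.upper()))
    if value:
        return value
    try:
        return file_config.get(section, name)
    except ConfigParserError:
        return None

# cfg = ConfigParser(); cfg.read("amaas.cfg")
# api_key = lookup_setting(cfg, "auth", "api_key")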
5,997
mushkevych/scheduler
synergy/scheduler/state_machine_recomputing.py
StateMachineRecomputing._process_state_in_progress
def _process_state_in_progress(self, job_record): """ method that takes care of processing job records in STATE_IN_PROGRESS state""" start_timeperiod = self.compute_start_timeperiod(job_record.process_name, job_record.timeperiod) end_timeperiod = self.compute_end_timeperiod(job_record.process_na...
python
def _process_state_in_progress(self, job_record): """ method that takes care of processing job records in STATE_IN_PROGRESS state""" start_timeperiod = self.compute_start_timeperiod(job_record.process_name, job_record.timeperiod) end_timeperiod = self.compute_end_timeperiod(job_record.process_na...
['def', '_process_state_in_progress', '(', 'self', ',', 'job_record', ')', ':', 'start_timeperiod', '=', 'self', '.', 'compute_start_timeperiod', '(', 'job_record', '.', 'process_name', ',', 'job_record', '.', 'timeperiod', ')', 'end_timeperiod', '=', 'self', '.', 'compute_end_timeperiod', '(', 'job_record', '.', 'proc...
method that takes care of processing job records in STATE_IN_PROGRESS state
['method', 'that', 'takes', 'care', 'of', 'processing', 'job', 'records', 'in', 'STATE_IN_PROGRESS', 'state']
train
https://github.com/mushkevych/scheduler/blob/6740331360f49083c208085fb5a60ce80ebf418b/synergy/scheduler/state_machine_recomputing.py#L84-L113
5,998
buckmaxwell/neoapi
neoapi/serializable_structured_node.py
SerializableStructuredNode.create_resource
def create_resource(cls, request_json): r""" Used to create a node in the database of type 'cls' in response to a POST request. create_resource should only \ be invoked on a resource when the client specifies a POST request. :param request_json: a dictionary formatted according to the s...
python
def create_resource(cls, request_json): r""" Used to create a node in the database of type 'cls' in response to a POST request. create_resource should only \ be invoked on a resource when the client specifies a POST request. :param request_json: a dictionary formatted according to the s...
['def', 'create_resource', '(', 'cls', ',', 'request_json', ')', ':', 'response', '=', 'dict', '(', ')', 'new_resource', ',', 'location', '=', 'None', ',', 'None', 'try', ':', 'data', '=', 'request_json', '[', "'data'", ']', 'if', 'data', '[', "'type'", ']', '!=', 'cls', '.', '__type__', ':', 'raise', 'WrongTypeError',...
r""" Used to create a node in the database of type 'cls' in response to a POST request. create_resource should only \ be invoked on a resource when the client specifies a POST request. :param request_json: a dictionary formatted according to the specification at \ http://jsonapi.org/for...
['r', 'Used', 'to', 'create', 'a', 'node', 'in', 'the', 'database', 'of', 'type', 'cls', 'in', 'response', 'to', 'a', 'POST', 'request', '.', 'create_resource', 'should', 'only', '\\', 'be', 'invoked', 'on', 'a', 'resource', 'when', 'the', 'client', 'specifies', 'a', 'POST', 'request', '.']
train
https://github.com/buckmaxwell/neoapi/blob/96c5d83c847d7a12d3d1f17931d85776f5280877/neoapi/serializable_structured_node.py#L589-L716
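The create_resource record validates an incoming JSON API payload before creating a node, including the type check visible in the tokens above (a WrongTypeError when data['type'] differs from the class's __type__). A minimal hedged sketch of that first validation step; the names and the exception used here are illustrative:

def extract_typed_data(request_json, expected_type):
    # JSON API payloads wrap the resource in a top-level "data" member whose
    # "type" must match the server-side resource type.
    data = request_json["data"]
    if data["type"] != expected_type:
        raise ValueError("expected resource of type {!r}, got {!r}".format(
            expected_type, data["type"]))
    return data

# extract_typed_data({"data": {"type": "person", "attributes": {}}}, "person")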
5,999
fracpete/python-weka-wrapper3
python/weka/flow/transformer.py
Train.do_execute
def do_execute(self): """ The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str """ if isinstance(self.input.payload, Instances): inst = None data = self.input.payload else: inst = ...
python
def do_execute(self): """ The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str """ if isinstance(self.input.payload, Instances): inst = None data = self.input.payload else: inst = ...
['def', 'do_execute', '(', 'self', ')', ':', 'if', 'isinstance', '(', 'self', '.', 'input', '.', 'payload', ',', 'Instances', ')', ':', 'inst', '=', 'None', 'data', '=', 'self', '.', 'input', '.', 'payload', 'else', ':', 'inst', '=', 'self', '.', 'input', '.', 'payload', 'data', '=', 'inst', '.', 'dataset', 'retrain', ...
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
['The', 'actual', 'execution', 'of', 'the', 'actor', '.']
train
https://github.com/fracpete/python-weka-wrapper3/blob/d850ab1bdb25fbd5a8d86e99f34a397975425838/python/weka/flow/transformer.py#L793-L843
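The Train.do_execute record first normalises its payload: a full Instances dataset is used as-is, while a single instance contributes its enclosing dataset. A tiny duck-typed sketch of that dispatch (not the python-weka-wrapper3 API itself):

def split_payload(payload, instances_type):
    # Returns (single_instance_or_None, dataset) in the spirit of the code above.
    if isinstance(payload, instances_type):
        return None, payload
    return payload, payload.dataset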