| body (string, 26 to 98.2k chars) | body_hash (int64, -9,222,864,604,528,158,000 to 9,221,803,474B) | docstring (string, 1 to 16.8k chars) | path (string, 5 to 230 chars) | name (string, 1 to 96 chars) | repository_name (string, 7 to 89 chars) | lang (stringclasses, 1 value) | body_without_docstring (string, 20 to 98.2k chars) |
|---|---|---|---|---|---|---|---|
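For orientation, each row below is one flattened record of this schema. The first row (the `setUp` method from `pranavmodx/aiida-core`) deserializes to a record like the following sketch; the field names come from the header and the values are copied verbatim from that row.

```python
record = {
    'body': "def setUp(self):\n    'Create runner object to run tests.'\n"
            '    from click.testing import CliRunner\n'
            '    self.cli_runner = CliRunner()',
    'body_hash': -5648910060475704000,
    'docstring': 'Create runner object to run tests.',
    'path': 'tests/cmdline/commands/test_group.py',
    'name': 'setUp',
    'repository_name': 'pranavmodx/aiida-core',
    'lang': 'python',
    'body_without_docstring': 'def setUp(self):\n'
                              '    from click.testing import CliRunner\n'
                              '    self.cli_runner = CliRunner()',
}
```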
def setUp(self):
    'Create runner object to run tests.'
    from click.testing import CliRunner
    self.cli_runner = CliRunner() | -5,648,910,060,475,704,000 | Create runner object to run tests. | tests/cmdline/commands/test_group.py | setUp | pranavmodx/aiida-core | python | def setUp(self):
    from click.testing import CliRunner
    self.cli_runner = CliRunner() |
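The test rows that follow all drive Click commands through this `CliRunner`. As a minimal self-contained sketch of that pattern (the `hello` command here is hypothetical, not part of aiida-core):

```python
import click
from click.testing import CliRunner

@click.command()
@click.argument('name')
def hello(name):
    """Greet NAME."""
    click.echo(f'Hello, {name}!')

runner = CliRunner()
result = runner.invoke(hello, ['world'])   # runs the command in-process
assert result.exception is None            # mirrors assertClickResultNoException
assert 'Hello, world!' in result.output
```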
def test_help(self):
'Tests help text for all group sub commands.'
options = ['--help']
result = self.cli_runner.invoke(group_list, options)
self.assertIsNone(result.exception, result.output)
self.assertIn('Usage', result.output)
result = self.cli_runner.invoke(group_create, options)
self.as... | -3,273,121,422,687,399,000 | Tests help text for all group sub commands. | tests/cmdline/commands/test_group.py | test_help | pranavmodx/aiida-core | python | def test_help(self):
options = ['--help']
result = self.cli_runner.invoke(group_list, options)
self.assertIsNone(result.exception, result.output)
self.assertIn('Usage', result.output)
result = self.cli_runner.invoke(group_create, options)
self.assertIsNone(result.exception, result.output)
... |
def test_create(self):
    'Test `verdi group create` command.'
    result = self.cli_runner.invoke(group_create, ['dummygroup5'])
    self.assertClickResultNoException(result)
    result = self.cli_runner.invoke(group_list)
    self.assertClickResultNoException(result)
    self.assertIn('dummygroup5', result.output) | 2,922,920,148,127,587,300 | Test `verdi group create` command. | tests/cmdline/commands/test_group.py | test_create | pranavmodx/aiida-core | python | def test_create(self):
    result = self.cli_runner.invoke(group_create, ['dummygroup5'])
    self.assertClickResultNoException(result)
    result = self.cli_runner.invoke(group_list)
    self.assertClickResultNoException(result)
    self.assertIn('dummygroup5', result.output) |
def test_list(self):
    'Test `verdi group list` command.'
    result = self.cli_runner.invoke(group_list)
    self.assertClickResultNoException(result)
    for grp in ['dummygroup1', 'dummygroup2']:
        self.assertIn(grp, result.output) | 907,058,038,534,539,100 | Test `verdi group list` command. | tests/cmdline/commands/test_group.py | test_list | pranavmodx/aiida-core | python | def test_list(self):
    result = self.cli_runner.invoke(group_list)
    self.assertClickResultNoException(result)
    for grp in ['dummygroup1', 'dummygroup2']:
        self.assertIn(grp, result.output) |
def test_copy(self):
    'Test `verdi group copy` command.'
    result = self.cli_runner.invoke(group_copy, ['dummygroup1', 'dummygroup2'])
    self.assertClickResultNoException(result)
    self.assertIn('Success', result.output) | 6,243,768,494,118,143,000 | Test `verdi group copy` command. | tests/cmdline/commands/test_group.py | test_copy | pranavmodx/aiida-core | python | def test_copy(self):
    result = self.cli_runner.invoke(group_copy, ['dummygroup1', 'dummygroup2'])
    self.assertClickResultNoException(result)
    self.assertIn('Success', result.output) |
def test_delete(self):
'Test `verdi group delete` command.'
orm.Group(label='group_test_delete_01').store()
orm.Group(label='group_test_delete_02').store()
result = self.cli_runner.invoke(group_delete, ['--force', 'group_test_delete_01'])
self.assertClickResultNoException(result)
result = self.c... | -4,238,451,815,892,938,000 | Test `verdi group delete` command. | tests/cmdline/commands/test_group.py | test_delete | pranavmodx/aiida-core | python | def test_delete(self):
orm.Group(label='group_test_delete_01').store()
orm.Group(label='group_test_delete_02').store()
result = self.cli_runner.invoke(group_delete, ['--force', 'group_test_delete_01'])
self.assertClickResultNoException(result)
result = self.cli_runner.invoke(group_list)
sel... |
def test_show(self):
    'Test `verdi group show` command.'
    result = self.cli_runner.invoke(group_show, ['dummygroup1'])
    self.assertClickResultNoException(result)
    for grpline in ['Group label', 'dummygroup1', 'Group type_string', 'user', 'Group description', '<no description>']:
        self.assertIn(grpline, result.output) | -4,439,619,605,897,400,300 | Test `verdi group show` command. | tests/cmdline/commands/test_group.py | test_show | pranavmodx/aiida-core | python | def test_show(self):
    result = self.cli_runner.invoke(group_show, ['dummygroup1'])
    self.assertClickResultNoException(result)
    for grpline in ['Group label', 'dummygroup1', 'Group type_string', 'user', 'Group description', '<no description>']:
        self.assertIn(grpline, result.output) |
def test_description(self):
'Test `verdi group description` command.'
description = 'It is a new description'
group = orm.load_group(label='dummygroup2')
self.assertNotEqual(group.description, description)
result = self.cli_runner.invoke(group_description, [group.label, description])
self.assert... | 7,618,296,786,687,864,000 | Test `verdi group description` command. | tests/cmdline/commands/test_group.py | test_description | pranavmodx/aiida-core | python | def test_description(self):
description = 'It is a new description'
group = orm.load_group(label='dummygroup2')
self.assertNotEqual(group.description, description)
result = self.cli_runner.invoke(group_description, [group.label, description])
self.assertClickResultNoException(result)
self.a... |
def test_relabel(self):
'Test `verdi group relabel` command.'
result = self.cli_runner.invoke(group_relabel, ['dummygroup4', 'relabeled_group'])
self.assertIsNone(result.exception, result.output)
result = self.cli_runner.invoke(group_list)
self.assertClickResultNoException(result)
self.assertNot... | 2,875,102,681,937,065,500 | Test `verdi group relabel` command. | tests/cmdline/commands/test_group.py | test_relabel | pranavmodx/aiida-core | python | def test_relabel(self):
result = self.cli_runner.invoke(group_relabel, ['dummygroup4', 'relabeled_group'])
self.assertIsNone(result.exception, result.output)
result = self.cli_runner.invoke(group_list)
self.assertClickResultNoException(result)
self.assertNotIn('dummygroup4', result.output)
... |
def test_add_remove_nodes(self):
'Test `verdi group remove-nodes` command.'
node_01 = orm.CalculationNode().store()
node_02 = orm.CalculationNode().store()
node_03 = orm.CalculationNode().store()
result = self.cli_runner.invoke(group_add_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
s... | -3,826,414,208,446,320,000 | Test `verdi group remove-nodes` command. | tests/cmdline/commands/test_group.py | test_add_remove_nodes | pranavmodx/aiida-core | python | def test_add_remove_nodes(self):
node_01 = orm.CalculationNode().store()
node_02 = orm.CalculationNode().store()
node_03 = orm.CalculationNode().store()
result = self.cli_runner.invoke(group_add_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
self.assertClickResultNoException(result)
... |
def test_copy_existing_group(self):
'Test user is prompted to continue if destination group exists and is not empty'
source_label = 'source_copy_existing_group'
dest_label = 'dest_copy_existing_group'
calc_s1 = orm.CalculationNode().store()
calc_s2 = orm.CalculationNode().store()
nodes_source_gr... | -7,379,491,617,463,558,000 | Test user is prompted to continue if destination group exists and is not empty | tests/cmdline/commands/test_group.py | test_copy_existing_group | pranavmodx/aiida-core | python | def test_copy_existing_group(self):
source_label = 'source_copy_existing_group'
dest_label = 'dest_copy_existing_group'
calc_s1 = orm.CalculationNode().store()
calc_s2 = orm.CalculationNode().store()
nodes_source_group = {str(node.uuid) for node in [calc_s1, calc_s2]}
source_group = orm.Gro... |
def forward(self, input):
    '\n input: [B, N, latent_size + point_dim]\n :param latent_codes: [B, N, LATENT_CODE_DIM]\n :param points: [B, N, 3]\n :return: sdf_pred: [B, N]\n '
    x = self.layers1(input)
    x = torch.cat((x, input), dim=-1)
    x = self.layers2(x)
    return x | 1,390,690,253,077,486,000 | input: [B, N, latent_size + point_dim]
:param latent_codes: [B, N, LATENT_CODE_DIM]
:param points: [B, N, 3]
:return: sdf_pred: [B, N] | networks/sdf_net_decoder.py | forward | FrankieYin/master_project | python | def forward(self, input):
    '\n input: [B, N, latent_size + point_dim]\n :param latent_codes: [B, N, LATENT_CODE_DIM]\n :param points: [B, N, 3]\n :return: sdf_pred: [B, N]\n '
    x = self.layers1(input)
    x = torch.cat((x, input), dim=-1)
    x = self.layers2(x)
    return x |
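The decoder above uses a skip connection: the raw input is concatenated back onto the first stack's activations before the second stack. A runnable sketch of that shape logic, with hypothetical layer definitions (`layers1`/`layers2` below are stand-ins, not the original network):

```python
import torch
import torch.nn as nn

class SkipDecoder(nn.Module):
    def __init__(self, in_dim=259, hidden=512):
        super().__init__()
        # first stack maps [B, N, in_dim] -> [B, N, hidden]
        self.layers1 = nn.Sequential(nn.Linear(in_dim, hidden), nn.ReLU())
        # second stack consumes the concatenation [B, N, hidden + in_dim]
        self.layers2 = nn.Linear(hidden + in_dim, 1)

    def forward(self, x_in):
        x = self.layers1(x_in)
        x = torch.cat((x, x_in), dim=-1)  # skip connection back to the raw input
        return self.layers2(x)

out = SkipDecoder()(torch.randn(2, 100, 259))  # -> shape [2, 100, 1]
```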
@search_blueprint.route('/table', methods=['POST'])
def search_table() -> Response:
'\n Parse the request arguments and call the helper method to execute a table search\n :return: a Response created with the results from the helper method\n '
try:
request_json = request.get_json()
searc... | 8,425,497,943,664,630,000 | Parse the request arguments and call the helper method to execute a table search
:return: a Response created with the results from the helper method | amundsen_application/api/search/v0.py | search_table | ai-platform/amundsenfrontendlibrary | python | @search_blueprint.route('/table', methods=['POST'])
def search_table() -> Response:
'\n Parse the request arguments and call the helper method to execute a table search\n :return: a Response created with the results from the helper method\n '
try:
request_json = request.get_json()
searc... |
@action_logging
def _search_table(*, search_term: str, page_index: int, filters: Dict, search_type: str) -> Dict[(str, Any)]:
"\n Call the search service endpoint and return matching results\n Search service logic defined here:\n https://github.com/lyft/amundsensearchlibrary/blob/master/search_service/api/... | 509,113,720,001,199,170 | Call the search service endpoint and return matching results
Search service logic defined here:
https://github.com/lyft/amundsensearchlibrary/blob/master/search_service/api/table.py
:return: a json output containing search results array as 'results' | amundsen_application/api/search/v0.py | _search_table | ai-platform/amundsenfrontendlibrary | python | @action_logging
def _search_table(*, search_term: str, page_index: int, filters: Dict, search_type: str) -> Dict[(str, Any)]:
"\n Call the search service endpoint and return matching results\n Search service logic defined here:\n https://github.com/lyft/amundsensearchlibrary/blob/master/search_service/api/... |
@search_blueprint.route('/user', methods=['GET'])
def search_user() -> Response:
'\n Parse the request arguments and call the helper method to execute a user search\n :return: a Response created with the results from the helper method\n '
try:
search_term = get_query_param(request.args, 'query'... | -3,573,663,369,142,424,000 | Parse the request arguments and call the helper method to execute a user search
:return: a Response created with the results from the helper method | amundsen_application/api/search/v0.py | search_user | ai-platform/amundsenfrontendlibrary | python | @search_blueprint.route('/user', methods=['GET'])
def search_user() -> Response:
'\n Parse the request arguments and call the helper method to execute a user search\n :return: a Response created with the results from the helper method\n '
try:
search_term = get_query_param(request.args, 'query'... |
@action_logging
def _search_user(*, search_term: str, page_index: int, search_type: str) -> Dict[(str, Any)]:
"\n Call the search service endpoint and return matching results\n Search service logic defined here:\n https://github.com/lyft/amundsensearchlibrary/blob/master/search_service/api/user.py\n\n :... | -5,010,729,810,745,006,000 | Call the search service endpoint and return matching results
Search service logic defined here:
https://github.com/lyft/amundsensearchlibrary/blob/master/search_service/api/user.py
:return: a json output containing search results array as 'results' | amundsen_application/api/search/v0.py | _search_user | ai-platform/amundsenfrontendlibrary | python | @action_logging
def _search_user(*, search_term: str, page_index: int, search_type: str) -> Dict[(str, Any)]:
"\n Call the search service endpoint and return matching results\n Search service logic defined here:\n https://github.com/lyft/amundsensearchlibrary/blob/master/search_service/api/user.py\n\n :... |
@_dispatch.add_dispatch_list
@tf_export('ignite_dataset')
def ignite_dataset(cache_name, host, port, local, part, page_size, schema, permutation, name=None):
'IgniteDataset that allows to get data from Apache Ignite.\n\n Apache Ignite is a memory-centric distributed database, caching, and processing\n platform fo... | 9,173,039,070,967,508,000 | IgniteDataset that allows to get data from Apache Ignite.
Apache Ignite is a memory-centric distributed database, caching, and processing
platform for transactional, analytical, and streaming workloads, delivering
in-memory speeds at petabyte scale. This contrib package contains an
integration between Apache Ignite an... | venv/lib/python3.7/site-packages/tensorflow_core/contrib/ignite/python/ops/gen_dataset_ops.py | ignite_dataset | aMp37/SimpleHTR | python | @_dispatch.add_dispatch_list
@tf_export('ignite_dataset')
def ignite_dataset(cache_name, host, port, local, part, page_size, schema, permutation, name=None):
'IgniteDataset that allows to get data from Apache Ignite.\n\n Apache Ignite is a memory-centric distributed database, caching, and processing\n platform fo... |
def ignite_dataset_eager_fallback(cache_name, host, port, local, part, page_size, schema, permutation, name=None, ctx=None):
'This is the slowpath function for Eager mode.\n This is for function ignite_dataset\n '
_ctx = (ctx if ctx else _context.context())
cache_name = _ops.convert_to_tensor(cache_name, ... | 5,534,718,431,688,911,000 | This is the slowpath function for Eager mode.
This is for function ignite_dataset | venv/lib/python3.7/site-packages/tensorflow_core/contrib/ignite/python/ops/gen_dataset_ops.py | ignite_dataset_eager_fallback | aMp37/SimpleHTR | python | def ignite_dataset_eager_fallback(cache_name, host, port, local, part, page_size, schema, permutation, name=None, ctx=None):
'This is the slowpath function for Eager mode.\n This is for function ignite_dataset\n '
_ctx = (ctx if ctx else _context.context())
cache_name = _ops.convert_to_tensor(cache_name, ... |
def speaker_access(f):
'\n Decorator that protects the view relative to a speaker.\n '
@functools.wraps(f)
def wrapper(request, slug, **kwargs):
spk = get_object_or_404(models.Speaker, slug=slug)
if (request.user.is_staff or (request.user == spk.user)):
full_access = True
... | 5,354,350,178,998,343,000 | Decorator that protects the view relative to a speaker. | conference/decorators.py | speaker_access | cezar77/epcon | python | def speaker_access(f):
'\n \n '
@functools.wraps(f)
def wrapper(request, slug, **kwargs):
spk = get_object_or_404(models.Speaker, slug=slug)
if (request.user.is_staff or (request.user == spk.user)):
full_access = True
talks = spk.talks()
else:
... |
def talk_access(f):
'\n Decorator that protects the view relative to a talk.\n '
@functools.wraps(f)
def wrapper(request, slug, **kwargs):
tlk = get_object_or_404(models.Talk, slug=slug)
if request.user.is_anonymous():
full_access = False
elif request.user.is_staff... | -8,703,531,422,658,072,000 | Decorator that protects the view relative to a talk. | conference/decorators.py | talk_access | cezar77/epcon | python | def talk_access(f):
'\n \n '
@functools.wraps(f)
def wrapper(request, slug, **kwargs):
tlk = get_object_or_404(models.Talk, slug=slug)
if request.user.is_anonymous():
full_access = False
elif request.user.is_staff:
full_access = True
else:
... |
def profile_access(f):
    '\n    Decorator that protects the view relative to a profile.\n    '
    @functools.wraps(f)
    def wrapper(request, slug, **kwargs):
        try:
            profile = models.AttendeeProfile.objects.select_related('user').get(slug=slug)
        except models.AttendeeProfile.DoesNotExist:
            ... | 6,321,148,472,076,626,000 | Decorator that protects the view relative to a profile. | conference/decorators.py | profile_access | cezar77/epcon | python | def profile_access(f):
    '\n \n '
    @functools.wraps(f)
    def wrapper(request, slug, **kwargs):
        try:
            profile = models.AttendeeProfile.objects.select_related('user').get(slug=slug)
        except models.AttendeeProfile.DoesNotExist:
            raise http.Http404()
        if (request.us... |
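`speaker_access`, `talk_access`, and `profile_access` share one shape: resolve the object from the slug, derive `full_access` from the requesting user, then call the wrapped view. Since all three rows are truncated, here is a generic hedged sketch of that pattern (the keyword names passed to the view are illustrative assumptions, not the epcon API):

```python
import functools
from django.shortcuts import get_object_or_404

def object_access(model):
    'Generic sketch of the speaker/talk/profile access decorators above.'
    def decorator(view):
        @functools.wraps(view)
        def wrapper(request, slug, **kwargs):
            obj = get_object_or_404(model, slug=slug)
            # staff and the owning user get full access; everyone else a restricted view
            full_access = request.user.is_staff or (request.user == getattr(obj, 'user', None))
            return view(request, slug=slug, obj=obj, full_access=full_access, **kwargs)
        return wrapper
    return decorator
```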
def flatten_reply(reply):
"Flatten node replies.\n\n Convert from a list of replies in this format::\n\n [{'example@example.com': reply},\n {'example@example.com': reply}]\n\n into this format::\n\n {'example@example.com': reply,\n 'example@example.com': reply}\n "
(nodes,... | -8,466,753,339,999,273,000 | Flatten node replies.
Convert from a list of replies in this format::
[{'example@example.com': reply},
{'example@example.com': reply}]
into this format::
{'example@example.com': reply,
'example@example.com': reply} | idps/lib/python3.7/site-packages/celery/app/control.py | flatten_reply | DTrafford/IDPS | python | def flatten_reply(reply):
"Flatten node replies.\n\n Convert from a list of replies in this format::\n\n [{'example@example.com': reply},\n {'example@example.com': reply}]\n\n into this format::\n\n {'example@example.com': reply,\n 'example@example.com': reply}\n "
(nodes,... |
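The `flatten_reply` body is truncated above, but its docstring pins the transformation down exactly: merge a list of single-entry dicts into one dict. A minimal sketch consistent with that contract (Celery's real implementation also warns about duplicate node names, which is omitted here):

```python
def flatten_reply(reply):
    """Merge [{'node1': r1}, {'node2': r2}] into {'node1': r1, 'node2': r2}."""
    nodes = {}
    for item in reply or []:
        nodes.update(item)
    return nodes

assert flatten_reply([{'a@h': 1}, {'b@h': 2}]) == {'a@h': 1, 'b@h': 2}
```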
def purge(self, connection=None):
'Discard all waiting tasks.\n\n This will ignore all tasks waiting for execution, and they will\n be deleted from the messaging server.\n\n Arguments:\n connection (kombu.Connection): Optional specific connection\n instance to use. If... | -8,160,378,701,582,752,000 | Discard all waiting tasks.
This will ignore all tasks waiting for execution, and they will
be deleted from the messaging server.
Arguments:
connection (kombu.Connection): Optional specific connection
instance to use. If not provided a connection will
be acquired from the connection pool.
Returns... | idps/lib/python3.7/site-packages/celery/app/control.py | purge | DTrafford/IDPS | python | def purge(self, connection=None):
'Discard all waiting tasks.\n\n This will ignore all tasks waiting for execution, and they will\n be deleted from the messaging server.\n\n Arguments:\n connection (kombu.Connection): Optional specific connection\n instance to use. If... |
def revoke(self, task_id, destination=None, terminate=False, signal=TERM_SIGNAME, **kwargs):
'Tell all (or specific) workers to revoke a task by id.\n\n If a task is revoked, the workers will ignore the task and\n not execute it after all.\n\n Arguments:\n task_id (str): Id of the ta... | 1,868,803,149,643,385,000 | Tell all (or specific) workers to revoke a task by id.
If a task is revoked, the workers will ignore the task and
not execute it after all.
Arguments:
task_id (str): Id of the task to revoke.
terminate (bool): Also terminate the process currently working
on the task (if any).
signal (str): Name of... | idps/lib/python3.7/site-packages/celery/app/control.py | revoke | DTrafford/IDPS | python | def revoke(self, task_id, destination=None, terminate=False, signal=TERM_SIGNAME, **kwargs):
'Tell all (or specific) workers to revoke a task by id.\n\n If a task is revoked, the workers will ignore the task and\n not execute it after all.\n\n Arguments:\n task_id (str): Id of the ta... |
def terminate(self, task_id, destination=None, signal=TERM_SIGNAME, **kwargs):
'Tell all (or specific) workers to terminate a task by id.\n\n See Also:\n This is just a shortcut to :meth:`revoke` with the terminate\n argument enabled.\n '
return self.revoke(task_id, destinati... | -4,554,651,076,549,802,000 | Tell all (or specific) workers to terminate a task by id.
See Also:
This is just a shortcut to :meth:`revoke` with the terminate
argument enabled. | idps/lib/python3.7/site-packages/celery/app/control.py | terminate | DTrafford/IDPS | python | def terminate(self, task_id, destination=None, signal=TERM_SIGNAME, **kwargs):
'Tell all (or specific) workers to terminate a task by id.\n\n See Also:\n This is just a shortcut to :meth:`revoke` with the terminate\n argument enabled.\n '
return self.revoke(task_id, destinati... |
def ping(self, destination=None, timeout=1.0, **kwargs):
"Ping all (or specific) workers.\n\n Returns:\n List[Dict]: List of ``{'hostname': reply}`` dictionaries.\n\n See Also:\n :meth:`broadcast` for supported keyword arguments.\n "
return self.broadcast('ping', reply... | -6,216,762,758,353,650,000 | Ping all (or specific) workers.
Returns:
List[Dict]: List of ``{'hostname': reply}`` dictionaries.
See Also:
:meth:`broadcast` for supported keyword arguments. | idps/lib/python3.7/site-packages/celery/app/control.py | ping | DTrafford/IDPS | python | def ping(self, destination=None, timeout=1.0, **kwargs):
"Ping all (or specific) workers.\n\n Returns:\n List[Dict]: List of ``{'hostname': reply}`` dictionaries.\n\n See Also:\n :meth:`broadcast` for supported keyword arguments.\n "
return self.broadcast('ping', reply... |
def rate_limit(self, task_name, rate_limit, destination=None, **kwargs):
"Tell workers to set a new rate limit for task by type.\n\n Arguments:\n task_name (str): Name of task to change rate limit for.\n rate_limit (int, str): The rate limit as tasks per second,\n or a ra... | 2,807,817,230,656,315,000 | Tell workers to set a new rate limit for task by type.
Arguments:
task_name (str): Name of task to change rate limit for.
rate_limit (int, str): The rate limit as tasks per second,
or a rate limit string (`'100/m'`, etc.
see :attr:`celery.task.base.Task.rate_limit` for
more information)... | idps/lib/python3.7/site-packages/celery/app/control.py | rate_limit | DTrafford/IDPS | python | def rate_limit(self, task_name, rate_limit, destination=None, **kwargs):
"Tell workers to set a new rate limit for task by type.\n\n Arguments:\n task_name (str): Name of task to change rate limit for.\n rate_limit (int, str): The rate limit as tasks per second,\n or a ra... |
def add_consumer(self, queue, exchange=None, exchange_type='direct', routing_key=None, options=None, destination=None, **kwargs):
"Tell all (or specific) workers to start consuming from a new queue.\n\n Only the queue name is required as if only the queue is specified\n then the exchange/routing key w... | -4,895,948,726,730,740,000 | Tell all (or specific) workers to start consuming from a new queue.
Only the queue name is required as if only the queue is specified
then the exchange/routing key will be set to the same name (
like automatic queues do).
Note:
This command does not respect the default queue/exchange
options in the configurat... | idps/lib/python3.7/site-packages/celery/app/control.py | add_consumer | DTrafford/IDPS | python | def add_consumer(self, queue, exchange=None, exchange_type='direct', routing_key=None, options=None, destination=None, **kwargs):
"Tell all (or specific) workers to start consuming from a new queue.\n\n Only the queue name is required as if only the queue is specified\n then the exchange/routing key w... |
def cancel_consumer(self, queue, destination=None, **kwargs):
'Tell all (or specific) workers to stop consuming from ``queue``.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
return self.broadcast('cancel_consumer', destination=destination, arguments={'queue': que... | 3,505,204,021,316,517,400 | Tell all (or specific) workers to stop consuming from ``queue``.
See Also:
Supports the same arguments as :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | cancel_consumer | DTrafford/IDPS | python | def cancel_consumer(self, queue, destination=None, **kwargs):
'Tell all (or specific) workers to stop consuming from ``queue``.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
return self.broadcast('cancel_consumer', destination=destination, arguments={'queue': que... |
def time_limit(self, task_name, soft=None, hard=None, destination=None, **kwargs):
'Tell workers to set time limits for a task by type.\n\n Arguments:\n task_name (str): Name of task to change time limits for.\n soft (float): New soft time limit (in seconds).\n hard (float): ... | -1,159,561,853,796,097,500 | Tell workers to set time limits for a task by type.
Arguments:
task_name (str): Name of task to change time limits for.
soft (float): New soft time limit (in seconds).
hard (float): New hard time limit (in seconds).
**kwargs (Any): arguments passed on to :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | time_limit | DTrafford/IDPS | python | def time_limit(self, task_name, soft=None, hard=None, destination=None, **kwargs):
'Tell workers to set time limits for a task by type.\n\n Arguments:\n task_name (str): Name of task to change time limits for.\n soft (float): New soft time limit (in seconds).\n hard (float): ... |
def enable_events(self, destination=None, **kwargs):
    'Tell all (or specific) workers to enable events.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('enable_events', arguments={}, destination=destination, **kwargs) | -6,017,461,550,049,096,000 | Tell all (or specific) workers to enable events.
See Also:
    Supports the same arguments as :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | enable_events | DTrafford/IDPS | python | def enable_events(self, destination=None, **kwargs):
    'Tell all (or specific) workers to enable events.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('enable_events', arguments={}, destination=destination, **kwargs) |
def disable_events(self, destination=None, **kwargs):
    'Tell all (or specific) workers to disable events.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('disable_events', arguments={}, destination=destination, **kwargs) | 3,649,242,235,164,667,000 | Tell all (or specific) workers to disable events.
See Also:
    Supports the same arguments as :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | disable_events | DTrafford/IDPS | python | def disable_events(self, destination=None, **kwargs):
    'Tell all (or specific) workers to disable events.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('disable_events', arguments={}, destination=destination, **kwargs) |
def pool_grow(self, n=1, destination=None, **kwargs):
    'Tell all (or specific) workers to grow the pool by ``n``.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('pool_grow', arguments={'n': n}, destination=destination, **kwargs) | 1,891,126,562,088,961,500 | Tell all (or specific) workers to grow the pool by ``n``.
See Also:
    Supports the same arguments as :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | pool_grow | DTrafford/IDPS | python | def pool_grow(self, n=1, destination=None, **kwargs):
    'Tell all (or specific) workers to grow the pool by ``n``.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('pool_grow', arguments={'n': n}, destination=destination, **kwargs) |
def pool_shrink(self, n=1, destination=None, **kwargs):
    'Tell all (or specific) workers to shrink the pool by ``n``.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('pool_shrink', arguments={'n': n}, destination=destination, **kwargs) | -3,755,088,330,896,770,600 | Tell all (or specific) workers to shrink the pool by ``n``.
See Also:
    Supports the same arguments as :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | pool_shrink | DTrafford/IDPS | python | def pool_shrink(self, n=1, destination=None, **kwargs):
    'Tell all (or specific) workers to shrink the pool by ``n``.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('pool_shrink', arguments={'n': n}, destination=destination, **kwargs) |
def autoscale(self, max, min, destination=None, **kwargs):
    'Change worker(s) autoscale setting.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('autoscale', arguments={'max': max, 'min': min}, destination=destination, **kwargs) | -7,394,465,980,728,531,000 | Change worker(s) autoscale setting.
See Also:
    Supports the same arguments as :meth:`broadcast`. | idps/lib/python3.7/site-packages/celery/app/control.py | autoscale | DTrafford/IDPS | python | def autoscale(self, max, min, destination=None, **kwargs):
    'Change worker(s) autoscale setting.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`.\n '
    return self.broadcast('autoscale', arguments={'max': max, 'min': min}, destination=destination, **kwargs) |
def shutdown(self, destination=None, **kwargs):
    'Shutdown worker(s).\n\n See Also:\n Supports the same arguments as :meth:`broadcast`\n '
    return self.broadcast('shutdown', arguments={}, destination=destination, **kwargs) | 3,164,932,352,739,790,000 | Shutdown worker(s).
See Also:
    Supports the same arguments as :meth:`broadcast` | idps/lib/python3.7/site-packages/celery/app/control.py | shutdown | DTrafford/IDPS | python | def shutdown(self, destination=None, **kwargs):
    'Shutdown worker(s).\n\n See Also:\n Supports the same arguments as :meth:`broadcast`\n '
    return self.broadcast('shutdown', arguments={}, destination=destination, **kwargs) |
def pool_restart(self, modules=None, reload=False, reloader=None, destination=None, **kwargs):
'Restart the execution pools of all or specific workers.\n\n Keyword Arguments:\n modules (Sequence[str]): List of modules to reload.\n reload (bool): Flag to enable module reloading. Default... | -4,415,053,140,935,657,500 | Restart the execution pools of all or specific workers.
Keyword Arguments:
modules (Sequence[str]): List of modules to reload.
reload (bool): Flag to enable module reloading. Default is False.
reloader (Any): Function to reload a module.
destination (Sequence[str]): List of worker names to send this
... | idps/lib/python3.7/site-packages/celery/app/control.py | pool_restart | DTrafford/IDPS | python | def pool_restart(self, modules=None, reload=False, reloader=None, destination=None, **kwargs):
'Restart the execution pools of all or specific workers.\n\n Keyword Arguments:\n modules (Sequence[str]): List of modules to reload.\n reload (bool): Flag to enable module reloading. Default... |
def heartbeat(self, destination=None, **kwargs):
    'Tell worker(s) to send a heartbeat immediately.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`\n '
    return self.broadcast('heartbeat', arguments={}, destination=destination, **kwargs) | -2,943,589,852,549,582,300 | Tell worker(s) to send a heartbeat immediately.
See Also:
    Supports the same arguments as :meth:`broadcast` | idps/lib/python3.7/site-packages/celery/app/control.py | heartbeat | DTrafford/IDPS | python | def heartbeat(self, destination=None, **kwargs):
    'Tell worker(s) to send a heartbeat immediately.\n\n See Also:\n Supports the same arguments as :meth:`broadcast`\n '
    return self.broadcast('heartbeat', arguments={}, destination=destination, **kwargs) |
def broadcast(self, command, arguments=None, destination=None, connection=None, reply=False, timeout=1.0, limit=None, callback=None, channel=None, pattern=None, matcher=None, **extra_kwargs):
'Broadcast a control command to the celery workers.\n\n Arguments:\n command (str): Name of command to sen... | -1,976,461,350,039,020,300 | Broadcast a control command to the celery workers.
Arguments:
command (str): Name of command to send.
arguments (Dict): Keyword arguments for the command.
destination (List): If set, a list of the hosts to send the
command to, when empty broadcast to all workers.
connection (kombu.Connection): ... | idps/lib/python3.7/site-packages/celery/app/control.py | broadcast | DTrafford/IDPS | python | def broadcast(self, command, arguments=None, destination=None, connection=None, reply=False, timeout=1.0, limit=None, callback=None, channel=None, pattern=None, matcher=None, **extra_kwargs):
'Broadcast a control command to the celery workers.\n\n Arguments:\n command (str): Name of command to sen... |
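Every control method in the rows above is a thin wrapper that delegates to `broadcast`. A short usage sketch of this API from application code (the broker URL and worker name are hypothetical):

```python
from celery import Celery

app = Celery('proj', broker='redis://localhost:6379/0')

# ping every worker, waiting at most half a second for replies
replies = app.control.ping(timeout=0.5)

# revoke and terminate a task on specific workers only
app.control.revoke('a1b2c3', destination=['worker1@example.com'], terminate=True)

# grow the pool by two processes on all workers
app.control.pool_grow(2)
```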
def _has_restrictions(ts):
    'Determine whether the given task has restrictions and whether these\n restrictions are strict.\n '
    return ((not ts.loose_restrictions) and (ts.host_restrictions or ts.worker_restrictions or ts.resource_restrictions)) | 3,853,626,670,157,284,000 | Determine whether the given task has restrictions and whether these
restrictions are strict. | distributed/stealing.py | _has_restrictions | ncclementi/distributed | python | def _has_restrictions(ts):
    'Determine whether the given task has restrictions and whether these\n restrictions are strict.\n '
    return ((not ts.loose_restrictions) and (ts.host_restrictions or ts.worker_restrictions or ts.resource_restrictions)) |
def _can_steal(thief, ts, victim):
'Determine whether worker ``thief`` can steal task ``ts`` from worker\n ``victim``.\n\n Assumes that `ts` has some restrictions.\n '
if (ts.host_restrictions and (get_address_host(thief.address) not in ts.host_restrictions)):
return False
elif (ts.worker_r... | -5,987,312,310,506,130,000 | Determine whether worker ``thief`` can steal task ``ts`` from worker
``victim``.
Assumes that `ts` has some restrictions. | distributed/stealing.py | _can_steal | ncclementi/distributed | python | def _can_steal(thief, ts, victim):
'Determine whether worker ``thief`` can steal task ``ts`` from worker\n ``victim``.\n\n Assumes that `ts` has some restrictions.\n '
if (ts.host_restrictions and (get_address_host(thief.address) not in ts.host_restrictions)):
return False
elif (ts.worker_r... |
async def start(self, scheduler=None):
    'Start the background coroutine to balance the tasks on the cluster.\n Idempotent.\n The scheduler argument is ignored. It is merely required to satisfy the\n plugin interface. Since this class is simultaneously an extension, the\n scheduler instanc... | 8,660,697,406,665,815,000 | Start the background coroutine to balance the tasks on the cluster.
Idempotent.
The scheduler argument is ignored. It is merely required to satisfy the
plugin interface. Since this class is simultaneously an extension, the
scheduler instance is already registered during initialization | distributed/stealing.py | start | ncclementi/distributed | python | async def start(self, scheduler=None):
    'Start the background coroutine to balance the tasks on the cluster.\n Idempotent.\n The scheduler argument is ignored. It is merely required to satisfy the\n plugin interface. Since this class is simultaneously an extension, the\n scheduler instanc... |
async def stop(self):
'Stop the background task balancing tasks on the cluster.\n This will block until all currently running stealing requests are\n finished. Idempotent\n '
pc = self.scheduler.periodic_callbacks.pop('stealing', None)
if pc:
pc.stop()
(await self._in_flight... | 6,955,667,300,840,942,000 | Stop the background task balancing tasks on the cluster.
This will block until all currently running stealing requests are
finished. Idempotent | distributed/stealing.py | stop | ncclementi/distributed | python | async def stop(self):
'Stop the background task balancing tasks on the cluster.\n This will block until all currently running stealing requests are\n finished. Idempotent\n '
pc = self.scheduler.periodic_callbacks.pop('stealing', None)
if pc:
pc.stop()
(await self._in_flight... |
def _to_dict(self, *, exclude: Container[str]=()) -> dict:
    '\n A very verbose dictionary representation for debugging purposes.\n Not type stable and not intended for roundtrips.\n\n Parameters\n ----------\n comm:\n exclude:\n A list of attributes which must not ... | 3,584,262,214,568,210,000 | A very verbose dictionary representation for debugging purposes.
Not type stable and not intended for roundtrips.
Parameters
----------
comm:
exclude:
    A list of attributes which must not be present in the output.
See also
--------
Client.dump_cluster_state | distributed/stealing.py | _to_dict | ncclementi/distributed | python | def _to_dict(self, *, exclude: Container[str]=()) -> dict:
    '\n A very verbose dictionary representation for debugging purposes.\n Not type stable and not intended for roundtrips.\n\n Parameters\n ----------\n comm:\n exclude:\n A list of attributes which must not ... |
def steal_time_ratio(self, ts):
'The compute to communication time ratio of a key\n\n Returns\n -------\n cost_multiplier: The increased cost from moving this task as a factor.\n For example a result of zero implies a task without dependencies.\n level: The location within a steal... | 4,686,170,437,953,275,000 | The compute to communication time ratio of a key
Returns
-------
cost_multiplier: The increased cost from moving this task as a factor.
For example a result of zero implies a task without dependencies.
level: The location within a stealable list to place this value | distributed/stealing.py | steal_time_ratio | ncclementi/distributed | python | def steal_time_ratio(self, ts):
'The compute to communication time ratio of a key\n\n Returns\n -------\n cost_multiplier: The increased cost from moving this task as a factor.\n For example a result of zero implies a task without dependencies.\n level: The location within a steal... |
def load_tf_weights_in_gpt_neo(model, config, gpt_neo_checkpoint_path):
'Load tf checkpoints in a pytorch model'
try:
import re
import tensorflow as tf
except ImportError:
logger.error('Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see https://www... | 8,552,888,941,187,628,000 | Load tf checkpoints in a pytorch model | src/stable_library_code/transformers/gpt_neo/modeling_gpt_neo.py | load_tf_weights_in_gpt_neo | Snarp/nostalgebraist-autoresponder | python | def load_tf_weights_in_gpt_neo(model, config, gpt_neo_checkpoint_path):
try:
import re
import tensorflow as tf
except ImportError:
logger.error('Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see https://www.tensorflow.org/install/ for installatio... |
def _split_heads(self, tensor, num_heads, attn_head_size):
'\n Splits hidden_size dim into attn_head_size and num_heads\n '
new_shape = (tensor.size()[:(- 1)] + (num_heads, attn_head_size))
tensor = tensor.view(*new_shape)
if (len(tensor.shape) == 5):
return tensor.permute(0, 1, 3,... | 3,453,971,312,782,584,300 | Splits hidden_size dim into attn_head_size and num_heads | src/stable_library_code/transformers/gpt_neo/modeling_gpt_neo.py | _split_heads | Snarp/nostalgebraist-autoresponder | python | def _split_heads(self, tensor, num_heads, attn_head_size):
'\n \n '
new_shape = (tensor.size()[:(- 1)] + (num_heads, attn_head_size))
tensor = tensor.view(*new_shape)
if (len(tensor.shape) == 5):
return tensor.permute(0, 1, 3, 2, 4)
elif (len(tensor.shape) == 4):
return... |
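`_split_heads` and `_merge_heads` implement the standard multi-head attention reshape. A runnable sketch of the 4-dimensional case with illustrative sizes, showing that merge inverts split:

```python
import torch

B, T, H, D = 2, 5, 8, 64          # batch, seq len, heads, head size
x = torch.randn(B, T, H * D)      # [B, T, hidden]

# split: [B, T, hidden] -> [B, T, H, D] -> [B, H, T, D]
split = x.view(B, T, H, D).permute(0, 2, 1, 3)

# merge is the inverse: [B, H, T, D] -> [B, T, H, D] -> [B, T, hidden]
merged = split.permute(0, 2, 1, 3).contiguous().view(B, T, H * D)

assert torch.equal(x, merged)
```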
def _merge_heads(self, tensor, num_heads, attn_head_size):
'\n Merges attn_head_size dim and num_attn_heads dim into hidden_size\n '
if (len(tensor.shape) == 5):
tensor = tensor.permute(0, 1, 3, 2, 4).contiguous()
elif (len(tensor.shape) == 4):
tensor = tensor.permute(0, 2, 1, ... | -6,433,798,251,814,849,000 | Merges attn_head_size dim and num_attn_heads dim into hidden_size | src/stable_library_code/transformers/gpt_neo/modeling_gpt_neo.py | _merge_heads | Snarp/nostalgebraist-autoresponder | python | def _merge_heads(self, tensor, num_heads, attn_head_size):
'\n \n '
if (len(tensor.shape) == 5):
tensor = tensor.permute(0, 1, 3, 2, 4).contiguous()
elif (len(tensor.shape) == 4):
tensor = tensor.permute(0, 2, 1, 3).contiguous()
else:
raise ValueError(f'Input tensor... |
def _init_weights(self, module):
'Initialize the weights.'
if isinstance(module, (LazyLinearAPICompatible,)):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if (module.bias is not None):
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
... | 3,737,366,796,486,095,000 | Initialize the weights. | src/stable_library_code/transformers/gpt_neo/modeling_gpt_neo.py | _init_weights | Snarp/nostalgebraist-autoresponder | python | def _init_weights(self, module):
if isinstance(module, (LazyLinearAPICompatible,)):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if (module.bias is not None):
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.no... |
@add_start_docstrings_to_model_forward(GPT_NEO_INPUTS_DOCSTRING)
@add_code_sample_docstrings(tokenizer_class=_TOKENIZER_FOR_DOC, checkpoint=_CHECKPOINT_FOR_DOC, output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(self, input_ids=None, past_key_values=None, attention_mask=None, token... | 5,923,136,324,066,054,000 | labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
``labels = input_ids`` Indices are selected in ``[-100, 0, ..., config.vocab_size]`` All labels set to
``-100`` ar... | src/stable_library_code/transformers/gpt_neo/modeling_gpt_neo.py | forward | Snarp/nostalgebraist-autoresponder | python | @add_start_docstrings_to_model_forward(GPT_NEO_INPUTS_DOCSTRING)
@add_code_sample_docstrings(tokenizer_class=_TOKENIZER_FOR_DOC, checkpoint=_CHECKPOINT_FOR_DOC, output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(self, input_ids=None, past_key_values=None, attention_mask=None, token... |
@staticmethod
def _reorder_cache(past: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor) -> Tuple[Tuple[torch.Tensor]]:
'\n This function is used to re-order the :obj:`past_key_values` cache if\n :meth:`~transformers.PretrainedModel.beam_search` or :meth:`~transformers.PretrainedModel.beam_sample` i... | -8,147,205,206,558,354,000 | This function is used to re-order the :obj:`past_key_values` cache if
:meth:`~transformers.PretrainedModel.beam_search` or :meth:`~transformers.PretrainedModel.beam_sample` is
called. This is required to match :obj:`past_key_values` with the correct beam_idx at every generation step. | src/stable_library_code/transformers/gpt_neo/modeling_gpt_neo.py | _reorder_cache | Snarp/nostalgebraist-autoresponder | python | @staticmethod
def _reorder_cache(past: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor) -> Tuple[Tuple[torch.Tensor]]:
'\n This function is used to re-order the :obj:`past_key_values` cache if\n :meth:`~transformers.PretrainedModel.beam_search` or :meth:`~transformers.PretrainedModel.beam_sample` i... |
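The `_reorder_cache` body is truncated, but the docstring describes the usual beam-search fix-up: re-index every cached tensor's beam dimension by `beam_idx`. A hedged sketch of that convention (other Hugging Face models implement it with `index_select`; the exact GPT-Neo code may differ):

```python
import torch

def reorder_cache(past, beam_idx):
    # past: tuple (per layer) of tuples of tensors whose dim 0 is the beam dim
    return tuple(
        tuple(state.index_select(0, beam_idx) for state in layer_past)
        for layer_past in past
    )

layer = (torch.randn(4, 8, 16), torch.randn(4, 8, 16))  # toy (key, value) for one layer
reordered = reorder_cache((layer,), torch.tensor([2, 0, 1, 3]))
```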
def is_type(obj):
    'Returns True if obj is a reference to a type.\n\n >>> is_type(1)\n False\n\n >>> is_type(object)\n True\n\n >>> class Klass: pass\n >>> is_type(Klass)\n True\n '
    return isinstance(obj, class_types) | 5,911,129,199,024,651,000 | Returns True if obj is a reference to a type.
>>> is_type(1)
False
>>> is_type(object)
True
>>> class Klass: pass
>>> is_type(Klass)
True | jsonpickle/util.py | is_type | antoinecollet5/Jsonpickle | python | def is_type(obj):
    'Returns True if obj is a reference to a type.\n\n >>> is_type(1)\n False\n\n >>> is_type(object)\n True\n\n >>> class Klass: pass\n >>> is_type(Klass)\n True\n '
    return isinstance(obj, class_types) |
def is_object(obj):
    'Returns True if obj is a reference to an object instance.\n\n >>> is_object(1)\n True\n\n >>> is_object(object())\n True\n\n >>> is_object(lambda x: 1)\n False\n '
    return (isinstance(obj, object) and (not isinstance(obj, (type, types.FunctionType, types.BuiltinFunctionT... | -2,526,840,915,339,155,000 | Returns True if obj is a reference to an object instance.
>>> is_object(1)
True
>>> is_object(object())
True
>>> is_object(lambda x: 1)
False | jsonpickle/util.py | is_object | antoinecollet5/Jsonpickle | python | def is_object(obj):
    'Returns True if obj is a reference to an object instance.\n\n >>> is_object(1)\n True\n\n >>> is_object(object())\n True\n\n >>> is_object(lambda x: 1)\n False\n '
    return (isinstance(obj, object) and (not isinstance(obj, (type, types.FunctionType, types.BuiltinFunctionT... |
def is_primitive(obj):
'Helper method to see if the object is a basic data type. Unicode strings,\n integers, longs, floats, booleans, and None are considered primitive\n and will return True when passed into *is_primitive()*\n\n >>> is_primitive(3)\n True\n >>> is_primitive([4,4])\n False\n '
... | 7,743,021,598,907,132,000 | Helper method to see if the object is a basic data type. Unicode strings,
integers, longs, floats, booleans, and None are considered primitive
and will return True when passed into *is_primitive()*
>>> is_primitive(3)
True
>>> is_primitive([4,4])
False | jsonpickle/util.py | is_primitive | antoinecollet5/Jsonpickle | python | def is_primitive(obj):
'Helper method to see if the object is a basic data type. Unicode strings,\n integers, longs, floats, booleans, and None are considered primitive\n and will return True when passed into *is_primitive()*\n\n >>> is_primitive(3)\n True\n >>> is_primitive([4,4])\n False\n '
... |
def is_enum(obj):
    'Is the object an enum?'
    return (('enum' in sys.modules) and isinstance(obj, sys.modules['enum'].Enum)) | 1,340,873,959,223,885,000 | Is the object an enum? | jsonpickle/util.py | is_enum | antoinecollet5/Jsonpickle | python | def is_enum(obj):
    return (('enum' in sys.modules) and isinstance(obj, sys.modules['enum'].Enum)) |
def is_dictionary(obj):
    "Helper method for testing if the object is a dictionary.\n\n >>> is_dictionary({'key':'value'})\n True\n\n "
    return (type(obj) is dict) | 2,052,502,357,758,371,300 | Helper method for testing if the object is a dictionary.
>>> is_dictionary({'key':'value'})
True | jsonpickle/util.py | is_dictionary | antoinecollet5/Jsonpickle | python | def is_dictionary(obj):
    return (type(obj) is dict) |
def is_sequence(obj):
    'Helper method to see if the object is a sequence (list, set, or tuple).\n\n >>> is_sequence([4])\n True\n\n '
    return (type(obj) in SEQUENCES_SET) | 2,095,652,466,627,002,400 | Helper method to see if the object is a sequence (list, set, or tuple).
>>> is_sequence([4])
True | jsonpickle/util.py | is_sequence | antoinecollet5/Jsonpickle | python | def is_sequence(obj):
    return (type(obj) in SEQUENCES_SET) |
def is_list(obj):
    'Helper method to see if the object is a Python list.\n\n >>> is_list([4])\n True\n '
    return (type(obj) is list) | 3,207,010,466,554,383,000 | Helper method to see if the object is a Python list.
>>> is_list([4])
True | jsonpickle/util.py | is_list | antoinecollet5/Jsonpickle | python | def is_list(obj):
    return (type(obj) is list) |
def is_set(obj):
    'Helper method to see if the object is a Python set.\n\n >>> is_set(set())\n True\n '
    return (type(obj) is set) | -4,346,913,397,758,760,000 | Helper method to see if the object is a Python set.
>>> is_set(set())
True | jsonpickle/util.py | is_set | antoinecollet5/Jsonpickle | python | def is_set(obj):
    return (type(obj) is set) |
def is_bytes(obj):
    "Helper method to see if the object is a bytestring.\n\n >>> is_bytes(b'foo')\n True\n "
    return (type(obj) is bytes) | -4,669,482,203,807,382,000 | Helper method to see if the object is a bytestring.
>>> is_bytes(b'foo')
True | jsonpickle/util.py | is_bytes | antoinecollet5/Jsonpickle | python | def is_bytes(obj):
    return (type(obj) is bytes) |
def is_unicode(obj):
    'Helper method to see if the object is a unicode string'
    return (type(obj) is compat.ustr) | -5,009,300,965,617,864,000 | Helper method to see if the object is a unicode string | jsonpickle/util.py | is_unicode | antoinecollet5/Jsonpickle | python | def is_unicode(obj):
    return (type(obj) is compat.ustr) |
def is_tuple(obj):
    'Helper method to see if the object is a Python tuple.\n\n >>> is_tuple((1,))\n True\n '
    return (type(obj) is tuple) | 5,996,855,661,813,377,000 | Helper method to see if the object is a Python tuple.
>>> is_tuple((1,))
True | jsonpickle/util.py | is_tuple | antoinecollet5/Jsonpickle | python | def is_tuple(obj):
    return (type(obj) is tuple) |
def is_dictionary_subclass(obj):
'Returns True if *obj* is a subclass of the dict type. *obj* must be\n a subclass and not the actual builtin dict.\n\n >>> class Temp(dict): pass\n >>> is_dictionary_subclass(Temp())\n True\n '
return (hasattr(obj, '__class__') and issubclass(obj.__class__, dict) ... | -8,526,177,384,186,611,000 | Returns True if *obj* is a subclass of the dict type. *obj* must be
a subclass and not the actual builtin dict.
>>> class Temp(dict): pass
>>> is_dictionary_subclass(Temp())
True | jsonpickle/util.py | is_dictionary_subclass | antoinecollet5/Jsonpickle | python | def is_dictionary_subclass(obj):
'Returns True if *obj* is a subclass of the dict type. *obj* must be\n a subclass and not the actual builtin dict.\n\n >>> class Temp(dict): pass\n >>> is_dictionary_subclass(Temp())\n True\n '
return (hasattr(obj, '__class__') and issubclass(obj.__class__, dict) ... |
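Note the deliberate asymmetry in these predicates: `is_dictionary` tests `type(obj) is dict`, which rejects subclasses, while `is_dictionary_subclass` accepts exactly those subclasses and excludes plain `dict`. A quick illustration of the distinction:

```python
class Temp(dict):
    pass

assert type({}) is dict            # is_dictionary({})     -> True
assert type(Temp()) is not dict    # is_dictionary(Temp()) -> False
assert issubclass(Temp, dict) and Temp is not dict  # is_dictionary_subclass(Temp()) -> True
```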
def is_sequence_subclass(obj):
'Returns True if *obj* is a subclass of list, set or tuple.\n\n *obj* must be a subclass and not the actual builtin, such\n as list, set, tuple, etc..\n\n >>> class Temp(list): pass\n >>> is_sequence_subclass(Temp())\n True\n '
return (hasattr(obj, '__class__') a... | 6,032,409,357,767,384,000 | Returns True if *obj* is a subclass of list, set or tuple.
*obj* must be a subclass and not the actual builtin, such
as list, set, tuple, etc..
>>> class Temp(list): pass
>>> is_sequence_subclass(Temp())
True | jsonpickle/util.py | is_sequence_subclass | antoinecollet5/Jsonpickle | python | def is_sequence_subclass(obj):
'Returns True if *obj* is a subclass of list, set or tuple.\n\n *obj* must be a subclass and not the actual builtin, such\n as list, set, tuple, etc..\n\n >>> class Temp(list): pass\n >>> is_sequence_subclass(Temp())\n True\n '
return (hasattr(obj, '__class__') a... |
def is_noncomplex(obj):
    'Returns True if *obj* is a special (weird) class, that is more complex\n than primitive data types, but is not a full object. Including:\n\n * :class:`~time.struct_time`\n '
    if (type(obj) is time.struct_time):
        return True
    return False | -8,276,497,009,716,233,000 | Returns True if *obj* is a special (weird) class, that is more complex
than primitive data types, but is not a full object. Including:
* :class:`~time.struct_time` | jsonpickle/util.py | is_noncomplex | antoinecollet5/Jsonpickle | python | def is_noncomplex(obj):
    if (type(obj) is time.struct_time):
        return True
    return False |
def is_function(obj):
'Returns true if passed a function\n\n >>> is_function(lambda x: 1)\n True\n\n >>> is_function(locals)\n True\n\n >>> def method(): pass\n >>> is_function(method)\n True\n\n >>> is_function(1)\n False\n '
function_types = {types.FunctionType, types.MethodType,... | 7,614,756,007,911,644,000 | Returns true if passed a function
>>> is_function(lambda x: 1)
True
>>> is_function(locals)
True
>>> def method(): pass
>>> is_function(method)
True
>>> is_function(1)
False | jsonpickle/util.py | is_function | antoinecollet5/Jsonpickle | python | def is_function(obj):
'Returns true if passed a function\n\n >>> is_function(lambda x: 1)\n True\n\n >>> is_function(locals)\n True\n\n >>> def method(): pass\n >>> is_function(method)\n True\n\n >>> is_function(1)\n False\n '
function_types = {types.FunctionType, types.MethodType,... |
def is_module_function(obj):
'Return True if `obj` is a module-global function\n\n >>> import os\n >>> is_module_function(os.path.exists)\n True\n\n >>> is_module_function(lambda: None)\n False\n\n '
return (hasattr(obj, '__class__') and isinstance(obj, (types.FunctionType, types.BuiltinFuncti... | -5,166,977,073,362,640,000 | Return True if `obj` is a module-global function
>>> import os
>>> is_module_function(os.path.exists)
True
>>> is_module_function(lambda: None)
False | jsonpickle/util.py | is_module_function | antoinecollet5/Jsonpickle | python | def is_module_function(obj):
'Return True if `obj` is a module-global function\n\n >>> import os\n >>> is_module_function(os.path.exists)\n True\n\n >>> is_module_function(lambda: None)\n False\n\n '
return (hasattr(obj, '__class__') and isinstance(obj, (types.FunctionType, types.BuiltinFuncti... |
def is_module(obj):
    'Returns True if passed a module\n\n >>> import os\n >>> is_module(os)\n True\n\n '
    return isinstance(obj, types.ModuleType) | -8,892,213,375,293,908,000 | Returns True if passed a module
>>> import os
>>> is_module(os)
True | jsonpickle/util.py | is_module | antoinecollet5/Jsonpickle | python | def is_module(obj):
    return isinstance(obj, types.ModuleType) |
def is_picklable(name, value):
"Return True if an object can be pickled\n\n >>> import os\n >>> is_picklable('os', os)\n True\n\n >>> def foo(): pass\n >>> is_picklable('foo', foo)\n True\n\n >>> is_picklable('foo', lambda: None)\n False\n\n "
if (name in tags.RESERVED):
retur... | -1,088,638,592,422,786,000 | Return True if an object can be pickled
>>> import os
>>> is_picklable('os', os)
True
>>> def foo(): pass
>>> is_picklable('foo', foo)
True
>>> is_picklable('foo', lambda: None)
False | jsonpickle/util.py | is_picklable | antoinecollet5/Jsonpickle | python | def is_picklable(name, value):
"Return True if an object can be pickled\n\n >>> import os\n >>> is_picklable('os', os)\n True\n\n >>> def foo(): pass\n >>> is_picklable('foo', foo)\n True\n\n >>> is_picklable('foo', lambda: None)\n False\n\n "
if (name in tags.RESERVED):
retur... |
def is_installed(module):
    "Tests to see if ``module`` is available on the sys.path\n\n >>> is_installed('sys')\n True\n >>> is_installed('hopefullythisisnotarealmodule')\n False\n\n "
    try:
        __import__(module)
        return True
    except ImportError:
        return False | -7,763,117,679,556,954,000 | Tests to see if ``module`` is available on the sys.path
>>> is_installed('sys')
True
>>> is_installed('hopefullythisisnotarealmodule')
False | jsonpickle/util.py | is_installed | antoinecollet5/Jsonpickle | python | def is_installed(module):
    try:
        __import__(module)
        return True
    except ImportError:
        return False |
def is_reducible(obj):
'\n Returns false if of a type which have special casing,\n and should not have their __reduce__ methods used\n '
if (is_collections(obj) and (not isinstance(obj, collections.defaultdict))):
return True
if (type(obj) in NON_REDUCIBLE_TYPES):
return False
e... | 5,660,725,338,134,013,000 | Returns false if of a type which have special casing,
and should not have their __reduce__ methods used | jsonpickle/util.py | is_reducible | antoinecollet5/Jsonpickle | python | def is_reducible(obj):
'\n Returns false if of a type which have special casing,\n and should not have their __reduce__ methods used\n '
if (is_collections(obj) and (not isinstance(obj, collections.defaultdict))):
return True
if (type(obj) in NON_REDUCIBLE_TYPES):
return False
e... |
def in_dict(obj, key, default=False):
    '\n Returns true if key exists in obj.__dict__; false if not in.\n If obj.__dict__ is absent, return default\n '
    return ((key in obj.__dict__) if getattr(obj, '__dict__', None) else default) | 2,998,211,634,569,577,500 | Returns true if key exists in obj.__dict__; false if not in.
If obj.__dict__ is absent, return default | jsonpickle/util.py | in_dict | antoinecollet5/Jsonpickle | python | def in_dict(obj, key, default=False):
    return ((key in obj.__dict__) if getattr(obj, '__dict__', None) else default) |
def in_slots(obj, key, default=False):
'\n Returns true if key exists in obj.__slots__; false if not in.\n If obj.__slots__ is absent, return default\n '
return ((key in obj.__slots__) if getattr(obj, '__slots__', None) else default) | -7,808,203,502,761,476,000 | Returns true if key exists in obj.__slots__; false if not in.
If obj.__slots__ is absent, return default | jsonpickle/util.py | in_slots | antoinecollet5/Jsonpickle | python | def in_slots(obj, key, default=False):
'\n Returns true if key exists in obj.__slots__; false if not in.\n If obj.__slots__ is absent, return default\n '
return ((key in obj.__slots__) if getattr(obj, '__slots__', None) else default) |
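Both helpers can be exercised directly, since their full bodies are shown above:

    from jsonpickle.util import in_dict, in_slots

    class WithDict:
        def __init__(self):
            self.x = 1

    class WithSlots:
        __slots__ = ('y',)

    print(in_dict(WithDict(), 'x'))    # True: 'x' is in the instance __dict__
    print(in_dict(WithDict(), 'z'))    # False
    print(in_slots(WithSlots(), 'y'))  # True: 'y' is declared in __slots__
    print(in_slots(WithDict(), 'y'))   # False: no __slots__, default returned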
def has_reduce(obj):
'\n Tests if __reduce__ or __reduce_ex__ exists in the object dict or\n in the class dicts of every class in the MRO *except object*.\n\n Returns a tuple of booleans (has_reduce, has_reduce_ex)\n '
if ((not is_reducible(obj)) or is_type(obj)):
return (False, False)
i... | 6,952,400,020,244,919,000 | Tests if __reduce__ or __reduce_ex__ exists in the object dict or
in the class dicts of every class in the MRO *except object*.
Returns a tuple of booleans (has_reduce, has_reduce_ex) | jsonpickle/util.py | has_reduce | antoinecollet5/Jsonpickle | python | def has_reduce(obj):
'\n Tests if __reduce__ or __reduce_ex__ exists in the object dict or\n in the class dicts of every class in the MRO *except object*.\n\n Returns a tuple of booleans (has_reduce, has_reduce_ex)\n '
if ((not is_reducible(obj)) or is_type(obj)):
return (False, False)
i... |
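A small demonstration of the tuple this returns, assuming jsonpickle is installed; the expected values follow from the MRO scan described in the docstring:

    from jsonpickle.util import has_reduce

    class Plain:
        pass

    class Reducer:
        def __reduce__(self):
            return (Reducer, ())

    # object's default __reduce__/__reduce_ex__ are deliberately ignored.
    print(has_reduce(Plain()))    # expected (False, False)
    print(has_reduce(Reducer()))  # expected (True, False)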
def translate_module_name(module):
'Rename builtin modules to a consistent module name.\n\n Prefer the more modern naming.\n\n This is used so that references to Python\'s `builtins` module can\n be loaded in both Python 2 and 3. We remap to the "__builtin__"\n name and unmap it when importing.\n\n ... | 4,901,060,085,934,409,000 | Rename builtin modules to a consistent module name.
Prefer the more modern naming.
This is used so that references to Python's `builtins` module can
be loaded in both Python 2 and 3. We remap to the "__builtin__"
name and unmap it when importing.
Map the Python2 `exceptions` module to `builtins` because
`builtins` ... | jsonpickle/util.py | translate_module_name | antoinecollet5/Jsonpickle | python | def translate_module_name(module):
'Rename builtin modules to a consistent module name.\n\n Prefer the more modern naming.\n\n This is used so that references to Python\'s `builtins` module can\n be loaded in both Python 2 and 3. We remap to the "__builtin__"\n name and unmap it when importing.\n\n ... |
def untranslate_module_name(module):
'Rename module names mentioned in JSON to names that we can import\n\n This reverses the translation applied by translate_module_name() to\n a module name available to the current version of Python.\n\n '
module = _0_9_6_compat_untranslate(module)
lookup = (dict(b... | -6,730,269,842,887,094,000 | Rename module names mentioned in JSON to names that we can import
This reverses the translation applied by translate_module_name() to
a module name available to the current version of Python. | jsonpickle/util.py | untranslate_module_name | antoinecollet5/Jsonpickle | python | def untranslate_module_name(module):
'Rename module names mentioned in JSON to names that we can import\n\n This reverses the translation applied by translate_module_name() to\n a module name available to the current version of Python.\n\n '
module = _0_9_6_compat_untranslate(module)
lookup = (dict(b... |
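On Python 3 the mapping collapses both legacy names onto builtins, which can be verified directly (on Python 2 the truncated lookup may apply a further remap):

    from jsonpickle.util import untranslate_module_name

    print(untranslate_module_name('__builtin__'))  # 'builtins'
    print(untranslate_module_name('exceptions'))   # 'builtins'
    print(untranslate_module_name('collections'))  # unchanged: 'collections'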
def _0_9_6_compat_untranslate(module):
'Provide compatibility for pickles created with jsonpickle 0.9.6 and\n earlier, remapping `exceptions` and `__builtin__` to `builtins`.\n '
lookup = dict(__builtin__='builtins', exceptions='builtins')
return lookup.get(module, module) | 329,968,985,575,753,200 | Provide compatibility for pickles created with jsonpickle 0.9.6 and
earlier, remapping `exceptions` and `__builtin__` to `builtins`. | jsonpickle/util.py | _0_9_6_compat_untranslate | antoinecollet5/Jsonpickle | python | def _0_9_6_compat_untranslate(module):
'Provide compatibility for pickles created with jsonpickle 0.9.6 and\n earlier, remapping `exceptions` and `__builtin__` to `builtins`.\n '
lookup = dict(__builtin__='builtins', exceptions='builtins')
return lookup.get(module, module) |
def importable_name(cls):
"\n >>> class Example(object):\n ... pass\n\n >>> ex = Example()\n >>> importable_name(ex.__class__) == 'jsonpickle.util.Example'\n True\n >>> importable_name(type(25)) == 'builtins.int'\n True\n >>> importable_name(None.__class__) == 'builtins.NoneType'\n Tr... | 5,324,702,748,341,663,000 | >>> class Example(object):
... pass
>>> ex = Example()
>>> importable_name(ex.__class__) == 'jsonpickle.util.Example'
True
>>> importable_name(type(25)) == 'builtins.int'
True
>>> importable_name(None.__class__) == 'builtins.NoneType'
True
>>> importable_name(False.__class__) == 'builtins.bool'
True
>>> importable... | jsonpickle/util.py | importable_name | antoinecollet5/Jsonpickle | python | def importable_name(cls):
"\n >>> class Example(object):\n ... pass\n\n >>> ex = Example()\n >>> importable_name(ex.__class__) == 'jsonpickle.util.Example'\n True\n >>> importable_name(type(25)) == 'builtins.int'\n True\n >>> importable_name(None.__class__) == 'builtins.NoneType'\n Tr... |
def b64encode(data):
'\n Encode binary data to ascii text in base64. Data must be bytes.\n '
return base64.b64encode(data).decode('ascii') | 5,550,798,368,815,420,000 | Encode binary data to ascii text in base64. Data must be bytes. | jsonpickle/util.py | b64encode | antoinecollet5/Jsonpickle | python | def b64encode(data):
'\n \n '
return base64.b64encode(data).decode('ascii') |
def b64decode(payload):
'\n Decode payload - must be ascii text.\n '
return base64.b64decode(payload) | 3,385,305,434,666,332,000 | Decode payload - must be ascii text. | jsonpickle/util.py | b64decode | antoinecollet5/Jsonpickle | python | def b64decode(payload):
'\n \n '
return base64.b64decode(payload) |
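A round trip through the pair above; the encoded form is plain ASCII text, so it can be embedded safely in a JSON document:

    from jsonpickle.util import b64decode, b64encode

    raw = b'\x00\x01 arbitrary bytes'
    payload = b64encode(raw)          # str of ASCII characters
    print(payload)
    print(b64decode(payload) == raw)  # True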
def b85encode(data):
'\n Encode binary data to ascii text in base85. Data must be bytes.\n '
if PY2:
raise NotImplementedError("Python 2 can't encode data in base85.")
return base64.b85encode(data).decode('ascii') | 8,403,727,459,595,849,000 | Encode binary data to ascii text in base85. Data must be bytes. | jsonpickle/util.py | b85encode | antoinecollet5/Jsonpickle | python | def b85encode(data):
'\n \n '
if PY2:
raise NotImplementedError("Python 2 can't encode data in base85.")
return base64.b85encode(data).decode('ascii') |
def b85decode(payload):
'\n Decode payload - must be ascii text.\n '
if PY2:
raise NotImplementedError("Python 2 can't decode base85-encoded data.")
return base64.b85decode(payload) | -8,605,121,480,186,929,000 | Decode payload - must be ascii text. | jsonpickle/util.py | b85decode | antoinecollet5/Jsonpickle | python | def b85decode(payload):
'\n \n '
if PY2:
raise NotImplementedError("Python 2 can't decode base85-encoded data.")
return base64.b85decode(payload) |
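The base85 pair works the same way but produces roughly 25% smaller output; on Python 2 both functions raise NotImplementedError, as shown above:

    from jsonpickle.util import b85decode, b85encode

    raw = b'some binary \x00 data'
    payload = b85encode(raw)          # denser than base64, still ASCII
    print(b85decode(payload) == raw)  # True (Python 3 only)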
def items(obj):
'Iterate over dicts in a deterministic order\n\n Python2 does not guarantee dict ordering, so this function\n papers over the difference in behavior. Python3 does guarantee\n dict order, without use of OrderedDict, so no sorting is needed there.\n\n '
if PY3_ORDERED_DICT:
fo... | -1,443,848,681,206,376,000 | Iterate over dicts in a deterministic order
Python2 does not guarantee dict ordering, so this function
papers over the difference in behavior. Python3 does guarantee
dict order, without use of OrderedDict, so no sorting is needed there. | jsonpickle/util.py | items | antoinecollet5/Jsonpickle | python | def items(obj):
'Iterate over dicts in a deterministic order\n\n Python2 does not guarantee dict ordering, so this function\n papers over the difference in behavior. Python3 does guarantee\n dict order, without use of OrderedDict, so no sorting is needed there.\n\n '
if PY3_ORDERED_DICT:
fo... |
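On Python 3 the helper simply yields pairs in insertion order; on Python 2 it sorts them, so the output is deterministic either way:

    from jsonpickle.util import items

    d = {'b': 2, 'a': 1}
    for key, value in items(d):
        # Python 3: insertion order ('b' then 'a'); Python 2: sorted order.
        print(key, value)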
def train_args():
"\n Retrieves and parses the 3 command line arguments provided by the user when\n they run the program from a terminal window. This function uses Python's\n argparse module to created and defined these 3 command line arguments. If\n the user fails to provide some or all... | 1,974,434,042,351,910,100 | Retrieves and parses the 3 command line arguments provided by the user when
they run the program from a terminal window. This function uses Python's
argparse module to create and define these 3 command line arguments. If
the user fails to provide some or all of the 3 arguments, then the default
values are used for th... | train_args.py | train_args | victoray/ImageClasssifier | python | def train_args():
"\n Retrieves and parses the 3 command line arguments provided by the user when\n they run the program from a terminal window. This function uses Python's\n argparse module to created and defined these 3 command line arguments. If\n the user fails to provide some or all... |
def get_app(self):
' Overrides method of AsyncHTTPTestCase.\n Returns:\n an instance of the Tornado application\n '
distributed_taskqueue = None
return appscale_taskqueue.prepare_taskqueue_application(task_queue=distributed_taskqueue) | 4,512,339,946,284,686,300 | Overrides method of AsyncHTTPTestCase.
Returns:
an instance of the Tornado application | AppTaskQueue/test/unit/test_service_stats.py | get_app | HafeezRai/appscale | python | def get_app(self):
' Overrides method of AsyncHTTPTestCase.\n Returns:\n an instance of the Tornado application\n '
distributed_taskqueue = None
return appscale_taskqueue.prepare_taskqueue_application(task_queue=distributed_taskqueue) |
def setUp(self):
' Patches handlers of Taskqueue application in order\n to prevent real calls to Cassandra and Datastore because only\n service statistics matter for this test.\n '
super(TestServiceStatistics, self).setUp()
handlers = [rest_api.RESTQueue, rest_api.RESTTask, rest_api.RESTLease, res... | -598,522,386,944,669,300 | Patches handlers of Taskqueue application in order
to prevent real calls to Cassandra and Datastore because only
service statistics matter for this test. | AppTaskQueue/test/unit/test_service_stats.py | setUp | HafeezRai/appscale | python | def setUp(self):
' Patches handlers of Taskqueue application in order\n to prevent real calls to Cassandra and Datastore because only\n service statistics matter for this test.\n '
super(TestServiceStatistics, self).setUp()
handlers = [rest_api.RESTQueue, rest_api.RESTTask, rest_api.RESTLease, res... |
def generate_extension(ext_def):
'Generate extension constructors.'
assert ('name' in ext_def), 'invalid extension name'
ext_path = (ext_def['name'].replace('.', os.path.sep) + '.pyx')
ext_root = os.path.dirname(ext_path)
ext_def['sources'] = [ext_path]
if ('extra_objects' in ext_def):
i... | -7,423,803,714,519,988,000 | Generate extension constructors. | setup.py | generate_extension | liuyenting/olive-camera-dcamapi | python | def generate_extension(ext_def):
assert ('name' in ext_def), 'invalid extension name'
ext_path = (ext_def['name'].replace('.', os.path.sep) + '.pyx')
ext_root = os.path.dirname(ext_path)
ext_def['sources'] = [ext_path]
if ('extra_objects' in ext_def):
if (not sys.platform.startswith('li... |
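Putting the visible fragment to work: the path derivation follows the name.replace('.', os.path.sep) + '.pyx' rule shown above, while finishing with a setuptools Extension is an assumption about the truncated tail of the function:

    import os
    from setuptools import Extension

    def generate_extension(ext_def):
        'Generate extension constructors.'
        assert 'name' in ext_def, 'invalid extension name'
        # Derive the Cython source path from the dotted module name.
        ext_path = ext_def['name'].replace('.', os.path.sep) + '.pyx'
        ext_root = os.path.dirname(ext_path)  # used by the truncated remainder
        ext_def['sources'] = [ext_path]
        return Extension(**ext_def)  # assumed ending; the original is truncated

    ext = generate_extension({'name': 'olive.camera.dcamapi'})
    print(ext.sources)  # ['olive/camera/dcamapi.pyx'] on POSIX systems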
def test_simple_iter_clauses(self):
'Test basic expression iter_clauses functionality.'
b = BooleanExpression('A or B or C or D')
self.assertTrue(b.is_cnf)
self.assertTrue(b.is_dnf)
clauses = b.iter_clauses()
self.assertEqual(repr(next(clauses)), '<BooleanExpression "A or B or C or D">')
wit... | -6,528,639,060,011,016,000 | Test basic expression iter_clauses functionality. | tt/tests/unit/expressions/test_bexpr_iter_clauses.py | test_simple_iter_clauses | fkromer/tt | python | def test_simple_iter_clauses(self):
b = BooleanExpression('A or B or C or D')
self.assertTrue(b.is_cnf)
self.assertTrue(b.is_dnf)
clauses = b.iter_clauses()
self.assertEqual(repr(next(clauses)), '<BooleanExpression "A or B or C or D">')
with self.assertRaises(StopIteration):
next(cl... |
def test_simple_iter_cnf(self):
'Test basic expression iter_cnf_clauses functionality.'
b = BooleanExpression('(A or B) and (C or D) and (E or F)')
self.assertTrue(b.is_cnf)
self.assertFalse(b.is_dnf)
clauses = b.iter_cnf_clauses()
self.assertEqual(repr(next(clauses)), '<BooleanExpression "A or ... | -743,596,002,143,586,800 | Test basic expression iter_cnf_clauses functionality. | tt/tests/unit/expressions/test_bexpr_iter_clauses.py | test_simple_iter_cnf | fkromer/tt | python | def test_simple_iter_cnf(self):
b = BooleanExpression('(A or B) and (C or D) and (E or F)')
self.assertTrue(b.is_cnf)
self.assertFalse(b.is_dnf)
clauses = b.iter_cnf_clauses()
self.assertEqual(repr(next(clauses)), '<BooleanExpression "A or B">')
self.assertEqual(repr(next(clauses)), '<Boole... |
def test_simple_iter_dnf(self):
'Test basic expression iter_dnf_clauses functionality.'
b = BooleanExpression('(A and B) or (C and D) or (E and F)')
self.assertTrue(b.is_dnf)
self.assertFalse(b.is_cnf)
clauses = b.iter_dnf_clauses()
self.assertEqual(repr(next(clauses)), '<BooleanExpression "A an... | 5,606,984,770,953,174,000 | Test basic expression iter_dnf_clauses functionality. | tt/tests/unit/expressions/test_bexpr_iter_clauses.py | test_simple_iter_dnf | fkromer/tt | python | def test_simple_iter_dnf(self):
b = BooleanExpression('(A and B) or (C and D) or (E and F)')
self.assertTrue(b.is_dnf)
self.assertFalse(b.is_cnf)
clauses = b.iter_dnf_clauses()
self.assertEqual(repr(next(clauses)), '<BooleanExpression "A and B">')
self.assertEqual(repr(next(clauses)), '<Boo... |
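The three tests above boil down to one behavior: iter_clauses yields the CNF split when the expression is conjunctive and the DNF split when it is disjunctive. A short usage sketch, assuming the tt package is installed:

    from tt import BooleanExpression

    b = BooleanExpression('(A or B) and (C or D)')
    for clause in b.iter_clauses():  # CNF here, so clauses are the disjunctions
        print(clause)                # expected: "A or B", then "C or D"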
def init(self, permits):
'\n Try to initialize this Semaphore instance with the given permit count.\n\n :param permits: (int), the given permit count.\n :return: (bool), ``true`` if initialization succeeds.\n '
check_not_negative(permits, 'Permits cannot be negative!')
return self... | -4,474,863,822,055,226,400 | Try to initialize this Semaphore instance with the given permit count.
:param permits: (int), the given permit count.
:return: (bool), ``true`` if initialization succeeds. | hazelcast/proxy/semaphore.py | init | Kilo59/hazelcast-python-client | python | def init(self, permits):
'\n Try to initialize this Semaphore instance with the given permit count.\n\n :param permits: (int), the given permit count.\n :return: (bool), ``true`` if initialization succeeds.\n '
check_not_negative(permits, 'Permits cannot be negative!')
return self... |
def acquire(self, permits=1):
'\n Acquires one or a specified number of permits if available, and returns immediately, reducing the number of\n available permits by one or the given amount.\n\n If insufficient permits are available, then the current thread becomes disabled for thread scheduling purpos... | -3,143,300,385,678,641,700 | Acquires one or a specified number of permits if available, and returns immediately, reducing the number of
available permits by one or the given amount.
If insufficient permits are available, then the current thread becomes disabled for thread scheduling purposes
and lies dormant until one of the following happens:
* some ... | hazelcast/proxy/semaphore.py | acquire | Kilo59/hazelcast-python-client | python | def acquire(self, permits=1):
'\n Acquires one or a specified number of permits if available, and returns immediately, reducing the number of\n available permits by one or the given amount.\n\n If insufficient permits are available, then the current thread becomes disabled for thread scheduling purpos...
def available_permits(self):
'\n Returns the number of permits currently available in this semaphore.\n\n * This method is typically used for debugging and testing purposes.\n :return: (int), the number of available permits in this semaphore.\n '
return self._encode_invok... | 2,166,608,214,720,584,700 | Returns the number of permits currently available in this semaphore.
* This method is typically used for debugging and testing purposes.
:return: (int), the number of available permits in this semaphore. | hazelcast/proxy/semaphore.py | available_permits | Kilo59/hazelcast-python-client | python | def available_permits(self):
'\n Returns the number of permits currently available in this semaphore.\n\n * This method is typically used for debugging and testing purposes.\n :return: (int), the number of available permits in this semaphore.\n '
return self._encode_invok... |
def drain_permits(self):
'\n Acquires and returns all permits that are immediately available.\n\n :return: (int), the number of permits drained.\n '
return self._encode_invoke(semaphore_drain_permits_codec) | 294,431,509,549,184,200 | Acquires and returns all permits that are immediately available.
:return: (int), the number of permits drained. | hazelcast/proxy/semaphore.py | drain_permits | Kilo59/hazelcast-python-client | python | def drain_permits(self):
'\n Acquires and returns all permits that are immediately available.\n\n :return: (int), the number of permits drained.\n '
return self._encode_invoke(semaphore_drain_permits_codec) |
def reduce_permits(self, reduction):
'\n Shrinks the number of available permits by the indicated reduction. This method differs from acquire in that it\n does not block waiting for permits to become available.\n\n :param reduction: (int), the number of permits to remove.\n '
check_... | -908,642,113,753,615,700 | Shrinks the number of available permits by the indicated reduction. This method differs from acquire in that it
does not block waiting for permits to become available.
:param reduction: (int), the number of permits to remove. | hazelcast/proxy/semaphore.py | reduce_permits | Kilo59/hazelcast-python-client | python | def reduce_permits(self, reduction):
'\n Shrinks the number of available permits by the indicated reduction. This method differs from acquire in that it\n does not block waiting for permits to become available.\n\n :param reduction: (int), the number of permits to remove.\n '
check_... |
def release(self, permits=1):
'\n Releases one or given number of permits, increasing the number of available permits by one or that amount.\n\n There is no requirement that a thread that releases a permit must have acquired that permit by calling one of\n the acquire methods. Correct usage of ... | -1,641,383,373,656,564,200 | Releases one or given number of permits, increasing the number of available permits by one or that amount.
There is no requirement that a thread that releases a permit must have acquired that permit by calling one of
the acquire methods. Correct usage of a semaphore is established by programming convention in the appl... | hazelcast/proxy/semaphore.py | release | Kilo59/hazelcast-python-client | python | def release(self, permits=1):
'\n Releases one or given number of permits, increasing the number of available permits by one or that amount.\n\n There is no requirement that a thread that releases a permit must have acquired that permit by calling one of\n the acquire methods. Correct usage of ... |
def try_acquire(self, permits=1, timeout=0):
'\n Tries to acquire one or the given number of permits, if they are available, and returns immediately, with the\n value ``true``, reducing the number of available permits by the given amount.\n\n If there are insufficient permits and a timeout is p... | -9,216,870,520,673,875,000 | Tries to acquire one or the given number of permits, if they are available, and returns immediately, with the
value ``true``, reducing the number of available permits by the given amount.
If there are insufficient permits and a timeout is provided, the current thread becomes disabled for thread
scheduling purposes and... | hazelcast/proxy/semaphore.py | try_acquire | Kilo59/hazelcast-python-client | python | def try_acquire(self, permits=1, timeout=0):
'\n Tries to acquire one or the given number of permits, if they are available, and returns immediately, with the\n value ``true``, reducing the number of available permits by the given amount.\n\n If there are insufficient permits and a timeout is p... |
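A usage sketch tying the semaphore methods above together; the get_semaphore accessor, the blocking .result() calls, and the cluster setup are assumptions based on the 3.x-era Hazelcast Python client this proxy belongs to:

    import hazelcast

    client = hazelcast.HazelcastClient()
    semaphore = client.get_semaphore('resource-pool')

    semaphore.init(3).result()  # seed with 3 permits (only succeeds once)
    if semaphore.try_acquire(permits=1, timeout=5).result():
        try:
            pass  # work with the guarded resource
        finally:
            semaphore.release(1).result()
    print(semaphore.available_permits().result())

    client.shutdown()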
def __init__(self, node='', rpcuser=None, rpcpassword=None, debug=False, data_refresh_time_seconds=900, **kwargs):
'Init crea\n\n :param str node: Node to connect to *(optional)*\n :param str rpcuser: RPC user *(optional)*\n :param str rpcpassword: RPC password *(optional)*\n ... | 8,843,852,646,283,711,000 | Init crea
:param str node: Node to connect to *(optional)*
:param str rpcuser: RPC user *(optional)*
:param str rpcpassword: RPC password *(optional)*
:param bool nobroadcast: Do **not** broadcast a transaction!
*(optional)*
:param bool unsigned: Do **not** sign a transaction! *(optional)*
:param bool debug: Enabl... | crea/crea.py | __init__ | creativechain/crea-python-lib | python | def __init__(self, node=, rpcuser=None, rpcpassword=None, debug=False, data_refresh_time_seconds=900, **kwargs):
'Init crea\n\n :param str node: Node to connect to *(optional)*\n :param str rpcuser: RPC user *(optional)*\n :param str rpcpassword: RPC password *(optional)*\n ... |
def connect(self, node='', rpcuser='', rpcpassword='', **kwargs):
' Connect to Crea network (internal use only)\n '
if (not node):
node = self.get_default_nodes()
if (not bool(node)):
raise ValueError('A Crea node needs to be provided!')
if ((not rpcuser) and ('rpcuser' in... | 3,630,909,131,526,738,400 | Connect to Crea network (internal use only) | crea/crea.py | connect | creativechain/crea-python-lib | python | def connect(self, node=, rpcuser=, rpcpassword=, **kwargs):
' \n '
if (not node):
node = self.get_default_nodes()
if (not bool(node)):
raise ValueError('A Crea node needs to be provided!')
if ((not rpcuser) and ('rpcuser' in config)):
rpcuser = config['rpcuser']
... |
def is_connected(self):
'Returns if rpc is connected'
return (self.rpc is not None) | -5,560,205,291,248,361,000 | Returns if rpc is connected | crea/crea.py | is_connected | creativechain/crea-python-lib | python | def is_connected(self):
return (self.rpc is not None) |
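A connection sketch for the constructor and the connect/is_connected pair above; the import path and node URL are illustrative assumptions:

    from crea import Crea  # assumed top-level export

    # nobroadcast keeps this safe: no transaction will actually be sent.
    crea = Crea(node='https://nodes.creativechain.net', nobroadcast=True)
    print(crea.is_connected())  # True once the RPC connection is established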