text stringlengths 48 9.16k |
|---|
def zunionstore(self, dest, keys, aggregate=None):
"""
Union multiple sorted sets specified by ``keys`` into
a new sorted set, ``dest``. Scores in the destination will be
aggregated based on the ``aggregate``, or SUM if none is provided.
"""
return self._zaggregate('ZUNIONSTORE', dest, keys, agg... |
def wait(self, num_replicas, timeout):
    """
    Synchronous replication barrier (Redis WAIT).

    Blocks until at least ``num_replicas`` replicas have acknowledged
    all preceding write commands, or ``timeout`` milliseconds elapse,
    whichever comes first. Returns the number of replicas that
    processed the writes.
    """
    return self.execute_command('WAIT', num_replicas, timeout)
def tab_insert(self, e): # (M-TAB)
    '''Insert whitespace up to the next tab stop.

    The cursor position is clamped to the buffer length, and enough
    spaces are inserted to reach the following multiple of
    ``self.tabstop``.
    '''
    point = min(self.l_buffer.point, len(self.l_buffer.line_buffer))
    pad = self.tabstop - (point % self.tabstop)
    self.insert_text(' ' * pad)
def update(self, headers):
"This method is a public interface for a throttle storage class"
self.prune()
if 'x-throttling-control' not in headers:
return
status = self.parse_throttle(headers['x-throttling-control'])
retry_after = int(headers.get('retry-after', 0))
sql, values = self.con... |
def brpoplpush(self, src, dst, timeout=0):
"""
Pop a value off the tail of ``src``, push it on the head of ``dst``
and then return it.
This command blocks until a value is in ``src`` or until ``timeout``
seconds elapse, whichever is first. A ``timeout`` value of 0 blocks
forever.
"""
if... |
def __init__(self, parent = None):
    """Creates a filter instance, allowing applications to set the
    parent on instantiation.

    parent: the upstream XMLReader this filter delegates to; may be
        None here and supplied later (NOTE(review): assumed from the
        SAX filter convention -- confirm against the enclosing class).
    """
    # Initialize the XMLReader base state before storing the parent.
    XMLReader.__init__(self)
    self._parent = parent
def sendData(sendstr,commandId):
HEAD_0 = chr(0)
HEAD_1 = chr(0)
HEAD_2 = chr(0)
HEAD_3 = chr(0)
ProtoVersion = chr(0)
ServerVersion = 0
sendstr = sendstr
data = struct.pack('!sssss3I',HEAD_0,HEAD_1,HEAD_2,\
HEAD_3,ProtoVersion,ServerVersion,\
... |
def inner(request, id, slug=''):
    """Resolve the object by primary key and canonicalize its URL.

    Issues a permanent redirect to the object's canonical URL when the
    requested path differs from it; otherwise delegates to the wrapped
    view with the resolved instance.
    """
    instance = get_object_or_404(model, pk=id)
    if request.path != instance.get_absolute_url():
        return redirect(instance, permanent=True)
    return view(request, instance)
def __init__(self):
self.allow_ctrl_c = False
self.ctrl_c_tap_time_interval = 0.3
self.debug = False
self.bell_style = 'none'
self.mark = -1
self.console=MockConsole()
self.disable_readline = False
# this code needs to follow l_buffer and history creation
self.editingmodes = [mode(s... |
def __init__(self, items=None):
r"""Creates a new SQLQuery.
>>> SQLQuery("x")
<sql: 'x'>
>>> q = SQLQuery(['SELECT * FROM ', 'test', ' WHERE x=', SQLParam(1)])
>>> q
<sql: 'SELECT * FROM test WHERE x=1'>
>>> q.query(), q.values()
('SELECT * FROM test WHER... |
def getTerminalSize():
import os
from os import environ as env
def ioctl_GWINSZ(fd):
try:
import fcntl, termios, struct, os
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
'1234'))
except:
return
return cr
cr = ioc... |
def get_command_class(command):
"""
Called from runCLI() to select the command class for the selected command.
:param command: The command to be implemented
:return: The command class corresponding to the selected command
"""
from scrapple.commands import genconfig, generate, run, web
cmdCl... |
@contextmanager
def mock_launchctl(self, exists=True):
from mock import patch
patch1 = patch('subprocess.check_output')
with patch1 as check_output:
if exists:
check_output.side_effect = mock_check_output
else:
check_output.side_effect = IOError(2, "err")
yie... |
def _clone(self, *args, **kwargs):
    # Clone via the parent QuerySet implementation, then carry over
    # _easymodel, which Django's stock _clone knows nothing about.
    c = super(EasyQuerySet, self)._clone(*args, **kwargs)
    c._easymodel = self._easymodel
    return c
def setFilename(self, filename):
    """Record ``filename`` and flag that one is present.

    A falsy value (empty string or None) is ignored and leaves the
    current state untouched.
    """
    if not filename:
        return
    self.hasFilename = True
    self.filenameValue = filename
def get_root():
# we require that all commands are run from the project root, i.e. the
# directory that contains setup.py, setup.cfg, and versioneer.py .
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py"... |
@classmethod
def get_host_with_role(cls, keyname, role):
"""Searches through the local metadata to see which virtual machine runs the
specified role.
Args:
keyname: The SSH keypair name that uniquely identifies this AppScale
deployment.
role: A str indicating the role to search for.
Returns:
... |
def test_module(self):
    # Round-trip a function that reads a module-level global
    # (sys.version) through serialization, then verify the
    # deserialized copy still resolves that global via the freshly
    # constructed globals dict.
    serialized = _serialize_func(reads_module)
    glbs = self.make_globals()
    f = _deserialize_func(serialized, glbs)
    self.assertEqual(f(), sys.version)
def removeNodes(self, numNodes=1, preemptable=False):
    """Delegate node removal to the scaler chosen by ``preemptable``."""
    scaler = self._pick(preemptable)
    scaler.removeNodes(numNodes=numNodes)
def testDestroyWithEC2EnvironmentVariables(self):
# if the user wants us to use their EC2 credentials when running AppScale,
# we should make sure they get set
appscale = AppScale()
# Mock out the actual file reading itself, and slip in a YAML-dumped
# file
contents = {
'infrastructure' : 'ec2',
'm... |
def _create_container(self, prefix='container'):
    # Derive a unique container name from the prefix, create that
    # container on the storage service, and return the name so the
    # caller can use (and later clean up) the container.
    container_name = self._get_container_reference(prefix)
    self.service.create_container(container_name)
    return container_name
@contextmanager
def writeSharedFileStream(self, sharedFileName, isProtected=None):
assert self._validateSharedFileName(sharedFileName)
info = self.FileInfo.loadOrCreate(jobStoreFileID=self._sharedFileID(sharedFileName),
ownerID=str(self.sharedFileOwnerID),
... |
def __init__(self, wrapped_file):
    # Thin wrapper: just hold a reference to the underlying file object.
    self.wrapped_file = wrapped_file
def _parse_lease_time(response):
    '''
    Extracts the lease time from the x-ms-lease-time response header.

    Returns None when the header is absent (or empty); otherwise the
    header value converted via _int_to_str.
    '''
    raw_headers = _parse_response_for_dict(response)
    lease_time = raw_headers.get('x-ms-lease-time')
    if lease_time:
        lease_time = _int_to_str(lease_time)
    return lease_time
def removeNodes(self, numNodes=1):
    """Stop up to ``numNodes`` workers, newest first, adding each
    worker's reported run time to ``totalWorkerTime``."""
    remaining = numNodes
    while self.workers and remaining > 0:
        stopped = self.workers.pop()
        self.totalWorkerTime += stopped.stop()
        remaining -= 1
def decorateTitle(title, options):
    """Return TITLE with a trailing '*' when it is the active sort
    category (compared case-insensitively)."""
    is_sort_key = title.lower() == options.sortCategory
    return "%s*" % title if is_sort_key else title
def clean(self, jobCache=None):
"""
Function to cleanup the state of a job store after a restart.
Fixes jobs that might have been partially updated. Resets the try counts and removes jobs
that are not successors of the current root job.
:param dict[str,toil.jobWrapper.JobWrapper] jobCache: if a val... |
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub ... |
def db_type(self, connection):
    # Validate the backend first (presumably rejects databases without
    # ltree support -- confirm in _check_database_backend); the column
    # type is always PostgreSQL's 'ltree'.
    self._check_database_backend(connection.alias)
    return 'ltree'
def visit_FunctionDef(self, node):
    """Record (function name, argument names) on the signature list,
    dropping the first argument (conventionally ``self``)."""
    params = [param.id for param in node.args.args]
    self.signatures.append((node.name, params[1:]))
@wraps(fn)
def pretty_dump_wrapper(*args, **kwargs):
    """Invoke the wrapped handler and serialize its result as
    pretty-printed JSON, setting the response content type."""
    response.content_type = "application/json; charset=utf-8"
    payload = fn(*args, **kwargs)
    return json.dumps(payload, indent=4, separators=(',', ': '))
def test_log_rank_returns_None_if_equal_arrays():
    # Comparing a sample against itself must never be significant,
    # both without and with a (shared) censorship vector.
    # NOTE(review): keep the draws and calls in this order -- they
    # consume a shared global random stream.
    T = np.random.exponential(5, size=200)
    result = stats.logrank_test(T, T, alpha=0.95)
    assert not result.is_significant
    C = np.random.binomial(2, 0.8, size=200)
    result = stats.logrank_test(T, T, C, C, alpha=0.95)
    assert not result.is_significant
def prepareSbatch(self, cpu, mem, jobID):
# Returns the sbatch command line before the script to run
sbatch_line = ['sbatch', '-Q', '-J', 'toil_job_{}'.format(jobID)]
if self.boss.environment:
for k, v in self.boss.environment.iteritems():
quoted_value = quote(os.environ[k] if v is Non... |
def test_machine_not_set_in_cloud_deployments(self):
    # when running in a cloud infrastructure, we need to know what
    # machine image to use -- so omitting it from the arguments below
    # must raise BadConfigurationException
    argv = ['--min', '1', '--max', '1', "--infrastructure", "euca"]
    self.assertRaises(BadConfigurationException, ParseArgs, argv,
                      "appscale-run-instances")
def __str__(self):
    # The message stored in args[0] has two parts separated by
    # '\n\n\t': plain text, then a blob that is re-rendered through
    # repr() and _dequote (presumably to strip the outer quotes --
    # confirm with _dequote). Assumes the separator is present.
    parts = self.args[0].split('\n\n\t')
    return parts[0] + '\n\n\t' + _dequote(repr(parts[1]))
def ridge_regression(X, Y, c1=0.0, c2=0.0, offset=None):
"""
Also known as Tikhonov regularization. This solves the minimization problem:
min_{beta} ||(beta X - Y)||^2 + c1||beta||^2 + c2||beta - offset||^2
One can find more information here: http://en.wikipedia.org/wiki/Tikhonov_regularization
P... |
def _write_buffer(self, s):
    # Prepend: new data goes in front of the existing buffered text.
    # NOTE(review): repeated prepending is O(n^2) on large buffers.
    self.buffer = s+self.buffer
def match_and_register(f_lon_map, lon, lon_thresh, out_template, clobber):
    """Register every file whose stored longitude lies within
    ``lon_thresh`` of ``lon``, writing output to ``out_template % lon``."""
    nearby = []
    for fname, file_lon in f_lon_map.items():
        if lon_offset(file_lon, lon) < lon_thresh:
            nearby.append(fname)
    register(nearby, lon, out_template % lon, clobber)
def read_crypto_pad4(self, s):
    # discard data: padding bytes carry no information, so advance the
    # parser state straight to the "block done" handler.
    return self.read_crypto_block4done()
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
refnames = variables["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("variables are unexpanded, not using")
return {} # unexpanded, so not in an unpacked git-archive tarball
refs ... |
def has_data(self, index):
    """Return True when ``index`` is neither a hole nor a blocked hole."""
    if index in self.holes:
        return False
    return index not in self.blocked_holes
def define_sample(self, md5, sha1, sha256, ruleset_name, rule_name, notificaiton_date, first_seen, detection_ratio, size):
self.md5 = md5
self.sha1 = sha1
self.sha256 = sha256
self.ruleset_name = ruleset_name
self.rule_name = rule_name
self.notificaiton_date = notificaiton_date
self.first_seen = first_seen
self... |
def getformat(self):
    """Build the format word from the currently set flags.

    Each ``(flag, bit)`` pair in ``self.format_flags`` contributes bit
    ``bit`` to the result when ``flag`` is present in ``self.flags``.
    Returns 0 when either mapping is empty or unset.
    """
    format_word = 0
    if self.flags and self.format_flags:
        for flag, bit in self.format_flags:
            # BUG FIX: the original used '&', which, with format_word
            # starting at 0, always left the result 0. OR the bit in.
            # (Also: 'flag in self.flags' replaces the py2-only
            # dict.has_key with identical semantics.)
            if flag in self.flags:
                format_word |= (0x01 << bit)
    return format_word
def __init__(self, output_vars, *args, **kwargs):
    # Replicate the monitored variables, then substitute their
    # batch-norm "replaced" versions before handing them to the parent
    # monitoring extension. The first two results of _get_bn_params are
    # intentionally unused here.
    output_vars = self.replicate_vars(output_vars)
    _, _, replaced_vars = self._get_bn_params(output_vars)
    super(ApproxTestMonitoring, self).__init__(replaced_vars, *args,
                                               **kwargs)
def main():
    # Import every module in the current package so all Entity
    # subclasses get registered, then print the subclasses that do not
    # declare a namespace, followed by their count.
    import_mods(".")
    subs = list(subclasses(entities.Entity))
    # print("\n".join([str(x) for x in subs]))
    # print(len(subs))
    no_namespace = [x for x in subs if not filter_has_namespace(x)]
    for x in no_namespace:
        print(x)
    print(len(no_namespace))
def read_and_compile_pattern_from_file(config_file):
    """Read INI config file and compile robots regex from robots key in
    patterns section.

    The regex is compiled case-insensitively.
    """
    config = ConfigParser.ConfigParser()
    # BUG FIX: the original passed a bare open() handle to readfp and
    # never closed it; use a context manager so the file is closed even
    # when parsing raises.
    with open(config_file) as fp:
        config.readfp(fp)
    return re.compile(config.get("patterns", "robots"), re.I)
def __init__(self, cache=False):
    # Start with empty recurrence data; inclusion rules/dates and
    # exclusion rules/dates are appended later (presumably via
    # rrule()/rdate()/exrule()/exdate(), per dateutil's rruleset API).
    rrulebase.__init__(self, cache)
    self._rrule = []
    self._rdate = []
    self._exrule = []
    self._exdate = []
def exportChildren(self, lwrite, level, namespace_='ArtifactObj:', name_='ArtifactObjectType', fromsubclass_=False, pretty_print=True):
super(ArtifactObjectType, self).exportChildren(lwrite, level, 'ArtifactObj:', name_, True, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol... |
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Noisiness':
text_ = child_.text
text_ = self.gds_validate_string(text_, node, 'Noisiness')
self.set_Noisiness(text_)
elif nodeName_ == 'Ease_of_Evasion':
text_ = child_.text
text_ = se... |
@synchronized(mlock)
def cancelScannerSubscription(self, tickerId):
""" generated source for method cancelScannerSubscription """
# not connected?
if not self.m_connected:
self.error(EClientErrors.NO_VALID_ID, EClientErrors.NOT_CONNECTED, "")
return
if self.m_serverVersion < 24:
... |
def exportAttributes(self, lwrite, level, already_processed, namespace_='cybox:', name_='Observables'):
if self.cybox_major_version is not None:
lwrite(' cybox_major_version=%s' % (self.gds_format_string(quote_attrib(self.cybox_major_version), input_name='cybox_major_version'), ))
if self.cybox_minor_v... |
def tearDown(self):
    # Delete every post this test created: the first self.toDelete
    # entries of the freshly fetched post list, counting the pending
    # total down as each one is removed.
    resp = self.client.posts(self.params['username'])
    for i in range(self.toDelete):
        self.client.delete_post(self.params['username'], resp['posts'][i]['id'])
        self.toDelete -= 1
def __init__(self, has_changed=None, idref=None, id=None, State=None, Description=None, Properties=None, Domain_Specific_Object_Properties=None, Location=None, Related_Objects=None, Defined_Effect=None, Discovery_Method=None, extensiontype_=None):
self.has_changed = _cast(bool, has_changed)
self.idref = _cast(N... |
def exportAttributes(self, lwrite, level, already_processed, namespace_='WinMemoryPageRegionObj:', name_='MemoryPageTypeType'):
super(MemoryPageTypeType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='MemoryPageTypeType')
if self.datatype is not None:
lwrite(' datatype=%s' ... |
def generate_extensions(self, extensions, enums, functions):
f = self._f_gl
write = set()
written = set(enum.name for enum in enums) | \
set(function.proto.name for function in functions)
f.write('# Extensions\nvar\n')
for ext in extensions:
if self.spec.NAME == 'gl' and not ... |
@classmethod
def _delete(cls, args):
    # Propagate the CLI-supplied timeout to the API client before
    # issuing the monitor deletion.
    api._timeout = args.timeout
    # TODO CHECK
    res = api.Monitor.delete(args.monitor_id)
    # A successful delete may return no body; only report diagnostics
    # when a response came back.
    if res is not None:
        report_warnings(res)
        report_errors(res)
@classmethod
def setup_parser(cls, subparsers):
parser = subparsers.add_parser('tag', help="View and modify host tags.")
verb_parsers = parser.add_subparsers(title='Verbs', dest='verb')
verb_parsers.required = True
add_parser = verb_parsers.add_parser('add', help="Add a host to one or more tags.",
... |
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Show_Message_Body':
obj_ = cybox_common.StringObjectPropertyType.factory()
obj_.build(child_)
self.set_Show_Message_Body(obj_)
elif nodeName_ == 'Show_Message_Title':
obj_ = cybox_common.Strin... |
def handle_data(self, data):
    """Accumulate character data, but only while inside an <a> element."""
    if not self.in_a:
        return
    self.text += data
def wait_for_visible(self, locator):
"""
Synchronization to deal with elements that are present, but are disabled until some action
triggers their visibility.
:raises: ElementVisiblityTimeout
"""
for i in range(timeout_seconds):
try:
if self.driver.is_visible(locator):
... |
def grade(tid, answer):
    """Grade a submitted CTF answer.

    Returns a dict with ``correct`` (bool) and a human-readable
    ``message``. ``tid`` is unused here but part of the grader
    interface.
    """
    # Idiom: 'in' replaces the equivalent answer.find(...) != -1.
    if "cr4zy_p4ssw0rds" in answer:
        return { "correct": True, "message": "Maybe sticking everything in the HTML source wasn't exactly the best idea." }
    return { "correct": False, "message": "Nope, that's not quite right." }
def _make_dict_with_path( srcfiles ):
    # Map each source file's stem (the text before ".txt") to its full
    # path as produced by mk_path.
    return dict( [( srcfile.split(u".txt")[0], mk_path( srcfile ) ) \
            for srcfile in srcfiles] )
@blueprint.route("/get", methods=["GET"])
@api_wrapper
def updates_get_hook():
updates = api.updates.get_all()
user = None
if api.auth.is_logged_in(): user = api.user.get_user()
data = []
for update in updates:
u = {
"title": update["title"],
"timestamp": update["timestamp"],
"content": update["content"... |
def serialize_groups(self, groups, path, portal=None):
""" Serialize groups to CSV. """
groups_copy = copy.deepcopy(groups)
field_names = GROUP_EXTRACT_PROPERTIES
if self.thumbnails:
if not portal:
raise PortalError('The "portal" argument is required to '\
... |
def __init__(self, namenode_host, namenode_port, hdfs_username):
    # Connection parameters for the HDFS namenode.
    # NOTE(review): attribute naming is inconsistent (username vs the
    # hdfs_username parameter); left unchanged since callers may rely
    # on it.
    self.namenode_host=namenode_host
    self.namenode_port = namenode_port
    self.username = hdfs_username
def visit_MarkSafe(self, node, frame):
    """Compile a MarkSafe node: emit the inner expression wrapped in
    a Markup(...) call."""
    emit = self.write
    emit('Markup(')
    self.visit(node.expr, frame)
    emit(')')
def has_safe_repr(value):
"""Does the node have a safe representation?"""
if value is None or value is NotImplemented or value is Ellipsis:
return True
if isinstance(value, (bool, int, float, complex, range_type,
Markup) + string_types):
return True
if isinstance(value, (tupl... |
def exportChildren(self, lwrite, level, namespace_='HTTPSessionObj:', name_='HostFieldType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Domain_Name is not None:
self.Domain_Name.export(lwrite, level, 'HTTPSessionObj:', name_='Domain_... |
@ended.setter
def ended(self, end):
    # Property setter: store to the name-mangled __end backing field.
    self.__end = end
def interpret(marker, environment=None, override=None):
    # Compile the marker expression (via the module-level compile
    # helper, not the builtin) and evaluate it immediately.
    # NOTE(review): ``environment`` and ``override`` are accepted but
    # never used -- confirm whether they should be passed through.
    return compile(marker)()
@pytest.fixture(scope="module", params=cfts)
def testenv(request):
    # Module-scoped environment for the CloudFormation tests: a single
    # regional connection plus the template under test (the fixture is
    # parametrized over every template in cfts).
    testenv = dict()
    testenv['cf_conn'] = boto.cloudformation.connect_to_region(region_for_tests)
    testenv['cft'] = request.param
    return testenv
@silencestderr
def test_absolute_requests(self):
def asserting_app(environ, start_response):
assert environ['HTTP_HOST'] == 'surelynotexisting.example.com:1337'
assert environ['PATH_INFO'] == '/index.htm'
assert environ['SERVER_PORT'] == addr.split(':')[1]
start_response('200 OK', [(... |
def _destinsrc(src, dst):
    """Return True when ``dst`` lies inside ``src``.

    Both paths are made absolute and suffixed with the path separator
    so that e.g. '/a/bc' is not mistaken for being inside '/a/b'.
    """
    abs_src = abspath(src)
    abs_dst = abspath(dst)
    sep = os.path.sep
    if not abs_src.endswith(sep):
        abs_src += sep
    if not abs_dst.endswith(sep):
        abs_dst += sep
    return abs_dst.startswith(abs_src)
def flag(imap_account, message_set, flags, unset=False):
if message_set is None or len(message_set) == 0:
log.error('Invalid message set')
request_message_set = ','.join(str(mail_id) for mail_id in message_set)
status, result = imap_account.uid(
u'STORE',
request_message_set,
... |
def __init__(self, extra_files=None, interval=1):
    # Normalize extra_files to a set of absolute paths; a missing or
    # empty value yields an empty set. ``interval`` is the polling
    # period between change checks (seconds, presumably).
    self.extra_files = set(os.path.abspath(x)
                           for x in extra_files or ())
    self.interval = interval
def set(self, key, value, timeout=None):
if timeout is None:
timeout = int(time() + self.default_timeout)
elif timeout != 0:
timeout = int(time() + timeout)
filename = self._get_filename(key)
self._prune()
try:
fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
... |
def testScrubWithNull2(self):
    # Scrub with a None environment, then re-add the one field the
    # scrub is expected to fill/strip; the result must then equal the
    # original config (ordering preserved via OrderedDict).
    config = json.loads(self.test_null_json2,
                        object_pairs_hook=collections.OrderedDict)
    r = runner.PipelineRunner()
    scrubbed = r.Scrub(config, None)
    config['inputs'][1]['sinks'] = ['gs://results_bucket/results.csv']
    self.assertEquals(scrubbed, config)
def do_reverse(value):
"""Reverse the object or return an iterator that iterates over it the other
way round.
"""
if isinstance(value, string_types):
return value[::-1]
try:
return reversed(value)
except TypeError:
try:
rv = list(value)
rv.reverse(... |
def testLeaseFreedOnSliceRetry(self):
# Reinitialize with faulty map function.
self._init_job(__name__ + "." + test_handler_raise_exception.__name__)
self._init_shard()
handler, _ = self._create_handler()
handler.post()
self.assertEqual(httplib.SERVICE_UNAVAILABLE, handler.response.status)
shard_state = ... |
def test_user_env_vars_in_env(self):
    # The /env endpoint must expose user-defined environment variables.
    response = self.client.get('/env')
    env = json.loads(response.data)
    self.assertEqual(env[FAKE_ENV_KEY], FAKE_ENV_VALUE)
    # USER_EMAIL is a reserved key and doesn't allow user env vars to
    # override.
    self.assertNotEqual(env['USER_EMAIL'], BAD_USER_EMAIL)
def testShardFailureAllDone(self):
"""Tests that individual shard failure affects the job outcome."""
for i in range(3):
shard_state = self.create_shard_state(self.mapreduce_id, i)
shard_state.active = False
if i == 0:
shard_state.result_status = model.ShardState.RESULT_FAILED
elif i == 1:
... |
def create_pubsub_stub(host=PUBSUB_ENDPOINT, port=SSL_PORT):
"""Creates a secure pubsub channel."""
ssl_creds = implementations.ssl_channel_credentials(None, None, None)
channel_creds = make_channel_creds(ssl_creds, auth_func)
channel = implementations.secure_channel(host, port, channel_creds)
retur... |
@classmethod
def get_params(cls, mapper_spec, allowed_keys=None, allow_old=True):
params = _get_params(mapper_spec, allowed_keys, allow_old)
# Use the bucket_name defined in mapper_spec params if one was not defined
# specifically in the output_writer params.
if (mapper_spec.params.get(cls.BUCKET_NAME_PARAM) is... |
@classmethod
def split_input(cls, mapper_spec):
"""Split input into multiple shards."""
filelists = mapper_spec.params[cls.FILES_PARAM]
max_values_count = mapper_spec.params.get(cls.MAX_VALUES_COUNT_PARAM, -1)
max_values_size = mapper_spec.params.get(cls.MAX_VALUES_SIZE_PARAM, -1)
return [cls([0] * len(files)... |
def _ParseNagFile(self):
    """Parses the nag file.

    Returns:
      A NagFile if the file was present else None.
    """
    nag_filename = SDKUpdateChecker.MakeNagFilename()
    try:
      fh = open(nag_filename)
    except IOError:
      # No nag file yet: the user has never been nagged.
      return None
    try:
      nag = NagFile.Load(fh)
    finally:
      # Always release the handle, even if parsing raises.
      fh.close()
    return nag
def generate_records(self, filename):
while True:
record = self.queue.get(block=True)
if id(record) == id(RestoreThread._ENTITIES_DONE):
break
entity_proto = entity_pb.EntityProto(contents=str(record))
fixed_entity_proto = self._translate_entity_proto(entity_proto)
yield datastore.Entity._Fr... |
def format_description(self, description):
    """Very simple formatter: return the description with a trailing
    newline appended."""
    return ''.join((description, '\n'))
def testTmpfileName(self):
writer_spec = {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}
mapreduce_state = self.create_mapreduce_state(output_params=writer_spec)
shard_state = self.create_shard_state(19)
ctx = context.Context(mapreduce_state.mapreduce_spec, shard_state)
context.Context._set(ctx)
writer = self.... |
def __init__(self, consistency_policy=None):
super(BaseTransactionManager, self).__init__()
self._consistency_policy = (consistency_policy or
MasterSlaveConsistencyPolicy())
self._meta_data_lock = threading.Lock()
self._commit_timestamp_lock = threading.Lock()
BaseTransactionM... |
def _MatchFacetRefinements(self, doc, ref_groups):
    # A document matches iff it satisfies every refinement group;
    # groups (keyed by facet name) are combined with AND.
    return all((self._MatchFacetRefinementSameName(doc, ref_same_names)
                for ref_same_names in ref_groups.values()))
def test_update_check_allowed(self):
module1 = object()
module2 = object()
self.config.modules = [module1, module2]
sdk_update_checker.SDKUpdateChecker(
mox.IgnoreArg(), self.config.modules).AndReturn(self.update_check)
self.update_check.CheckSupportedVersion()
self.update_check.AllowedToCheckForUpda... |
def read(self, amt=None):
    """Read from the underlying response file.

    Returns '' when the response is already closed, or for a HEAD
    request (closing the connection in that case). Otherwise reads the
    whole body, or up to ``amt`` bytes when given.
    """
    if self.fp is None:
        return ''
    if self._method == 'HEAD':
        self.close()
        return ''
    if amt is None:
        return self.fp.read()
    return self.fp.read(amt)
def test_internal_symlinks_relative(self):
    # Create a relative symlink inside the watched tree pointing at a
    # directory that is then removed; collecting changes() afterwards
    # must not blow up on the dangling link.
    dir_a_b = self._create_directory('a/b')
    dir_p = self._create_directory('p')
    os.symlink('../../p', os.path.join(dir_a_b, 'p'))
    self._create_directory('p/q/r')
    self._watcher.start()
    shutil.rmtree(dir_p)
    self._watcher.changes()
def MergeFrom(self, x):
    # Protocol-buffer style merge: copy the logs field from x when set.
    # Merging a message into itself is a programming error.
    assert x is not self
    if (x.has_logs()): self.set_logs(x.logs())
def _GetEnviron(self, name):
"""Helper method ensures environment configured as expected.
Args:
name: Name of environment variable to get.
Returns:
Environment variable associated with name.
Raises:
ConfigurationError if required environment variable is not found.
"""
try:
return os.envir... |
def resolve_project_id(self, app_id):
    # Identity mapping: the app id is used directly as the project id.
    return app_id
def update_from_mime_message(self, mime_message):
"""Update values from MIME message.
Copies over date values.
Args:
mime_message: email.Message instance to copy information from.
"""
mime_message = _parse_mime_message(mime_message)
super(InboundEmailMessage, self).update_from_mime_message(mime_messag... |
def ByteSize(self):
    # Serialized size: the two length-delimited string fields plus a
    # constant 2 bytes of tag overhead (protocol-buffer wire format).
    n = 0
    n += self.lengthString(len(self.topic_))
    n += self.lengthString(len(self.sub_id_))
    return n + 2
def _CalculateWriteOps(composite_indexes, old_entity, new_entity):
"""Determines number of entity and index writes needed to write new_entity.
We assume that old_entity represents the current state of the Datastore.
Args:
composite_indexes: The composite_indexes for the kind of the entities.
old_entity:... |
def NormalizeVmSettings(self):
"""Normalize Vm settings.
"""
if self.IsVm():
if not self.vm_settings:
self.vm_settings = VmSettings()
if 'vm_runtime' not in self.vm_settings:
self.SetEffectiveRuntime(self.runtime)
if hasattr(self, 'beta_settings') and self.beta_settings:
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.