text stringlengths 48 9.16k |
|---|
def correct_lis(self, uris, blocking=True, verbose=False):
""" Returns logical interconnects to a consistent state.
The current logical interconnect state is compared to the associated
logical interconnect group. Any differences identified are corrected,
bringing the logical interconnect back to a cons... |
def get_server_profiles(self):
    """Fetch all server profiles from the appliance.

    Returns the member list extracted from the paged response body.
    """
    response = self._con.get(uri['profiles'])
    return get_members(response)
def add_post(self, activity_post):
    """Record a Google+ post id on this activity, then refresh impact and save.

    The post id is appended only when it is not already tracked; the
    impact is recalculated and the entity persisted in either case.
    """
    post_id = activity_post.post_id
    if post_id not in self.gplus_posts:
        self.gplus_posts.append(post_id)
    self.calculate_impact()
    self.put()
def IdSet(self, value):
"""Setter to be used for default id EndpointsAliasProperty.
Sets the key on the current entity using the value passed in as the ID.
Using this key, attempts to retrieve the entity from the datastore and
update the unset properties of the current entity with those from the
retrieved en... |
def __call__(self, shape, name=None):
    """Draw a random weight tensor and rescale it to a fixed overall norm.

    Values are sampled from N(0, 0.01), divided by the tensor's Euclidean
    norm, and scaled by sqrt(shape[1]) before being wrapped as a shared
    variable.
    """
    sample = np_rng.normal(loc=0, scale=0.01, size=shape)
    sample = sample / np.sqrt(np.sum(sample ** 2)) * np.sqrt(shape[1])
    return sharedX(sample, name=name)
def shutdown(sts, mode):
    """Request an appliance shutdown/restart and pretty-print the response.

    :param sts: appliance/settings client exposing ``shutdown``
    :param mode: operation name, e.g. 'Shutdown' or 'Restart'
    """
    print(mode + 'ing Appliance')
    result = sts.shutdown(mode)
    pprint(result)
@property
def last_voted_blockproposal(self):
"the last block proposal node voted on"
for r in self.rounds:
if isinstance(self.rounds[r].proposal, BlockProposal):
assert isinstance(self.rounds[r].lock, Vote)
if self.rounds[r].proposal.blockhash == self.rounds[r].lock.blockhash:
... |
def __init__(self, motor_config):
"""Initialize a set of DMCCs and their associated motors
:param motor_config: Config entry mapping motor names to DMCC ids and
motor indices
Dictionary entries are in the format:
<motor_name>: { board_num: [0-3], motor_num: [1-2] }
"""
self.config = l... |
def Equals(self, x):
    """Protocol-buffer style equality check.

    With no message fields to compare, every comparand (including self)
    is considered equal; always returns 1 (true).
    """
    # The original identity short-circuit returned the same value as the
    # fall-through, so a single return preserves behavior exactly.
    return 1
def aes(key, data, counter=False):
""" encrypt data with aes, using either pycryptopp or PyCrypto.
Args
key: The encryption key
data: plain text data
counter: a callable, usually not needed
"""
# using either pycryptopp...
if hasattr(AES, "process"):
a... |
def __call__(self, cmd, shell):
    """Run the wrapped handler and split its output on '|' delimiters.

    Returns an empty list when the handler produced nothing but
    whitespace.
    """
    output = self.handler(cmd, shell).strip()
    return output.split("|") if output else []
def visible(self):
    "Return whether the window is displayed and visible."
    stats = _xwininfo(self.identifier, "stats")
    return stats["Map State"] == "IsViewable"
def test_args_port(self):
    """The -p command-line flag should set PSDASH_PORT as an integer."""
    runner = PsDashRunner(args=['-p', '5555'])
    self.assertEqual(runner.app.config['PSDASH_PORT'], 5555)
def get_label_from_id_name(self, label_info_dict, key_name=None, label_prefix='', non_formatted=False):
"""
Expects a dict in one of 2 formats (where key_name is "status"):
Format 1:
"status": { "id": 1, "name": "New" },
Format 2:
{ "id":3, "name":"UX/UI Component"}
... |
def _handle_double_quote(s, t):
k, v = t.split('=')
return k, v.strip('"') |
def getspp(sts):
    """Print every SPP (firmware bundle) known to the appliance.

    Each bundle's fields are printed one per line — except the verbose
    'fwComponents' entry — with a blank line before and after each.
    """
    for spp in sts.get_spps():
        print('')
        for key, value in spp.items():
            if key != 'fwComponents':
                print('{0:25} : {1}'.format(key, value))
        print('')
def decode(self, bvlpdu):
    """Decode a broadcast-distribution table from a BVLL PDU.

    Consumes 6-byte address entries, each followed by a mask read via
    get_long(), until the PDU data is exhausted; the resulting entries
    accumulate in self.bvlciBDT.
    """
    BVLCI.update(self, bvlpdu)
    self.bvlciBDT = []
    while bvlpdu.pduData:
        entry = Address(unpack_ip_addr(bvlpdu.get_data(6)))
        entry.addrMask = bvlpdu.get_long()
        self.bvlciBDT.append(entry)
def __init__(self, instances, default_queue="pool"):
    """Build an instance pool.

    :param instances: a single instance, or an iterable of instances,
        used to seed the pool
    :param default_queue: name of the queue used by default
    """
    self._pool = Queue()
    # An iterable seeds the pool element by element; anything else is
    # treated as a single instance.
    if hasattr(instances, '__iter__'):
        for instance in instances:
            self._pool.put(instance)
    else:
        self._pool.put(instances)
    self._default_queue = default_queue
def __init__(self, hashFn=0, challenge='', *args, **kwargs):
    """Build an Access Challenge BSLCI message.

    :param hashFn: hash function identifier carried in the message
    :param challenge: challenge payload; when present it extends the
        base message length of 5
    """
    super(AccessChallenge, self).__init__(*args, **kwargs)
    self.bslciFunction = BSLCI.accessChallenge
    self.bslciHashFn = hashFn
    self.bslciChallenge = challenge
    # Base header is 5 octets; a non-empty challenge is appended to it.
    self.bslciLength = 5 + len(challenge) if challenge else 5
def setUp(self):
"""Setup test hardware files and build motor object."""
config = path.dirname(path.realpath(__file__))+"/test_config.yaml"
self.config = lib.get_config(config)
# Build motor in testing mode
self.pwm_num = self.config["two_motors"][0]["PWM"]
self.gpio_num = self.config["two_mot... |
def __init__(self, peer):
if _debug: TCPClient._debug("__init__ %r", peer)
asyncore.dispatcher.__init__(self)
# ask the dispatcher for a socket
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
# save the peer
self.peer = peer
# create a request buffer
self.request = ''
# ho... |
def merge(a, b, path=None):
"merges b into a"
if path is None: path = []
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(a[key], b[key], path + [str(key)])
elif a[key] == b[key]:
pass # same leaf valu... |
def container_literal(
before, left_delimiter, items, right_delimiter, trailing_comma=True,
):
before = _clean_before(before)
items = list(items)
c = _single_line_container(before, left_delimiter, items, right_delimiter)
if len(c) <= 79:
return c
return _multi_line_container(
b... |
def update(self, pci):
    """Copy the generic PCI fields, then the BACnet-specific ones."""
    _PCI.update(self, pci)
    # BACnet-layer additions on top of the base PCI.
    self.pduExpectingReply = pci.pduExpectingReply
    self.pduNetworkPriority = pci.pduNetworkPriority
def _call(rel, jwt=None, data=None):
rel = 'http://localhost:5000/api/' + rel
if jwt:
if data:
result = requests.post(rel, headers=_auth_header(jwt), data=json.dumps(data))
else:
result = requests.get(rel, headers=_auth_header(jwt))
else:
if data:
... |
def restore_selections(view, lines_and_pts):
    """Replace the view's current selections with previously-stored ones."""
    view.sel().clear()
    for stored_selection in lines_and_pts:
        region = region_from_stored_selection(view, stored_selection)
        view.sel().add(region)
def append_segment(self, apdu):
"""This function appends the apdu content to the end of the current
APDU being built. The segmentAPDU is the context."""
if _debug: SSM._debug("append_segment %r", apdu)
# check for no context
if not self.segmentAPDU:
raise RuntimeError("no segmentation cont... |
def _find_images(self, local_dir=None):
    """List image files under `local_dir`, relative to that directory.

    Falls back to the configured local images directory when no
    directory is given.
    """
    directory = self.settings.local_images if local_dir is None else local_dir
    return _list_files_recursively(directory, relative_to_path=True)
def encode(self, bslpdu):
addrLen = self.bslciAddress.addrLen
# make sure the length is correct
self.bslciLength = 5 + addrLen + len(self.pduData)
BSLCI.update(bslpdu, self)
# encode the address
bslpdu.put(addrLen)
bslpdu.put_data( self.bslciAddress.addrAddr )
# encode the rest of th... |
def load_or_create_user(username):
resources.init('Kickflip', 'Kickflip')
config_json = resources.user.read('config.json')
if not config_json:
resources.user.write('config.json', json.dumps({}, sort_keys=True))
config_json = resources.user.read('config.json')
settings = json.loads(confi... |
def test_is_builtin_variable(id_value, is_function, expected_result):
    """is_builtin_variable should classify the parametrized id node."""
    node = create_id(id_value, is_function=is_function)
    assert is_builtin_variable(node) == expected_result
@cherrypy.expose
def template(self, *paths, **_ignored):
template_name = '/'.join(paths)
template_path = os.path.join(self.settings.templates, template_name)
if os.path.isdir(template_path):
return self.edit_renderer.directory(
'Contents of ' + (template_name or 'template directory'),
... |
def set_live_channel_list(self, ch_list):
    """Store the live channel map keyed by swapcased display names.

    The key's case is swapped deliberately, to simulate a display name
    whose case differs from the input name.
    """
    for name in ch_list:
        self.live_ch_list[name.swapcase()] = ch_list[name]
@pyqtSlot(bool)
def setEnabled(self, enabled):
    """Qt slot wrapper: delegate enabling/disabling to the base class.

    Declared as a pyqtSlot(bool) so signal connections bind to a true
    slot rather than a generic Python callable.
    """
    super(Window, self).setEnabled(enabled)
def testWindowFlags(self, windowFlags):
    """Return the subset of `windowFlags` currently set on this window
    (truthy when any of the tested flags are present)."""
    current_flags = self.windowFlags()
    return current_flags & windowFlags
@pytest.mark.django_db
def test_orm():
Conf.ORM = 'default'
# check broker
broker = get_broker(list_key='orm_test')
assert broker.ping() is True
assert broker.info() is not None
# clear before we start
broker.delete_queue()
# enqueue
broker.enqueue('test')
assert broker.queue_siz... |
def __init__(self, bounds, body):
    """Store the node's bounds and wrap `body` as its single argument."""
    self.bounds = bounds
    self.args = [body]
def __init__(self, *args):
    """Store a sequence of UpdatePattern children.

    Every positional argument must already be an UpdatePattern.
    """
    assert all(isinstance(arg, UpdatePattern) for arg in args)
    self.args = args
def segmented_response(self, apdu):
if _debug: ServerSSM._debug("segmented_response %r", apdu)
# client is ready for the next segment
if (apdu.apduType == SegmentAckPDU.pduType):
if _debug: ServerSSM._debug(" - segment ack")
# duplicate ack received?
if not self.in_window(apdu.a... |
def make_oauth_protected_endpoint(*args, **kwargs):
""" Returns a dummy API endpoint that returns True. This endpoint will be
protected with the @oauth_scope decorator -- see that function's signature
for a description of the parameters that may be passed. """
@oauth_scope(*args, **kwargs)
def api_endpoint(ac... |
def __enter__(self):
    """Install a SIGINT handler that records (rather than raises on)
    interruption; the previous handler is saved for later restoration."""
    self.interrupted = False
    self.original_handler = signal.getsignal(signal.SIGINT)

    def _record_interrupt(signum, frame):
        self.interrupted = True

    signal.signal(signal.SIGINT, _record_interrupt)
    return self
def make_ntlm_negotiate(self):
msg = 'NTLMSSP\x00' # Signature
msg += pack('<I', 1) # Message Type 1
# Flags
self.flags = (
self.NTLMSSP_NEGOTIATE_UNICODE |
self.NTLM_NEGOTIATE_OEM |
self.NTLMSSP_REQUEST_TARGET |
self.NTLMSSP_NEGOTIATE_LM_... |
def parse_with(s, parser, lexer):
    """Parse string `s`, collecting syntax errors in the module-global
    `error_list`.

    :return: the parser's result when no errors were recorded
    :raises ParseErrorList: when any errors accumulated during parsing
    """
    global error_list
    error_list = []
    result = parser.parse(s, lexer=lexer)
    if error_list:
        raise ParseErrorList(error_list)
    return result
def error(self, proto, *args):
if proto in ['http', 'https']:
# XXX http[s] protocols are special-cased
dict = self.handle_error['http'] # https is not different than http
proto = args[2] # YUCK!
meth_name = 'http_error_%s' % proto
http_err = 1
orig_args = args
e... |
def p_top_macro_atom_eq_lcb_action_rcb(p):
    # NOTE: the string below is the PLY grammar rule for this production;
    # it is read by the parser generator and must not be edited.
    'top : top MACRO atom EQ LCB action RCB'
    # Carry the accumulated `top` forward...
    p[0] = p[1]
    # ...then build a macro definition from the atom (converted from an
    # application form) and the brace-enclosed action, and declare it.
    d = Definition(app_to_atom(p[3]),p[6])
    p[0].declare(MacroDecl(d))
def show_graph(self, sg):
    """Open the graph `sg` in the Tk graph UI, parented to this frame."""
    return tk_graph_ui.show_graph(
        sg,
        self.tk,
        parent=self,
        frame=self.state_frame,
        ui_parent=self.ui_parent,
    )
def reducer(self, word, counts):
    """MapReduce reduce step: emit the word paired with its total count."""
    total = sum(counts)
    yield word, total
def getFlag(self, flag):
    """Look up a flag's stored value, defaulting to False when unset.

    :param flag: flag name/key to look up in self.flags
    :return: the stored value, or False when the flag was never set
    """
    # dict.get replaces the previous bare `except:`, which silently
    # swallowed every error (including unrelated bugs such as typos
    # raising AttributeError) rather than just missing keys.
    return self.flags.get(flag, False)
def init_bsum(self, bsum, flags):
    """Prepare batch-sum bookkeeping for a kernel launch.

    Merges this object's flags into `flags`, and sets (or clears) bit 4
    depending on whether a `bsum` tensor was supplied.

    :return: (bsum device pointer or 0, updated flags)
    """
    merged = flags | self.flags
    if not bsum:
        self.bsum_zero = 0
        return 0, merged & ~4
    device_ptr = bsum.gpudata
    # [pointer, offset, size, stream] — consumed later to zero the buffer.
    self.bsum_zero = [device_ptr, 0, bsum.size, self.lib.stream]
    return device_ptr, merged | 4
@placebo_session
def test_create_lambda_function(self, session):
bucket_name = 'lmbda'
zip_path = 'Spheres-dev-1454694878.zip'
z = Zappa(session)
z.aws_region = 'us-east-1'
z.load_credentials(session)
z.credentials_arn = 'arn:aws:iam::12345:role/ZappaLambdaExecution'
arn = z.create_lambda_... |
def handle_start_expr(self, token_text):
if self.start_of_statement():
# The conditional starts the statement if appropriate.
pass
next_mode = MODE.Expression
if token_text == '[':
if self.last_type == 'TK_WORD' or self.flags.last_text == ')':
if self.flags.last_text in... |
def _test(self,response):
'''test a value'''
#make an ordered list of cases
ordered_cases = []
for case in self.cases:
if len(ordered_cases) == 0:
ordered_cases.append(self.cases[case])
else:
broke = False
for index in xrange(len(ordered_cases)):
... |
def test_signup_today(self):
today = date.today()
self.assertTrue(self.client.login(username=self.user.username,
password=self.user.username))
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertEqual(DailyActivity.... |
def testDailyEngagementReport(self):
users_test = []
users_control = []
num_control1 = 0
num_test1 = 0
num_control2 = 0
num_test2 = 0
#create users
for i in range(5):
users_control.append(create_user_in_group(self.experiment, i,
Parti... |
@staticmethod
def tokenize(sentence):
"""
Split a sentence into tokens including punctuation.
Args:
sentence (string) : String of sentence to tokenize.
Returns:
list : List of tokens.
"""
return [x.strip() for x in re.split('(\W+)?', sentence) if x.strip()] |
def _OnChar(self, event):
    "Handle the OnChar event by rejecting non-numerics"
    modifiers = event.GetModifiers()
    # Keystrokes carrying modifiers other than none/Shift are passed
    # through untouched (e.g. Ctrl/Alt accelerators).
    if modifiers not in (0, wx.MOD_SHIFT):
        event.Skip()
        return
    if event.GetKeyCode() in self.acceptableCodes:
        event.Skip()
        return
    # Reject the keystroke audibly.
    wx.Bell()
def __init__(self, title, artist, album, sizeInBytes, lastPlayed, rating):
self.title = title
self.artist = artist
self.album = album
a = datetime.datetime.strptime(lastPlayed, "%d/%m/%Y %H:%M")
datenp = np.datetime64(a)
self.lastPlayed = datenp
# self.date = datenp - datenp.astype('datetime... |
def __call__(self, params, cost):
updates = []
grads = T.grad(cost, params)
grads = clip_norms(grads, self.clipnorm)
t = theano.shared(floatX(1.))
b1_t = self.b1*self.l**(t-1)
for p, g in zip(params, grads):
g = self.regularizer.gradient_regularize(p, g)
m = theano.shared(p.g... |
def configure(self, in_obj):
    """Configure the layer from its input; the output shape mirrors the
    input shape.

    :param in_obj: upstream layer/object providing the input shape
    :raises AttributeError: when the incoming shape is not CHW
        (channels, height, width)
    :return: self, for chaining
    """
    super(ColorNoise, self).configure(in_obj)
    self.out_shape = self.in_shape
    try:
        self.nfm, self.H, self.W = self.in_shape
    except (ValueError, TypeError):
        # Only a failed 3-way unpack maps to this error; the previous
        # bare `except:` also masked unrelated bugs.
        raise AttributeError('ColorNoise can only be used with layer providing CHW')
    self.HW = self.H * self.W
    return self
def SetCellPadding(self, padding):
    """
    Set the padding around cells in this format.

    `padding` is either a single number (applied to all four sides) or a
    collection of [left, top, right, bottom] values; it is normalized by
    the _MakePadding helper before being stored.
    """
    self.cellPadding = self._MakePadding(padding)
def addEdit(self, event):
with wx.BusyInfo("Please wait for a moment while ODMTools fetches the data and stores it in our database", parent=self):
logger.debug("Beginning editing")
isSelected, seriesID = self.pnlSelector.onReadyToEdit()
# logger.debug("Initializing DataTable")
# # ... |
def test_qcl_relationship(self):
    """The series must expose an associated quality control level."""
    qcl = self.series.quality_control_level
    # `is not None` instead of `!= None` (PEP 8 / E711): identity check
    # does not depend on any custom __ne__ implementation.
    assert qcl is not None
def analyze(self, filename):
"""Reimplement analyze method"""
if self.dockwidget and not self.ismaximized:
self.dockwidget.setVisible(True)
self.dockwidget.setFocus()
self.dockwidget.raise_()
pythonpath = self.main.get_spyder_pythonpath()
runconf = runconfig.get_run_configuration... |
def _split_stages(node, duplicates=None, aliases=None, stages=None, parents=None):
"""
Split out all reductions and post reduction scalar operations into seperate
stacks (stages)
This leaves remaining in the tree anything not in these categories.
"""
# init data structures
if duplicates is ... |
def on_next(self, element):
    """Observer callback: wrap `element` in a message and post it."""
    message = self.formatter.create_element_message(element)
    self._post_message(message)
def getXMLElement(self):
    """Build an ElementTree 'item' element from this object's state.

    Each entry of self.content becomes a child element whose text is the
    entry's value; the 'icon' child additionally gets a 'type' attribute
    when self.icon_type is set.

    :return: the populated xml.etree Element
    """
    item = ElementTree.Element('item', self.attrb)
    # .items() (rather than the Python-2-only .iteritems()) keeps this
    # method working on both Python 2 and 3.
    for (k, v) in self.content.items():
        attrb = {}
        if k == 'icon' and self.icon_type:
            attrb['type'] = self.icon_type
        sub = ElementTree.SubElement(item, k, attrb)
        sub.text = v
    return item
def capture_seconds(num_seconds, chunksize, rate, width):
    """Capture `num_seconds` of audio by converting the duration into a
    whole number of fixed-size buffers and delegating to capture_buffers."""
    total_frames = float(num_seconds * rate)
    num_buffers = int(total_frames / chunksize)
    return capture_buffers(num_buffers, chunksize, rate, width)
def configure(self, in_obj):
super(BiRNN, self).configure(in_obj)
(self.nin, self.nsteps) = self.in_shape
self.out_shape = (2 * self.nout, self.nsteps)
self.gate_shape = (2 * self.nout * self.ngates, self.nsteps)
if self.split_inputs is True and self.nin % 2 == 1:
raise ValueError("# input... |
def init_params(self, shape):
"""
Initialize params for GRU including weights and biases.
The weight matrix and bias matrix are concatenated from the weights
for inputs and weights for recurrent inputs and bias.
The shape of the weights are (number of inputs + number of outputs +1 )
by (number o... |
def createOpener():
'''Create a generic opener for http
This is particularly helpful when there is a proxy server in line'''
# Thanks to: http://www.decalage.info/en/python/urllib2noproxy
proxy_handler = urllib2.ProxyHandler(HTTP_PROXY)
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(ope... |
def build(self):
    """Serialize this job into a base64-encoded JSON command string.

    :return: str -- base64 of the JSON payload describing the job
    """
    cmd = {'sender': self.sender,
           'receiver': self.receiver,
           'output': self.output,
           'cmd': self.cmd,
           'jobid': self.jobid}
    payload = json.dumps(cmd)
    # b64encode requires bytes on Python 3; encode first and decode the
    # result so callers keep receiving a str on both Python 2 and 3.
    return base64.b64encode(payload.encode('utf-8')).decode('ascii')
def __init__(self, ttps=None):
    """Initialize the TTPs collection with an empty kill-chain registry."""
    super(TTPs, self).__init__(ttps)
    self.kill_chains = KillChains()
def csv_sym(sym, d_data, ls_keys, s_directory):
bool_first_iter = True
for key in ls_keys:
if bool_first_iter == True:
df_sym = d_data[key].reindex(columns = [sym])
df_sym = df_sym.rename(columns = {sym : key})
bool_first_iter = False
else:
df_t... |
@assert_warnings
def test_deprecated_related_packages(self):
    """Appending to the deprecated related_packages list still works;
    the @assert_warnings decorator checks the expected warning fires."""
    target = et.ExploitTarget()
    target.related_packages.append(STIXPackage())
    self.assertEqual(len(target.related_packages), 1)
@pytest.mark.parametrize('cls', [AsciiTable, UnixTable])
def test_attributes(cls):
"""Test different table attributes."""
table_data = [
['Name', 'Color', 'Type'],
['Avocado', 'green', 'nut'],
['Tomato', 'red', 'fruit'],
['Lettuce', 'green', 'vegetable'],
]
table = cls(ta... |
def featHiLow(dData, lLookback=20, b_human=False ):
'''
@summary: 1 represents a high for the lookback -1 represents a low
@param dData: Dictionary of data to use
@param lLookback: Number of days to look in the past
@param b_human: if true return dataframe to plot
@return: DataFrame array contai... |
def exportChildren(self, lwrite, level, nsmap, namespace_=XML_NS, name_='ExploitTargetsType', fromsubclass_=False, pretty_print=True):
super(ExploitTargetsType, self).exportChildren(lwrite, level, nsmap, namespace_, name_, fromsubclass_=True, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
e... |
@classmethod
@contextmanager
def temp(cls, domain, token):
import shopify
original_site = shopify.ShopifyResource.get_site()
original_token = shopify.ShopifyResource.get_headers().get('X-Shopify-Access-Token')
original_session = shopify.Session(original_site, original_token)
session = Session(domai... |
def sequenceProb(self, newData):
"""
Returns the probability that this HMM generated the given sequence.
Uses the forward-backward algorithm. If given an array of
sequences, returns a 1D array of probabilities.
"""
if len(newData.shape) == 1:
return forwardbackward( self.prior,\
self.transition_matri... |
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Tools':
obj_ = ToolsType.factory()
obj_.build(child_)
self.set_Tools(obj_)
elif nodeName_ == 'Infrastructure':
obj_ = InfrastructureType.factory()
obj_.build(child_)
self.set_I... |
@retry(9, Exception, 0.01, 'pypet.retry')
def _put_on_queue(self, to_put):
    """Puts data on the queue with pickling temporarily disabled."""
    previous = self.pickle_queue
    self.pickle_queue = False
    try:
        self.queue.put(to_put, block=True)
    finally:
        # Restore the flag even when the put fails (the @retry decorator
        # may invoke this method again).
        self.pickle_queue = previous
def thumbnail_id(self, name):
    """Return the identifier used for `name`'s thumbnail field."""
    return '{0}_thumb_id'.format(name)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Suggested_COA':
obj_ = stix_common_binding.RelatedCourseOfActionType.factory()
obj_.build(child_)
self.Suggested_COA.append(obj_)
super(SuggestedCOAsType, self).buildChildren(child_, node, nodeName_, ... |
def exportAttributes(self, lwrite, level, already_processed, namespace_='ttp:', name_='MalwareInstanceType'):
if self.idref is not None and 'idref' not in already_processed:
already_processed.add('idref')
lwrite(' idref=%s' % (quote_attrib(self.idref), ))
if self.id is not None and 'id' not in a... |
def matchesExclusions(strippedRule):
    """Return True when the rule's domain (second whitespace-separated
    field) matches any configured exclusion regex."""
    domain = strippedRule.split()[1]
    # any() short-circuits on the first matching pattern, exactly like
    # the explicit loop it replaces.
    return any(regex.search(domain) for regex in settings["exclusionregexs"])
def perform(self, token_stream, text):
    """Apply this rule's wrapped function to the token stream and text."""
    action = self.function
    return action(token_stream, text)
def test_list_add(self):
    """Appending to a two-element FieldList should grow it to three."""
    field_list = brewery.FieldList(["foo", "bar"])
    field_list.append("baz")
    self.assertEqual(3, len(field_list))
def store_references(self, references):
    """Stores references to disk and may collect garbage."""
    for trajectory_name, reference_list in references.items():
        self._storage_service.store(
            pypetconstants.LIST, reference_list,
            trajectory_name=trajectory_name)
    self._check_and_collect_garbage()
def pop(self):
    """Pop an arbitrary element from the stack.

    :return: the element associated with the smallest heap key
    :raises StopIteration: when the heap is exhausted (or the popped key
        has no corresponding element), preserving the original contract.
    """
    try:
        key = heapq.heappop(self.heap)
        return self.elements[key]
    except (IndexError, KeyError):
        # heappop raises IndexError on an empty heap; a missing key in
        # self.elements raises KeyError. The previous bare `except:`
        # also swallowed KeyboardInterrupt/SystemExit.
        raise StopIteration
def writeOpeningHeader(finalFile):
finalFile.seek(0) #reset file pointer
fileContents = finalFile.read() #save content
finalFile.seek(0) #write at the top
writeData(finalFile, "# This hosts file is a merged collection of hosts from reputable sources,\n")
writeData(finalFile, "# with a dash of crowd... |
def test_distance(self):
coordinate_pairs = {
1: [[10, 10], [10, 10]],
2: [[33.7550, 84.3900], [40.7127, 74.0059]],
3: [[0, 0], [0, 0]],
4: [[-33.7550, -84.3900], [40.7127, 74.0059]],
}
results = {
1: 0.0,
2: 1200.0,
3: 0.0,
4: 17959
}... |
def send_notification(self, token_hex, payload, identifier=0, expiry=0):
"""
in enhanced mode, send_notification may return error response from APNs if any
"""
if self.enhanced:
self._last_activity_time = time.time()
message = self._get_enhanced_notification(token_hex, payload,
... |
def compare_explore_more_trials_with_removing_duplicates(self,traj):
self.explored ={'Normal.trial': [0,1,0,1,0,1],
'Numpy.double': [np.array([1.0,2.0,3.0,4.0]),
np.array([1.0,2.0,3.0,4.0]),
np.array([-1.0,3.0,5.0,7.0]),
np.array([-1... |
def __init__(self, url):
# TODO: currently only local paths are supported
if is_local(url) and not url.endswith("/"):
url = url + "/"
self.url = url
infopath = urljoin(url, "datapackage.json")
metadata = read_json(infopath)
with open(infopath) as f:
try:
metadata = ... |
def redraw(self, view):
    """Redraw all marks in the given view.

    Existing marks are cleared first so the subsequent draw starts from
    a clean slate.
    """
    self.clear(view)
    self.draw(view)
def __init__(self, fields=None, chars=None):
    """Creates a node for string stripping.

    :Attributes:
        * `fields`: fields to be stripped
        * `chars`: characters to be stripped
    """
    super(StringStripNode, self).__init__()
    self.fields = fields
    self.chars = chars
def _remove_all_contracts(self):
    """Remove every known contract.

    The contract collection is snapshotted while holding the lock, then
    each contract is removed outside of it — mirroring the original
    locking discipline.
    """
    with self.contracts_lock:
        snapshot = list(self.contracts.values())
    for contract in snapshot:
        self._remove_contract(contract)
def on_query_completions(self, view, prefix, locations):
if not view.match_selector(locations[0],
"source.cfscript.cfc - text - meta - string - comment"):
return []
if not SETTINGS.get("component_method_completions"):
return
# set local _completions variable
_completions = ... |
def initialize_test(self):
"""Test if calculate throws an error as expected."""
data = [[0.0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]]
tsOrg = TimeSeries.from_twodim_list(data)
tsCalc = TimeSeries.from_twodim_list(data)
bem = BaseErrorMeasure()
try:
bem.initialize(tsOrg, tsCalc... |
def testDependentServiceExtension(self):
"""Assigning more references to an attribute"""
importConfigurationManifest(
'wdrtest/manifests/references/dependent_service.wdrc', topology
)
importConfigurationManifest(
'wdrtest/manifests/references/dependent_service2.wdrc', topology
)
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.