text stringlengths 4 1.02M | meta dict |
|---|---|
'''
This module provides interfaces to the RaspberrySTEM CREATOR Kit speaker.
Additionally, it can be used for any audio out over the analog audio jack.
'''
import os
import sys
import time
import re
import io
import select
from functools import partial
from . import mixer # c extension
import tempfile
from threading import RLock, Thread, Condition, Event
from queue import Queue, Full, Empty
from subprocess import call, check_output
from struct import pack, unpack
import socket
'''
Future Sound class member function:
def seek(self, position, absolute=False, percentage=False)
- relative +/- seconds
- absolute +/- seconds (-negative seconds from end)
- absolute percentage
- returns previous position, in seconds
'''
# Playback state-machine states used by BaseSound.__play_thread.
STOP, PLAY, FLUSH = range(3)
# Number of audio bytes streamed to the mixer server per chunk.
CHUNK_BYTES = 1024
# Cache directory for raw-PCM files converted (via sox) from source files.
SOUND_CACHE = '/home/pi/.rstem_sounds'
# Directory where the kit's stock sound files are installed.
SOUND_DIR = '/opt/raspberrystem/sounds'
# Location of the external mixer server executable.
MIXER_EXE_BASENAME = 'rstem_mixer'
MIXER_EXE_DIRNAME = '/opt/raspberrystem/bin'
MIXER_EXE = os.path.join(MIXER_EXE_DIRNAME, MIXER_EXE_BASENAME)
# TCP port the mixer server listens on (localhost only).
SERVER_PORT = 8888
def shell_cmd(cmd):
    """Run *cmd* through the shell, discarding its stdout and stderr.

    NOTE(review): callers interpolate file names into *cmd* and
    ``shell=True`` lets the shell interpret metacharacters in them --
    verify those inputs are trusted.
    """
    with open(os.devnull, "w") as sink:
        call(cmd, shell=True, stdout=sink, stderr=sink)
def start_server():
    """Start the mixer server if it is not already running, then wait for it.

    Uses ``pgrep`` to avoid launching a second server instance, then polls
    the server's TCP port (up to 30 tries, ~3 seconds) until a connection
    succeeds.  Returns silently if the server never comes up; callers will
    fail on their own connect attempts.
    """
    # start server (if it is not already running)
    shell_cmd('pgrep -c {} || {} &'.format(MIXER_EXE_BASENAME, MIXER_EXE))
    # Wait until server is up.  Bug fix: the original reused one socket
    # object for every attempt -- a socket whose connect() has failed is
    # not reliably reusable, and it was never closed when all tries failed.
    for _ in range(30):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect(("localhost", SERVER_PORT))
        except socket.error:
            pass
        else:
            return
        finally:
            sock.close()
        time.sleep(0.1)
def sound_dir():
    """Return the directory where the kit's stock sound files are installed."""
    return SOUND_DIR
def master_volume(level):
    """Set the master output volume (0..100) via the ALSA PCM mixer control."""
    if not 0 <= level <= 100:
        raise ValueError("level must be between 0 and 100.")
    shell_cmd('amixer sset PCM {}%'.format(int(level)))
def clean_close(sock):
    """Shut down and close *sock*, ignoring errors from an already-dead peer."""
    for teardown in (lambda: sock.shutdown(socket.SHUT_RDWR), sock.close):
        try:
            teardown()
        except socket.error:
            pass
class BaseSound(object):
    """Common machinery for playable sounds.

    Subclasses supply audio data by overriding ``_chunker()``; this base
    class owns a per-instance daemon thread that streams those chunks to
    the external mixer server over a localhost TCP connection.
    """
    # Executed at class-definition time (i.e. on module import): set the
    # default master volume and make sure the mixer server is running.
    master_volume(100)
    start_server()

    def __init__(self):
        self._SAMPLE_RATE = 44100       # samples per second
        self._BYTES_PER_SAMPLE = 2      # 16-bit samples
        self._CHANNELS = 1              # mono
        self._length = 0                # length in seconds; set by subclasses
        self.gain = 1                   # user gain, exposed via `volume`
        self.internal_gain = 1          # subclass gain (e.g. Note bass boost)
        self.start_time = None
        self.stop_play_mutex = RLock()  # serializes stop()/play() sequences
        self.stopped = Event()          # set whenever no playback is active
        self.stopped.set()
        self.play_msg = Queue()         # commands for the playback thread
        self.play_count = 0             # bumped each time playback starts
        self.play_thread = Thread(target=self.__play_thread)
        self.play_thread.daemon = True
        self.play_thread.start()

    def length(self):
        '''Returns the length of the sound in seconds'''
        return self._length

    def is_playing(self):
        '''Returns `True` if the sound is currently playing'''
        return not self.stopped.is_set()

    def wait(self, timeout=None):
        '''Wait until the sound has finished playing.
        If timeout is given (seconds), will return early (after the timeout
        time) even if the sound is not finished playing.
        Returns itself, so this function can be chained.
        '''
        assert self.play_thread.is_alive()
        self.stopped.wait(timeout)
        return self

    def stop(self):
        '''Immediately stop the sound from playing.
        Does nothing if the sound is not currently playing.
        Returns itself, so this function can be chained.
        '''
        assert self.play_thread.is_alive()
        with self.stop_play_mutex:
            self.play_msg.put((STOP, None))
            self.wait()
        return self

    def play(self, loops=1, duration=None):
        '''Starts playing the sound.
        This function starts playing the sound, and returns immediately - the
        sound plays in the background. To wait for the sound, use `wait()`.
        Because sound functions can be chained, to create, play and wait for a
        sound to complete can be done in one compound command. For example:
        Sound('mysound.wav').play().wait()
        `loops` is the number of times the sound should be played. `duration`
        is the length of the sound to play (or `None` to play forever, or until
        the sound ends).
        Returns itself, so this function can be chained.
        '''
        assert self.play_thread.is_alive()
        if duration and duration < 0:
            raise ValueError("duration must be a positive number")
        with self.stop_play_mutex:
            self.stop()
            # NOTE(review): end_time is recorded here but never read in this
            # class -- presumably for debugging or a future feature; confirm.
            self.end_time = time.time()
            previous_play_count = self.play_count
            self.play_msg.put((PLAY, (loops, duration)))
            # Wait until we know the play has started (i.e., the state ===
            # PLAY). Ugly (polled), but simple.
            while previous_play_count == self.play_count:
                time.sleep(0.001)
        return self

    def __play_thread(self):
        # Per-sound playback state machine.  States: STOP (idle, blocked on
        # the command queue), PLAY (streaming chunks to the mixer server),
        # FLUSH (draining the server's acknowledgements after end-of-sound).
        state = STOP
        while True:
            if state == STOP:
                msg, payload = self.play_msg.get()
                if msg == PLAY:
                    self.stopped.clear()
                    self.play_count += 1
                    loops, duration = payload
                    chunk = self._chunker(loops, duration)
                    count = 0
                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    sock.connect(("localhost", SERVER_PORT))
                    state = PLAY
            elif state == PLAY:
                try:
                    msg, payload = self.play_msg.get_nowait()
                except Empty:
                    msg = None
                if msg == STOP:
                    clean_close(sock)
                    self.stopped.set()
                    state = STOP
                else:
                    try:
                        try:
                            # Each packet: (chunk index, gain) header + audio.
                            header = pack('if', count, self.gain)
                            sock.send(header + next(chunk))
                            count += 1
                        except StopIteration:
                            # Index -1 tells the server this sound is done.
                            header = pack('if', -1, 0)
                            sock.send(header)
                            state = FLUSH
                        readable, writable, exceptional = select.select([sock], [], [sock], 0)
                        if readable:
                            c = sock.recv(1)
                            # Empty read (closed socket) or a nonzero byte
                            # from the server both signal end-of-stream.
                            eof = not c or ord(c)
                            if eof:
                                state = FLUSH
                        if exceptional:
                            state = FLUSH
                    except socket.error:
                        clean_close(sock)
                        self.stopped.set()
                        state = STOP
                    # Throttle
                    time.sleep(0.005)
            elif state == FLUSH:
                # Drain until the server closes the connection.
                while sock.recv(1):
                    pass
                clean_close(sock)
                self.stopped.set()
                state = STOP

    def _time_to_bytes(self, duration):
        # None means "no limit" and is passed through unchanged.
        if duration == None:
            return None
        samples = duration * self._SAMPLE_RATE
        return samples * self._BYTES_PER_SAMPLE

    @property
    def volume(self):
        '''The volume of the sound object
        Each sound object has an volume (independent of the `master_volume()`),
        between 0 (muted) and 100 (loudest).
        The volume is readable/writeable.
        '''
        return round(self.gain * self.internal_gain * 100)

    @volume.setter
    def volume(self, level):
        if level < 0:
            raise ValueError("level must be a positive number")
        self.gain = (level/100)/self.internal_gain

    # dummy chunking function
    def _chunker(self, loops, duration):
        # NOTE(review): returns a bytes object, but __play_thread calls
        # next() on the result -- subclasses override this with a real
        # generator, so this base version appears to be a placeholder.
        return bytes(CHUNK_BYTES)
class Sound(BaseSound):
    '''
    A Sound object, that plays sounds read in from sound files.
    In addition to the Sound object, this module provides some useful global
    functions:
    master_volume(level):
    Sets the master volume (between 0 and 100)
    of the audio out.
    sound_dir():
    Returns the sounds dir, where all sound
    files are stored.
    '''
    def __init__(self, filename):
        '''A playable sound backed by the sound file `filename` on disk.

        `filename` may alternatively be a `bytes` object containing raw
        PCM audio, which is played directly from memory.
        Throws `IOError` if the sound file cannot be read.
        '''
        super().__init__()
        self.bytes = None
        if isinstance(filename, bytes):
            # Raw PCM audio passed directly: play from an in-memory buffer.
            data = filename
            self.file_opener = partial(io.BytesIO, data)
            byte_length = len(data)
        else:
            # normalize path, relative to SOUND_DIR
            try:
                filename = os.path.normpath(os.path.join(SOUND_DIR, filename))
            except:
                raise ValueError("Filename '{}' is not valid".format(filename))
            # Is it a file? Not a definitive test here, but used as a courtesy to
            # give a better error when the filename is wrong.
            if not os.path.isfile(filename):
                raise IOError("Sound file '{}' cannot be found".format(filename))
            # Create cached file
            if not os.path.isdir(SOUND_CACHE):
                os.makedirs(SOUND_CACHE)
            _, file_ext = os.path.splitext(filename)
            if file_ext != '.raw':
                # Use sox to convert sound file to raw cached sound.  The
                # cache key is the full source path with '/' replaced by '_'.
                elongated_file_name = re.sub('/', '_', filename)
                raw_name = os.path.join(SOUND_CACHE, elongated_file_name)
                # If cached file doesn't exist, create it using sox
                if not os.path.isfile(raw_name):
                    # NOTE(review): sox failure is not detected here -- a
                    # missing/empty cache file would surface later as a
                    # zero-length sound; confirm intended.
                    soxcmd = 'sox -q {} -L -r44100 -b16 -c1 -traw {}'.format(filename, raw_name)
                    shell_cmd(soxcmd)
                    # test error
                filename = raw_name
            self.file_opener = partial(open, filename, 'rb')
            byte_length = os.path.getsize(filename)
        # Length in seconds, derived from the raw PCM byte count.
        self._length = round(byte_length / (self._SAMPLE_RATE * self._BYTES_PER_SAMPLE), 6)

    def _chunker(self, loops, duration):
        # Generator yielding fixed-size CHUNK_BYTES blocks of audio,
        # repeating the file `loops` times and stopping after `duration`
        # seconds worth of bytes (None = play to the end of each loop).
        with self.file_opener() as f:
            duration_bytes = self._time_to_bytes(duration)
            leftover = b''
            for loop in reversed(range(loops)):
                f.seek(0)
                bytes_written = 0
                while duration_bytes == None or bytes_written < duration_bytes:
                    if leftover:
                        # Complete the partial chunk carried over from the
                        # end of the previous loop.
                        chunk = leftover + f.read(CHUNK_BYTES - len(leftover))
                        leftover = b''
                    else:
                        chunk = f.read(CHUNK_BYTES)
                    if chunk:
                        if len(chunk) < CHUNK_BYTES and loop > 0:
                            # Save partial chunk as leftovers
                            leftover = chunk
                            break
                        else:
                            # Pad silence, if we're on the last loop and it's not a full chunk
                            if loop == 0:
                                chunk = chunk + bytes(CHUNK_BYTES)[len(chunk):]
                            bytes_written += CHUNK_BYTES
                            yield chunk
                    else:
                        # EOF
                        break
class Note(BaseSound):
    '''A sine wave sound object. '''
    def __init__(self, pitch):
        '''Create a sound object that is a sine wave of the given `pitch`.

        `pitch` is either a frequency in Hz (anything convertible to
        float), or a note name: a letter A-G, an optional '#' (sharp) or
        'b' (flat), and an optional octave digit (default 4) -- e.g.
        'A', 'B#', 'Cb4'.

        Raises ValueError if `pitch` is neither a number nor a valid
        note name.
        '''
        super().__init__()
        A4_frequency = 440
        A6_frequency = A4_frequency * 2 * 2  # two octaves above A4
        try:
            self.frequency = float(pitch)
        except ValueError:
            match = re.search('^([A-G])([b#]?)([0-9]?)$', pitch)
            if not match:
                # Bug fix: the original message was missing its closing
                # parenthesis.
                raise ValueError(
                    "pitch parameter must be a frequency or note (e.g. 'A', 'B#', or 'Cb4')")
            note, semitone, octave = match.groups()
            if not semitone:
                semitone_adjust = 0
            elif semitone == 'b':
                semitone_adjust = -1
            else:
                semitone_adjust = 1
            if not octave:
                octave = 4
            octave = int(octave)
            # Half steps from C0 for each natural note.
            half_step_map = {'C': 0, 'D': 2, 'E': 4, 'F': 5, 'G': 7, 'A': 9, 'B': 11}
            half_steps = octave * 12 + half_step_map[note]
            half_steps += semitone_adjust
            # Adjust half steps relative to A4 440Hz
            half_steps -= 4 * 12 + 9
            # Equal temperament: each half step is a factor of 2**(1/12).
            self.frequency = 2 ** (half_steps / 12.0) * A4_frequency
        # Simple bass boost: scale up the volume of lower frequency notes. For
        # each octave below a 'A6', double the volume
        if self.frequency < A6_frequency:
            self.internal_gain = A6_frequency / self.frequency

    def play(self, duration=1):
        '''Play the note for `duration` seconds (default 1); chainable.'''
        super().play(duration=duration)
        return self

    def _chunker(self, loops, duration):
        # Generate sine-wave chunks via the C mixer extension.
        if duration is None:
            # Effectively "play forever".
            chunks = 999999999
        else:
            chunks = int((self._time_to_bytes(duration) * loops) / CHUNK_BYTES)
        for chunk in range(chunks):
            yield mixer.note(chunk, float(self.frequency))
class Speech(Sound):
    '''A text-to-speech sound object.'''
    def __init__(self, text, espeak_options=''):
        '''Create a sound object that is text-to-speech of the given `text`.

        The sound is created using the espeak engine (an external program).
        Command line options to espeak can be added using `espeak_options`
        (a string, parsed with shell-like quoting rules).
        '''
        import shlex
        wav_fd, wav_name = tempfile.mkstemp(suffix='.wav')
        # espeak writes to the path; the open descriptor is not needed.
        os.close(wav_fd)
        # Security fix: build an argument list instead of a shell command
        # string, so quotes and shell metacharacters in `text` cannot
        # break the command or inject shell code (the original used
        # os.system with `text` interpolated inside double quotes).
        cmd = ['espeak'] + shlex.split(espeak_options) + ['-w', wav_name, str(text)]
        call(cmd)
        self.wav_name = wav_name
        super().__init__(wav_name)

    def __del__(self):
        # Best-effort cleanup of the temporary WAV file.  Guarded so a
        # partially-constructed instance (no wav_name set) or an
        # already-removed file cannot raise during interpreter teardown.
        try:
            os.remove(self.wav_name)
        except (AttributeError, OSError):
            pass
__all__ = ['Sound', 'Note', 'Speech', 'master_volume' 'sound_dir']
| {
"content_hash": "1266c158a34a7a61641782b751bda3f1",
"timestamp": "",
"source": "github",
"line_count": 418,
"max_line_length": 105,
"avg_line_length": 33.66985645933014,
"alnum_prop": 0.5271422481170953,
"repo_name": "scottsilverlabs/raspberrystem",
"id": "9e2d0c7c0c612fac5a3865aa48d58f3efa6b6edc",
"size": "14672",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rstem/sound/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "22961"
},
{
"name": "HTML",
"bytes": "231804"
},
{
"name": "Makefile",
"bytes": "10187"
},
{
"name": "Python",
"bytes": "327178"
},
{
"name": "Shell",
"bytes": "8375"
}
],
"symlink_target": ""
} |
import os
import sys
from file_manager.vhdl_reader import Vhdl_reader
from decorator.pdfdrawer import PdfDrawer
from tools.options import Options
"""
pyVhdl2Sch takes a .vhd file and return a pdf : name_of_the_entity.pdf.
"""
options = Options()
files = []
options.analyse_args(sys.argv)
for i in range(0, len(options.files)):
filename = options.files[i]
try:
os.path.isfile(filename)
except:
print("File do not exist!\n")
options.print_usage()
sys.exit
reader = Vhdl_reader(filename, options)
options.filename = "%s." % reader.entity.name + "%s" % options.format
drawer = PdfDrawer("%s." % reader.entity.name + "%s" %
options.format, reader.entity, options)
print(("The schematic was generated and named : %s." % reader.entity.name + "%s" % options.format))
| {
"content_hash": "9a98bceccf87bb310cbe1e7c66f95f70",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 103,
"avg_line_length": 27.387096774193548,
"alnum_prop": 0.6513545347467609,
"repo_name": "LaurentCabaret/pyVhdl2Sch",
"id": "6e733ceadf6d07fd8dc853775b1b6b97d10ce038",
"size": "892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyV2S.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "78"
},
{
"name": "Python",
"bytes": "65699"
},
{
"name": "Shell",
"bytes": "178"
},
{
"name": "VHDL",
"bytes": "10880"
}
],
"symlink_target": ""
} |
"""
Test for pytimeseries library
"""
from transformer import transformer
from AR import AR
from AR import AR_Ridge_2
from AR import AR_Lasso
from AR import AR_ElasticNet
from HoltWinters import HoltWinters
import pandas
import matplotlib
import matplotlib.pyplot

# Bug fixes:
#  * `import matplotlib` alone does not make `matplotlib.pyplot` available;
#    the submodule must be imported explicitly (done above).
#  * `pandas.Series.from_csv` was deprecated and later removed; use
#    `pandas.read_csv` instead (from_csv parsed dates by default, so
#    parse_dates=True preserves the old behavior).
def _load_series(path):
    """Read a single-column, date-indexed CSV file as a pandas Series."""
    frame = pandas.read_csv(path, index_col=0, header=0, parse_dates=True)
    return frame.iloc[:, 0]

ts = _load_series('champagne.csv')
#ts = _load_series('champagne_short.csv')

# Exercise simple exponential smoothing (beta/gamma disabled) ...
model = HoltWinters(alpha=0.9, beta=False, gamma=False)
result = model.predict(ts)
# ... and double exponential smoothing (trend enabled).
model_2 = HoltWinters(alpha=0.9, beta=0.1, gamma=False)
result_2 = model_2.predict(ts)

matplotlib.pyplot.plot(ts)
matplotlib.pyplot.plot(result)
matplotlib.pyplot.plot(result_2)
matplotlib.pyplot.show()
| {
"content_hash": "efd0ff6aee44d1dd46db4590ded4fb2b",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 78,
"avg_line_length": 20.735294117647058,
"alnum_prop": 0.7056737588652482,
"repo_name": "jdvelasq/pytimeseries",
"id": "d565585ab8f8f51839e1fb6a20cf57f01df2fae9",
"size": "1410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytimeseries/pytimeseries_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "200429"
},
{
"name": "Python",
"bytes": "97700"
}
],
"symlink_target": ""
} |
import unittest
from .. import sqltest
# Import the class under test, recording (rather than raising) any
# ImportError so the suite can report it as an ordinary test failure
# in TestCase00 and skip the dependent tests.
import_error = False
try:
    from ...auction.seller import Seller
except ImportError:
    import_error = True
    Seller = None
class TestCase00(unittest.TestCase):
    # Fails (rather than silently skipping) when the module under test
    # could not be imported.
    def test_import(self):
        self.assertFalse(import_error)
class TestCase01(sqltest.TestSQL):
    """Constructs a Seller against the test database fixture."""

    def setUp(self):
        super(TestCase01, self).setUp()
        # Guard clause: skipTest raises SkipTest, so execution never
        # reaches the construction below when the import failed.
        if import_error:
            self.skipTest('ImportError')
        self.ob = Seller(self.db, fail=True)
| {
"content_hash": "ce6ada8c14a9eb93a0ed0862509946af",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 48,
"avg_line_length": 20.916666666666668,
"alnum_prop": 0.649402390438247,
"repo_name": "AdamGagorik/pydarkstar",
"id": "e4e0b71adc18b1e642b9738c2b2c602306dafccb",
"size": "502",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pydarkstar/tests/auction/test_seller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "532"
},
{
"name": "Python",
"bytes": "119228"
}
],
"symlink_target": ""
} |
import marshmallow as ma
import peewee as pw
import pytest
from marshmallow_peewee.fields import ForeignKey
from .models import User
@pytest.fixture
def convertor():
    """Provide a ModelConverter bound to the User model's schema options."""
    from marshmallow_peewee.convert import ModelConverter
    from marshmallow_peewee.schema import SchemaOpts

    class Meta:
        model = User

    opts = SchemaOpts(Meta)
    return ModelConverter(opts)
def test_boolean(convertor):
    """A peewee BooleanField converts to ma.fields.Boolean, keeping its
    default and metadata."""
    field = convertor.convert_field(User.active)
    assert field
    assert isinstance(field, ma.fields.Boolean)
    assert field.load_default is True
    assert field.metadata
    assert field.metadata["description"] == "Is user active"
def test_deferred(convertor):
    """A DeferredForeignKey converts to the marshmallow-peewee ForeignKey."""
    class Test(pw.Model):
        user = pw.DeferredForeignKey("Child")

    field = convertor.convert_field(Test.user)
    assert isinstance(field, ForeignKey)
| {
"content_hash": "b72abf104ddce0eddc91f9b60f03ffd8",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 63,
"avg_line_length": 24.314285714285713,
"alnum_prop": 0.7344300822561692,
"repo_name": "klen/marshmallow-peewee",
"id": "6ccf1f6a744bb3f0f667de788778426533e3d181",
"size": "851",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/test_convert.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1413"
},
{
"name": "Python",
"bytes": "24790"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import os
import random
import sys
import time
import traceback
import types
from collections import defaultdict
from copy import deepcopy
from shutil import move
from tempfile import mkstemp
from django.conf import settings
from django.core.cache import cache
import six
try:
from importlib import import_module
except ImportError: # python < 2.7 compatibility
from django.utils.importlib import import_module
from graphite.logger import log
from graphite.node import LeafNode
from graphite.intervals import Interval, IntervalSet
from graphite.finders.utils import FindQuery, BaseFinder
from graphite.readers import MultiReader
from graphite.worker_pool.pool import get_pool, pool_exec, Job, PoolTimeoutError
from graphite.render.grammar import grammar
def get_finders(finder_path):
    """Instantiate the finder class named by the dotted path *finder_path*.

    Returns a list of finder instances.  A class exposing a ``factory``
    attribute delegates construction to it; otherwise a single instance
    is built and legacy finders are monkey patched with the BaseFinder
    helper methods so they keep working.
    """
    module_name, class_name = finder_path.rsplit('.', 1)
    finder_cls = getattr(import_module(module_name), class_name)
    if getattr(finder_cls, 'factory', None):
        return finder_cls.factory()
    # monkey patch so legacy finders will work
    finder = finder_cls()
    for method_name in ('fetch', 'find_multi', 'get_index'):
        method = getattr(BaseFinder, method_name)
        if sys.version_info[0] < 3:
            # Python 2: unwrap the unbound method to its plain function.
            method = method.__func__
        setattr(finder, method_name, types.MethodType(method, finder))
    return [finder]
def get_tagdb(tagdb_path):
    """Instantiate the TagDB class named by the dotted path *tagdb_path*."""
    module_name, class_name = tagdb_path.rsplit('.', 1)
    tagdb_cls = getattr(import_module(module_name), class_name)
    return tagdb_cls(settings, cache=cache, log=log)
class Store(object):
    """Facade over all configured metric finders and the tag database.

    Fans find/fetch/autocomplete requests out to every enabled finder
    (optionally in a worker pool), merges the results, and resolves
    seriesByTag() expressions through the tag database for finders that
    do not support tags natively.
    """
    def __init__(self, finders=None, tagdb=None):
        # Finders/tagdb default to the classes named in Django settings.
        if finders is None:
            finders = []
            for finder_path in settings.STORAGE_FINDERS:
                finders.extend(get_finders(finder_path))
        self.finders = finders
        if tagdb is None:
            tagdb = get_tagdb(settings.TAGDB or 'graphite.tags.base.DummyTagDB')
        self.tagdb = tagdb

    def get_finders(self, local=False):
        """Yield enabled finders; with local=True, only local ones."""
        for finder in self.finders:
            # Support legacy finders by defaulting to 'disabled = False'
            if getattr(finder, 'disabled', False):
                continue
            # Support legacy finders by defaulting to 'local = True'
            if local and not getattr(finder, 'local', True):
                continue
            yield finder

    def pool_exec(self, jobs, timeout):
        """Run *jobs* on the finder worker pool (thread_count=0 runs inline)."""
        if not jobs:
            return []
        thread_count = 0
        if settings.USE_WORKER_POOL:
            thread_count = min(len(self.finders), settings.POOL_MAX_WORKERS)
        return pool_exec(get_pool('finders', thread_count), jobs, timeout)

    def wait_jobs(self, jobs, timeout, context):
        """Collect results from *jobs*, raising when too many fail.

        Raises if all jobs fail, if a timeout occurs before any job
        completes, or (with STORE_FAIL_ON_ERROR) if any job fails.
        *context* is only used in log/exception messages.
        """
        if not jobs:
            return []
        start = time.time()
        results = []
        failed = []
        done = 0
        try:
            for job in self.pool_exec(jobs, timeout):
                elapsed = time.time() - start
                done += 1
                if job.exception:
                    failed.append(job)
                    log.info("Exception during %s after %fs: %s" % (
                        job, elapsed, str(job.exception))
                    )
                else:
                    log.debug("Got a result for %s after %fs" % (job, elapsed))
                    results.append(job.result)
        except PoolTimeoutError:
            # Only fatal when nothing at all completed before the timeout.
            message = "Timed out after %fs for %s" % (
                time.time() - start, context
            )
            log.info(message)
            if done == 0:
                raise Exception(message)
        if len(failed) == done:
            message = "All requests failed for %s (%d)" % (
                context, len(failed)
            )
            for job in failed:
                message += "\n\n%s: %s: %s" % (
                    job, job.exception,
                    '\n'.join(traceback.format_exception(*job.exception_info))
                )
            raise Exception(message)
        if len(results) < len(jobs) and settings.STORE_FAIL_ON_ERROR:
            message = "%s request(s) failed for %s (%d)" % (
                len(jobs) - len(results), context, len(jobs)
            )
            for job in failed:
                message += "\n\n%s: %s: %s" % (
                    job, job.exception,
                    '\n'.join(traceback.format_exception(*job.exception_info))
                )
            raise Exception(message)
        return results

    def fetch(self, patterns, startTime, endTime, now, requestContext):
        """Fetch data for *patterns* from all finders; returns a flat list
        of result dicts, duplicating results for seriesByTag aliases."""
        # deduplicate patterns
        patterns = sorted(set(patterns))
        if not patterns:
            return []
        log.debug(
            'graphite.storage.Store.fetch :: Starting fetch on all backends')
        jobs = []
        tag_patterns = None
        pattern_aliases = defaultdict(list)
        for finder in self.get_finders(requestContext.get('localOnly')):
            # if the finder supports tags, just pass the patterns through
            if getattr(finder, 'tags', False):
                job = Job(
                    finder.fetch, 'fetch for %s' % patterns,
                    patterns, startTime, endTime,
                    now=now, requestContext=requestContext
                )
                jobs.append(job)
                continue
            # if we haven't resolved the seriesByTag calls, build resolved patterns and translation table
            if tag_patterns is None:
                tag_patterns, pattern_aliases = self._tag_patterns(patterns, requestContext)
            # dispatch resolved patterns to finder
            job = Job(
                finder.fetch,
                'fetch for %s' % tag_patterns,
                tag_patterns, startTime, endTime,
                now=now, requestContext=requestContext
            )
            jobs.append(job)
        done = 0
        errors = 0
        # Start fetches
        start = time.time()
        results = self.wait_jobs(jobs, settings.FETCH_TIMEOUT,
                                 'fetch for %s' % str(patterns))
        results = [i for l in results for i in l]  # flatten
        # translate path expressions for responses from resolved seriesByTag patterns
        # NOTE(review): appends to `results` while iterating it; the
        # appended copies fail the `if` condition so this terminates, but
        # it relies on that invariant.
        for result in results:
            if result['name'] == result['pathExpression'] and result['pathExpression'] in pattern_aliases:
                for pathExpr in pattern_aliases[result['pathExpression']]:
                    newresult = deepcopy(result)
                    newresult['pathExpression'] = pathExpr
                    results.append(newresult)
        log.debug("Got all fetch results for %s in %fs" % (str(patterns), time.time() - start))
        return results

    def _tag_patterns(self, patterns, requestContext):
        """Resolve seriesByTag() patterns via the tagdb.

        Returns (sorted list of resolved patterns, alias map from resolved
        series name back to the originating seriesByTag pattern(s))."""
        tag_patterns = []
        pattern_aliases = defaultdict(list)
        for pattern in patterns:
            # if pattern isn't a seriesByTag call, just add it to the list
            if not pattern.startswith('seriesByTag('):
                tag_patterns.append(pattern)
                continue
            # perform the tagdb lookup
            exprs = tuple([
                t.string[1:-1]
                for t in grammar.parseString(pattern).expression.call.args
                if t.string
            ])
            taggedSeries = self.tagdb.find_series(exprs, requestContext=requestContext)
            if not taggedSeries:
                continue
            # add to translation table for path matching
            for series in taggedSeries:
                pattern_aliases[series].append(pattern)
            # add to list of resolved patterns
            tag_patterns.extend(taggedSeries)
        return sorted(set(tag_patterns)), pattern_aliases

    def get_index(self, requestContext=None):
        """Return the sorted, de-duplicated union of all finders' indexes."""
        log.debug('graphite.storage.Store.get_index :: Starting get_index on all backends')
        if not requestContext:
            requestContext = {}
        context = 'get_index'
        jobs = [
            Job(finder.get_index, context, requestContext=requestContext)
            for finder in self.get_finders(local=requestContext.get('localOnly'))
        ]
        start = time.time()
        results = self.wait_jobs(jobs, settings.FETCH_TIMEOUT, context)
        results = [i for l in results if l is not None for i in l]  # flatten
        log.debug("Got all index results in %fs" % (time.time() - start))
        return sorted(list(set(results)))

    def find(self, pattern, startTime=None, endTime=None, local=False, headers=None, leaves_only=False):
        """Generator over nodes matching *pattern*, enforcing the
        configured warning/failure thresholds on result count."""
        query = FindQuery(
            pattern, startTime, endTime,
            local=local,
            headers=headers,
            leaves_only=leaves_only
        )
        warn_threshold = settings.METRICS_FIND_WARNING_THRESHOLD
        fail_threshold = settings.METRICS_FIND_FAILURE_THRESHOLD
        matched_leafs = 0
        for match in self._find(query):
            if isinstance(match, LeafNode):
                matched_leafs += 1
            elif leaves_only:
                continue
            if matched_leafs > fail_threshold:
                raise Exception(
                    ("Query %s yields too many results and failed "
                     "(failure threshold is %d)") % (pattern, fail_threshold))
            yield match
        if matched_leafs > warn_threshold:
            log.warning(
                ("Query %s yields large number of results up to %d "
                 "(warning threshold is %d)") % (
                    pattern, matched_leafs, warn_threshold))

    def _find(self, query):
        """Run *query* against all applicable finders and merge the nodes."""
        context = 'find %s' % query
        jobs = [
            Job(finder.find_nodes, context, query)
            for finder in self.get_finders(query.local)
        ]
        # Group matching nodes by their path
        nodes_by_path = defaultdict(list)
        # Start finds
        start = time.time()
        results = self.wait_jobs(jobs, settings.FIND_TIMEOUT, context)
        for result in results:
            for node in result or []:
                nodes_by_path[node.path].append(node)
        log.debug("Got all find results for %s in %fs" % (
            str(query), time.time() - start)
        )
        return self._list_nodes(query, nodes_by_path)

    def _list_nodes(self, query, nodes_by_path):
        """Yield one node per path: branches de-duplicated, leaves merged."""
        # Reduce matching nodes for each path to a minimal set
        found_branch_nodes = set()
        items = list(six.iteritems(nodes_by_path))
        # NOTE(review): shuffle presumably spreads load across backends
        # when results are truncated downstream -- confirm.
        random.shuffle(items)
        for path, nodes in items:
            leaf_nodes = []
            # First we dispense with the BranchNodes
            for node in nodes:
                if node.is_leaf:
                    leaf_nodes.append(node)
                # TODO need to filter branch nodes based on requested
                # interval... how?!?!?
                elif node.path not in found_branch_nodes:
                    yield node
                    found_branch_nodes.add(node.path)
            leaf_node = self._merge_leaf_nodes(query, path, leaf_nodes)
            if leaf_node:
                yield leaf_node

    def _merge_leaf_nodes(self, query, path, leaf_nodes):
        """Get a single node from a list of leaf nodes."""
        if not leaf_nodes:
            return None
        # Fast-path when there is a single node.
        if len(leaf_nodes) == 1:
            return leaf_nodes[0]
        # Calculate best minimal node set
        minimal_node_set = set()
        covered_intervals = IntervalSet([])
        # If the query doesn't fall entirely within the FIND_TOLERANCE window
        # we disregard the window. This prevents unnecessary remote fetches
        # caused when carbon's cache skews node.intervals, giving the appearance
        # remote systems have data we don't have locally, which we probably
        # do.
        now = int(time.time())
        tolerance_window = now - settings.FIND_TOLERANCE
        disregard_tolerance_window = query.interval.start < tolerance_window
        prior_to_window = Interval(float('-inf'), tolerance_window)

        def measure_of_added_coverage(
                node, drop_window=disregard_tolerance_window):
            # How much new query-interval coverage this node would add.
            relevant_intervals = node.intervals.intersect_interval(
                query.interval)
            if drop_window:
                relevant_intervals = relevant_intervals.intersect_interval(
                    prior_to_window)
            return covered_intervals.union(
                relevant_intervals).size - covered_intervals.size

        nodes_remaining = list(leaf_nodes)
        # Prefer local nodes first (and do *not* drop the tolerance window)
        for node in leaf_nodes:
            if node.local and measure_of_added_coverage(node, False) > 0:
                nodes_remaining.remove(node)
                minimal_node_set.add(node)
                covered_intervals = covered_intervals.union(node.intervals)
        if settings.REMOTE_STORE_MERGE_RESULTS:
            remote_nodes = [n for n in nodes_remaining if not n.local]
            for node in remote_nodes:
                nodes_remaining.remove(node)
                minimal_node_set.add(node)
                covered_intervals = covered_intervals.union(node.intervals)
        else:
            # Greedy set-cover: repeatedly take the node adding the most
            # uncovered interval.
            while nodes_remaining:
                node_coverages = [(measure_of_added_coverage(n), n)
                                  for n in nodes_remaining]
                best_coverage, best_node = max(node_coverages)
                if best_coverage == 0:
                    break
                nodes_remaining.remove(best_node)
                minimal_node_set.add(best_node)
                covered_intervals = covered_intervals.union(
                    best_node.intervals)
        # Sometimes the requested interval falls within the caching window.
        # We include the most likely node if the gap is within
        # tolerance.
        if not minimal_node_set:
            def distance_to_requested_interval(node):
                if not node.intervals:
                    return float('inf')
                latest = sorted(
                    node.intervals, key=lambda i: i.end)[-1]
                distance = query.interval.start - latest.end
                return distance if distance >= 0 else float('inf')

            best_candidate = min(
                leaf_nodes, key=distance_to_requested_interval)
            if distance_to_requested_interval(
                    best_candidate) <= settings.FIND_TOLERANCE:
                minimal_node_set.add(best_candidate)
        if not minimal_node_set:
            return None
        elif len(minimal_node_set) == 1:
            return minimal_node_set.pop()
        else:
            reader = MultiReader(minimal_node_set)
            return LeafNode(path, reader)

    def tagdb_auto_complete_tags(self, exprs, tagPrefix=None, limit=None, requestContext=None):
        """Autocomplete tag names across tag-capable finders and the tagdb."""
        log.debug(
            'graphite.storage.Store.auto_complete_tags :: Starting lookup on all backends')
        if requestContext is None:
            requestContext = {}
        context = 'tags for %s %s' % (str(exprs), tagPrefix or '')
        jobs = []
        use_tagdb = False
        for finder in self.get_finders(requestContext.get('localOnly')):
            if getattr(finder, 'tags', False):
                job = Job(
                    finder.auto_complete_tags, context,
                    exprs, tagPrefix=tagPrefix,
                    limit=limit, requestContext=requestContext
                )
                jobs.append(job)
            else:
                use_tagdb = True
        results = set()
        # if we're using the local tagdb then execute it (in the main thread
        # so that LocalDatabaseTagDB will work)
        if use_tagdb:
            results.update(self.tagdb.auto_complete_tags(
                exprs, tagPrefix=tagPrefix,
                limit=limit, requestContext=requestContext
            ))
        # Start fetches
        start = time.time()
        for result in self.wait_jobs(jobs, settings.FIND_TIMEOUT, context):
            results.update(result)
        # sort & limit results
        results = sorted(results)
        if limit:
            results = results[:int(limit)]
        log.debug("Got all autocomplete %s in %fs" % (
            context, time.time() - start)
        )
        return results

    def tagdb_auto_complete_values(self, exprs, tag, valuePrefix=None, limit=None, requestContext=None):
        """Autocomplete values of *tag* across tag-capable finders and the tagdb."""
        log.debug(
            'graphite.storage.Store.auto_complete_values :: Starting lookup on all backends')
        if requestContext is None:
            requestContext = {}
        context = 'values for %s %s %s' % (str(exprs), tag, valuePrefix or '')
        jobs = []
        use_tagdb = False
        for finder in self.get_finders(requestContext.get('localOnly')):
            if getattr(finder, 'tags', False):
                job = Job(
                    finder.auto_complete_values, context,
                    exprs, tag, valuePrefix=valuePrefix,
                    limit=limit, requestContext=requestContext
                )
                jobs.append(job)
            else:
                use_tagdb = True
        # start finder jobs
        start = time.time()
        results = set()
        # if we're using the local tagdb then execute it (in the main thread
        # so that LocalDatabaseTagDB will work)
        if use_tagdb:
            results.update(self.tagdb.auto_complete_values(
                exprs, tag, valuePrefix=valuePrefix,
                limit=limit, requestContext=requestContext
            ))
        for result in self.wait_jobs(jobs, settings.FIND_TIMEOUT, context):
            results.update(result)
        # sort & limit results
        results = sorted(results)
        if limit:
            results = results[:int(limit)]
        log.debug("Got all autocomplete %s in %fs" % (
            context, time.time() - start)
        )
        return results
def extractForwardHeaders(request):
    """Collect the configured forwardable HTTP headers from *request*."""
    headers = {}
    for header_name in settings.REMOTE_STORE_FORWARD_HEADERS:
        # Django exposes incoming headers in META as HTTP_<NAME> with
        # dashes replaced by underscores.
        meta_key = 'HTTP_%s' % header_name.upper().replace('-', '_')
        header_value = request.META.get(meta_key)
        if header_value is not None:
            headers[header_name] = header_value
    return headers
def write_index(index=None):
    """Atomically (re)write the metric index file.

    Writes every metric name from the global STORE to a temporary file and
    moves it into place, so readers never observe a half-written index.
    `index` defaults to settings.INDEX_FILE.
    """
    if not index:
        index = settings.INDEX_FILE
    # Bug fixes: mkstemp() was inside the outer try, so if it raised the
    # finally clause hit an unbound `tmp` (and similarly `tmp_index` if
    # os.fdopen raised); the bare `except:` is narrowed to OSError so
    # unrelated errors are no longer swallowed.
    fd, tmp = mkstemp()
    try:
        tmp_index = os.fdopen(fd, 'wt')
        try:
            for metric in STORE.get_index():
                tmp_index.write("{0}\n".format(metric))
        finally:
            tmp_index.close()
        move(tmp, index)
    finally:
        # Best-effort removal; after a successful move the file is gone.
        try:
            os.unlink(tmp)
        except OSError:
            pass
    return None
# Module-level singleton used throughout the webapp and by write_index().
STORE = Store()
| {
"content_hash": "e318be45eb2fb042cff9b83d0aa1b2ad",
"timestamp": "",
"source": "github",
"line_count": 538,
"max_line_length": 104,
"avg_line_length": 34.853159851301115,
"alnum_prop": 0.5738360620766892,
"repo_name": "DanCech/graphite-web",
"id": "3c25bc9d9253e569ce86392ab55d38b2d627e1d9",
"size": "18751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/graphite/storage.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "150191"
},
{
"name": "HTML",
"bytes": "21521"
},
{
"name": "JavaScript",
"bytes": "1690375"
},
{
"name": "Perl",
"bytes": "857"
},
{
"name": "Python",
"bytes": "1234658"
},
{
"name": "Ruby",
"bytes": "1950"
},
{
"name": "Shell",
"bytes": "1113"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the ``state`` column to the ``Debug`` model (api app).

    Auto-generated Django schema migration; do not alter the operations
    once this migration has been applied to any database.
    """
    dependencies = [
        # Must run after the app's initial schema migration.
        ('api', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='debug',
            name='state',
            # NOTE(review): the default is a *bytes* literal (b'off') —
            # consistent with ``unicode_literals`` on Python 2, but confirm
            # it behaves as intended if this ever runs under Python 3.
            field=models.CharField(default=b'off', max_length=3),
        ),
    ]
| {
"content_hash": "a33a4a47bbc1bf882e327f0237bf5919",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 65,
"avg_line_length": 20.77777777777778,
"alnum_prop": 0.5775401069518716,
"repo_name": "opentrv/ors",
"id": "03fa9a010c55cbf72b1264d1a583cc60aaa5a799",
"size": "398",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/migrations/0002_debug_state.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5146"
},
{
"name": "HTML",
"bytes": "8275"
},
{
"name": "JavaScript",
"bytes": "126094"
},
{
"name": "Jupyter Notebook",
"bytes": "23177"
},
{
"name": "Python",
"bytes": "167279"
},
{
"name": "Shell",
"bytes": "690"
}
],
"symlink_target": ""
} |
import ast
from typing import Optional, TYPE_CHECKING
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QComboBox, QTabWidget, QDialog,
QSpinBox, QFileDialog, QCheckBox, QLabel,
QVBoxLayout, QGridLayout, QLineEdit,
QPushButton, QWidget, QHBoxLayout)
from electrum_ltc.i18n import _, languages
from electrum_ltc import util, coinchooser, paymentrequest
from electrum_ltc.util import base_units_list, event_listener
from electrum_ltc.gui import messages
from .util import (ColorScheme, WindowModalDialog, HelpLabel, Buttons,
CloseButton, QtEventListener)
if TYPE_CHECKING:
from electrum_ltc.simple_config import SimpleConfig
from .main_window import ElectrumWindow
class SettingsDialog(QDialog, QtEventListener):
    """Modal 'Preferences' dialog for an Electrum wallet window.

    Every settings widget is built inline in ``__init__`` and wired to a
    local closure that writes the change back to ``self.config`` (and, where
    needed, emits an app-level refresh signal). ``self.need_restart`` is set
    by handlers whose change only takes effect after a restart; the caller
    is expected to read it after the dialog closes.
    """
    def __init__(self, window: 'ElectrumWindow', config: 'SimpleConfig'):
        """Build the whole dialog from the parent *window* and its *config*.

        NOTE: statement order matters in several places (e.g. ``batch_rbf_cb``
        is referenced by ``on_use_rbf`` before it is created below it, relying
        on late binding; ``nz`` is captured by the unit-combo closure).
        """
        QDialog.__init__(self)
        self.setWindowTitle(_('Preferences'))
        self.setMinimumWidth(500)
        self.config = config
        self.network = window.network
        self.app = window.app
        self.need_restart = False
        self.fx = window.fx
        self.wallet = window.wallet
        self.register_callbacks()
        self.app.alias_received_signal.connect(self.set_alias_color)
        vbox = QVBoxLayout()
        tabs = QTabWidget()
        # language
        lang_help = _('Select which language is used in the GUI (after restart).')
        lang_label = HelpLabel(_('Language') + ':', lang_help)
        lang_combo = QComboBox()
        lang_combo.addItems(list(languages.values()))
        lang_keys = list(languages.keys())
        lang_cur_setting = self.config.get("language", '')
        try:
            index = lang_keys.index(lang_cur_setting)
        except ValueError:  # configured language not in the known list
            index = 0
        lang_combo.setCurrentIndex(index)
        if not self.config.is_modifiable('language'):
            for w in [lang_combo, lang_label]: w.setEnabled(False)
        def on_lang(x):
            lang_request = list(languages.keys())[lang_combo.currentIndex()]
            if lang_request != self.config.get('language'):
                self.config.set_key("language", lang_request, True)
                self.need_restart = True
        lang_combo.currentIndexChanged.connect(on_lang)
        # number of zeros after the decimal point
        nz_help = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
        nz_label = HelpLabel(_('Zeros after decimal point') + ':', nz_help)
        nz = QSpinBox()
        nz.setMinimum(0)
        nz.setMaximum(self.config.decimal_point)
        nz.setValue(self.config.num_zeros)
        if not self.config.is_modifiable('num_zeros'):
            for w in [nz, nz_label]: w.setEnabled(False)
        def on_nz():
            value = nz.value()
            if self.config.num_zeros != value:
                self.config.num_zeros = value
                self.config.set_key('num_zeros', value, True)
                self.app.refresh_tabs_signal.emit()
        nz.valueChanged.connect(on_nz)
        # invoices
        bolt11_fallback_cb = QCheckBox(_('Add on-chain fallback to lightning invoices'))
        bolt11_fallback_cb.setChecked(bool(self.config.get('bolt11_fallback', True)))
        bolt11_fallback_cb.setToolTip(_('Add fallback addresses to BOLT11 lightning invoices.'))
        def on_bolt11_fallback(x):
            self.config.set_key('bolt11_fallback', bool(x))
        bolt11_fallback_cb.stateChanged.connect(on_bolt11_fallback)
        bip21_lightning_cb = QCheckBox(_('Add lightning invoice to litecoin URIs'))
        bip21_lightning_cb.setChecked(bool(self.config.get('bip21_lightning', False)))
        bip21_lightning_cb.setToolTip(_('This may create larger qr codes.'))
        def on_bip21_lightning(x):
            self.config.set_key('bip21_lightning', bool(x))
        bip21_lightning_cb.stateChanged.connect(on_bip21_lightning)
        # RBF / transaction replacement
        use_rbf = bool(self.config.get('use_rbf', True))
        use_rbf_cb = QCheckBox(_('Use Replace-By-Fee'))
        use_rbf_cb.setChecked(use_rbf)
        use_rbf_cb.setToolTip(
            _('If you check this box, your transactions will be marked as non-final,') + '\n' + \
            _('and you will have the possibility, while they are unconfirmed, to replace them with transactions that pay higher fees.') + '\n' + \
            _('Note that some merchants do not accept non-final transactions until they are confirmed.'))
        def on_use_rbf(x):
            # batch_rbf_cb is defined below; the closure resolves it lazily.
            self.config.set_key('use_rbf', bool(x))
            batch_rbf_cb.setEnabled(bool(x))
        use_rbf_cb.stateChanged.connect(on_use_rbf)
        batch_rbf_cb = QCheckBox(_('Batch RBF transactions'))
        batch_rbf_cb.setChecked(bool(self.config.get('batch_rbf', False)))
        batch_rbf_cb.setEnabled(use_rbf)
        batch_rbf_cb.setToolTip(
            _('If you check this box, your unconfirmed transactions will be consolidated into a single transaction.') + '\n' + \
            _('This will save fees.'))
        def on_batch_rbf(x):
            self.config.set_key('batch_rbf', bool(x))
        batch_rbf_cb.stateChanged.connect(on_batch_rbf)
        # lightning
        help_recov = _(messages.MSG_RECOVERABLE_CHANNELS)
        recov_cb = QCheckBox(_("Create recoverable channels"))
        enable_toggle_use_recoverable_channels = bool(self.wallet.lnworker and self.wallet.lnworker.can_have_recoverable_channels())
        recov_cb.setEnabled(enable_toggle_use_recoverable_channels)
        recov_cb.setToolTip(messages.to_rtf(help_recov))
        recov_cb.setChecked(bool(self.config.get('use_recoverable_channels', True)) and enable_toggle_use_recoverable_channels)
        def on_recov_checked(x):
            self.config.set_key('use_recoverable_channels', bool(x))
        recov_cb.stateChanged.connect(on_recov_checked)
        help_trampoline = _(messages.MSG_HELP_TRAMPOLINE)
        trampoline_cb = QCheckBox(_("Use trampoline routing (disable gossip)"))
        trampoline_cb.setToolTip(messages.to_rtf(help_trampoline))
        trampoline_cb.setChecked(not bool(self.config.get('use_gossip', False)))
        def on_trampoline_checked(use_trampoline):
            # The stored setting is the inverse of the checkbox: gossip on/off.
            use_gossip = not bool(use_trampoline)
            self.config.set_key('use_gossip', use_gossip)
            if use_gossip:
                self.network.start_gossip()
            else:
                self.network.run_from_another_thread(
                    self.network.stop_gossip())
            util.trigger_callback('ln_gossip_sync_progress')
            # FIXME: update all wallet windows
            util.trigger_callback('channels_updated', self.wallet)
        trampoline_cb.stateChanged.connect(on_trampoline_checked)
        help_instant_swaps = ' '.join([
            _("If this option is checked, your client will complete reverse swaps before the funding transaction is confirmed."),
            _("Note you are at risk of losing the funds in the swap, if the funding transaction never confirms.")
            ])
        instant_swaps_cb = QCheckBox(_("Allow instant swaps"))
        instant_swaps_cb.setToolTip(messages.to_rtf(help_instant_swaps))
        instant_swaps_cb.setChecked(bool(self.config.get('allow_instant_swaps', False)))
        def on_instant_swaps_checked(allow_instant_swaps):
            self.config.set_key('allow_instant_swaps', bool(allow_instant_swaps))
        instant_swaps_cb.stateChanged.connect(on_instant_swaps_checked)
        help_remote_wt = ' '.join([
            _("A watchtower is a daemon that watches your channels and prevents the other party from stealing funds by broadcasting an old state."),
            _("If you have private a watchtower, enter its URL here."),
            _("Check our online documentation if you want to configure Electrum as a watchtower."),
        ])
        remote_wt_cb = QCheckBox(_("Use a remote watchtower"))
        remote_wt_cb.setToolTip('<p>'+help_remote_wt+'</p>')
        remote_wt_cb.setChecked(bool(self.config.get('use_watchtower', False)))
        def on_remote_wt_checked(x):
            self.config.set_key('use_watchtower', bool(x))
            self.watchtower_url_e.setEnabled(bool(x))
        remote_wt_cb.stateChanged.connect(on_remote_wt_checked)
        watchtower_url = self.config.get('watchtower_url')
        self.watchtower_url_e = QLineEdit(watchtower_url)
        self.watchtower_url_e.setEnabled(self.config.get('use_watchtower', False))
        def on_wt_url():
            url = self.watchtower_url_e.text() or None
            watchtower_url = self.config.set_key('watchtower_url', url)
        self.watchtower_url_e.editingFinished.connect(on_wt_url)
        # OpenAlias
        msg = _('OpenAlias record, used to receive coins and to sign payment requests.') + '\n\n'\
              + _('The following alias providers are available:') + '\n'\
              + '\n'.join(['https://cryptoname.co/', 'http://xmr.link']) + '\n\n'\
              + 'For more information, see https://openalias.org'
        alias_label = HelpLabel(_('OpenAlias') + ':', msg)
        alias = self.config.get('alias','')
        self.alias_e = QLineEdit(alias)
        self.set_alias_color()
        self.alias_e.editingFinished.connect(self.on_alias_edit)
        msat_cb = QCheckBox(_("Show Lightning amounts with msat precision"))
        msat_cb.setChecked(bool(self.config.get('amt_precision_post_satoshi', False)))
        def on_msat_checked(v):
            prec = 3 if v == Qt.Checked else 0
            if self.config.amt_precision_post_satoshi != prec:
                self.config.amt_precision_post_satoshi = prec
                self.config.set_key('amt_precision_post_satoshi', prec)
                self.app.refresh_tabs_signal.emit()
        msat_cb.stateChanged.connect(on_msat_checked)
        # units
        units = base_units_list
        msg = (_('Base unit of your wallet.')
               + '\n1 LTC = 1000 mLTC. 1 mLTC = 1000 uLTC. 1 uLTC = 100 sat.\n'
               + _('This setting affects the Send tab, and all balance related fields.'))
        unit_label = HelpLabel(_('Base unit') + ':', msg)
        unit_combo = QComboBox()
        unit_combo.addItems(units)
        unit_combo.setCurrentIndex(units.index(self.config.get_base_unit()))
        def on_unit(x, nz):
            unit_result = units[unit_combo.currentIndex()]
            if self.config.get_base_unit() == unit_result:
                return
            self.config.set_base_unit(unit_result)
            # The max number of decimal zeros depends on the base unit.
            nz.setMaximum(self.config.decimal_point)
            self.app.refresh_tabs_signal.emit()
            self.app.update_status_signal.emit()
            self.app.refresh_amount_edits_signal.emit()
        unit_combo.currentIndexChanged.connect(lambda x: on_unit(x, nz))
        thousandsep_cb = QCheckBox(_("Add thousand separators to litecoin amounts"))
        thousandsep_cb.setChecked(bool(self.config.get('amt_add_thousands_sep', False)))
        def on_set_thousandsep(v):
            checked = v == Qt.Checked
            if self.config.amt_add_thousands_sep != checked:
                self.config.amt_add_thousands_sep = checked
                self.config.set_key('amt_add_thousands_sep', checked)
                self.app.refresh_tabs_signal.emit()
        thousandsep_cb.stateChanged.connect(on_set_thousandsep)
        # QR-scanning video device
        qr_combo = QComboBox()
        qr_combo.addItem("Default", "default")
        msg = (_("For scanning QR codes.") + "\n"
               + _("Install the zbar package to enable this."))
        qr_label = HelpLabel(_('Video Device') + ':', msg)
        from .qrreader import find_system_cameras
        system_cameras = find_system_cameras()
        for cam_desc, cam_path in system_cameras.items():
            qr_combo.addItem(cam_desc, cam_path)
        index = qr_combo.findData(self.config.get("video_device"))
        qr_combo.setCurrentIndex(index)
        on_video_device = lambda x: self.config.set_key("video_device", qr_combo.itemData(x), True)
        qr_combo.currentIndexChanged.connect(on_video_device)
        # color theme (requires restart)
        colortheme_combo = QComboBox()
        colortheme_combo.addItem(_('Light'), 'default')
        colortheme_combo.addItem(_('Dark'), 'dark')
        index = colortheme_combo.findData(self.config.get('qt_gui_color_theme', 'default'))
        colortheme_combo.setCurrentIndex(index)
        colortheme_label = QLabel(_('Color theme') + ':')
        def on_colortheme(x):
            self.config.set_key('qt_gui_color_theme', colortheme_combo.itemData(x), True)
            self.need_restart = True
        colortheme_combo.currentIndexChanged.connect(on_colortheme)
        # misc: update check, file logging, tx preview
        updatecheck_cb = QCheckBox(_("Automatically check for software updates"))
        updatecheck_cb.setChecked(bool(self.config.get('check_updates', False)))
        def on_set_updatecheck(v):
            self.config.set_key('check_updates', v == Qt.Checked, save=True)
        updatecheck_cb.stateChanged.connect(on_set_updatecheck)
        filelogging_cb = QCheckBox(_("Write logs to file"))
        filelogging_cb.setChecked(bool(self.config.get('log_to_file', False)))
        def on_set_filelogging(v):
            self.config.set_key('log_to_file', v == Qt.Checked, save=True)
            self.need_restart = True
        filelogging_cb.stateChanged.connect(on_set_filelogging)
        filelogging_cb.setToolTip(_('Debug logs can be persisted to disk. These are useful for troubleshooting.'))
        preview_cb = QCheckBox(_('Advanced preview'))
        preview_cb.setChecked(bool(self.config.get('advanced_preview', False)))
        preview_cb.setToolTip(_("Open advanced transaction preview dialog when 'Pay' is clicked."))
        def on_preview(x):
            self.config.set_key('advanced_preview', x == Qt.Checked)
        preview_cb.stateChanged.connect(on_preview)
        # change addresses
        usechange_cb = QCheckBox(_('Use change addresses'))
        usechange_cb.setChecked(self.wallet.use_change)
        if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
        def on_usechange(x):
            usechange_result = x == Qt.Checked
            if self.wallet.use_change != usechange_result:
                self.wallet.use_change = usechange_result
                self.wallet.db.put('use_change', self.wallet.use_change)
                # multiple_cb is defined below; closure resolves it lazily.
                multiple_cb.setEnabled(self.wallet.use_change)
        usechange_cb.stateChanged.connect(on_usechange)
        usechange_cb.setToolTip(_('Using change addresses makes it more difficult for other people to track your transactions.'))
        def on_multiple(x):
            multiple = x == Qt.Checked
            if self.wallet.multiple_change != multiple:
                self.wallet.multiple_change = multiple
                self.wallet.db.put('multiple_change', multiple)
        multiple_change = self.wallet.multiple_change
        multiple_cb = QCheckBox(_('Use multiple change addresses'))
        multiple_cb.setEnabled(self.wallet.use_change)
        multiple_cb.setToolTip('\n'.join([
            _('In some cases, use up to 3 change addresses in order to break '
              'up large coin amounts and obfuscate the recipient address.'),
            _('This may result in higher transactions fees.')
        ]))
        multiple_cb.setChecked(multiple_change)
        multiple_cb.stateChanged.connect(on_multiple)
        # coin selection
        def fmt_docs(key, klass):
            lines = [ln.lstrip(" ") for ln in klass.__doc__.split("\n")]
            return '\n'.join([key, "", " ".join(lines)])
        choosers = sorted(coinchooser.COIN_CHOOSERS.keys())
        if len(choosers) > 1:
            chooser_name = coinchooser.get_name(self.config)
            msg = _('Choose coin (UTXO) selection method.  The following are available:\n\n')
            msg += '\n\n'.join(fmt_docs(*item) for item in coinchooser.COIN_CHOOSERS.items())
            chooser_label = HelpLabel(_('Coin selection') + ':', msg)
            chooser_combo = QComboBox()
            chooser_combo.addItems(choosers)
            i = choosers.index(chooser_name) if chooser_name in choosers else 0
            chooser_combo.setCurrentIndex(i)
            def on_chooser(x):
                chooser_name = choosers[chooser_combo.currentIndex()]
                self.config.set_key('coin_chooser', chooser_name)
            chooser_combo.currentIndexChanged.connect(on_chooser)
        def on_unconf(x):
            self.config.set_key('confirmed_only', bool(x))
        conf_only = bool(self.config.get('confirmed_only', False))
        unconf_cb = QCheckBox(_('Spend only confirmed coins'))
        unconf_cb.setToolTip(_('Spend only confirmed inputs.'))
        unconf_cb.setChecked(conf_only)
        unconf_cb.stateChanged.connect(on_unconf)
        def on_outrounding(x):
            self.config.set_key('coin_chooser_output_rounding', bool(x))
        enable_outrounding = bool(self.config.get('coin_chooser_output_rounding', True))
        outrounding_cb = QCheckBox(_('Enable output value rounding'))
        outrounding_cb.setToolTip(
            _('Set the value of the change output so that it has similar precision to the other outputs.') + '\n' +
            _('This might improve your privacy somewhat.') + '\n' +
            _('If enabled, at most 100 satoshis might be lost due to this, per transaction.'))
        outrounding_cb.setChecked(enable_outrounding)
        outrounding_cb.stateChanged.connect(on_outrounding)
        # block explorer
        block_explorers = sorted(util.block_explorer_info().keys())
        BLOCK_EX_CUSTOM_ITEM = _("Custom URL")
        if BLOCK_EX_CUSTOM_ITEM in block_explorers:  # malicious translation?
            block_explorers.remove(BLOCK_EX_CUSTOM_ITEM)
        block_explorers.append(BLOCK_EX_CUSTOM_ITEM)
        msg = _('Choose which online block explorer to use for functions that open a web browser')
        block_ex_label = HelpLabel(_('Online Block Explorer') + ':', msg)
        block_ex_combo = QComboBox()
        block_ex_custom_e = QLineEdit(str(self.config.get('block_explorer_custom') or ''))
        block_ex_combo.addItems(block_explorers)
        block_ex_combo.setCurrentIndex(
            block_ex_combo.findText(util.block_explorer(self.config) or BLOCK_EX_CUSTOM_ITEM))
        def showhide_block_ex_custom_e():
            block_ex_custom_e.setVisible(block_ex_combo.currentText() == BLOCK_EX_CUSTOM_ITEM)
        showhide_block_ex_custom_e()
        def on_be_combo(x):
            if block_ex_combo.currentText() == BLOCK_EX_CUSTOM_ITEM:
                on_be_edit()
            else:
                be_result = block_explorers[block_ex_combo.currentIndex()]
                self.config.set_key('block_explorer_custom', None, False)
                self.config.set_key('block_explorer', be_result, True)
            showhide_block_ex_custom_e()
        block_ex_combo.currentIndexChanged.connect(on_be_combo)
        def on_be_edit():
            val = block_ex_custom_e.text()
            try:
                val = ast.literal_eval(val)  # to also accept tuples
            except:
                pass
            self.config.set_key('block_explorer_custom', val)
        block_ex_custom_e.editingFinished.connect(on_be_edit)
        block_ex_hbox = QHBoxLayout()
        block_ex_hbox.setContentsMargins(0, 0, 0, 0)
        block_ex_hbox.setSpacing(0)
        block_ex_hbox.addWidget(block_ex_combo)
        block_ex_hbox.addWidget(block_ex_custom_e)
        block_ex_hbox_w = QWidget()
        block_ex_hbox_w.setLayout(block_ex_hbox)
        # Fiat Currency
        hist_checkbox = QCheckBox()
        hist_capgains_checkbox = QCheckBox()
        fiat_address_checkbox = QCheckBox()
        ccy_combo = QComboBox()
        ex_combo = QComboBox()
        def update_currencies():
            if not self.fx:
                return
            currencies = sorted(self.fx.get_currencies(self.fx.get_history_config()))
            ccy_combo.clear()
            ccy_combo.addItems([_('None')] + currencies)
            if self.fx.is_enabled():
                ccy_combo.setCurrentIndex(ccy_combo.findText(self.fx.get_currency()))
        def update_history_cb():
            if not self.fx: return
            hist_checkbox.setChecked(self.fx.get_history_config())
            hist_checkbox.setEnabled(self.fx.is_enabled())
        def update_fiat_address_cb():
            if not self.fx: return
            fiat_address_checkbox.setChecked(self.fx.get_fiat_address_config())
        def update_history_capgains_cb():
            if not self.fx: return
            hist_capgains_checkbox.setChecked(self.fx.get_history_capital_gains_config())
            hist_capgains_checkbox.setEnabled(hist_checkbox.isChecked())
        def update_exchanges():
            if not self.fx: return
            b = self.fx.is_enabled()
            ex_combo.setEnabled(b)
            if b:
                h = self.fx.get_history_config()
                c = self.fx.get_currency()
                exchanges = self.fx.get_exchanges_by_ccy(c, h)
            else:
                exchanges = self.fx.get_exchanges_by_ccy('USD', False)
            # Block signals while repopulating so on_exchange doesn't fire.
            ex_combo.blockSignals(True)
            ex_combo.clear()
            ex_combo.addItems(sorted(exchanges))
            ex_combo.setCurrentIndex(ex_combo.findText(self.fx.config_exchange()))
            ex_combo.blockSignals(False)
        def on_currency(hh):
            if not self.fx: return
            b = bool(ccy_combo.currentIndex())
            ccy = str(ccy_combo.currentText()) if b else None
            self.fx.set_enabled(b)
            if b and ccy != self.fx.ccy:
                self.fx.set_currency(ccy)
            update_history_cb()
            update_exchanges()
            self.app.update_fiat_signal.emit()
        def on_exchange(idx):
            exchange = str(ex_combo.currentText())
            if self.fx and self.fx.is_enabled() and exchange and exchange != self.fx.exchange.name():
                self.fx.set_exchange(exchange)
        def on_history(checked):
            if not self.fx: return
            self.fx.set_history_config(checked)
            update_exchanges()
            if self.fx.is_enabled() and checked:
                self.fx.trigger_update()
            update_history_capgains_cb()
            self.app.update_fiat_signal.emit()
        def on_history_capgains(checked):
            if not self.fx: return
            self.fx.set_history_capital_gains_config(checked)
            self.app.update_fiat_signal.emit()
        def on_fiat_address(checked):
            if not self.fx: return
            self.fx.set_fiat_address_config(checked)
            self.app.update_fiat_signal.emit()
        # Initialize fiat widgets from current state before connecting
        # signals, so the initial population doesn't trigger the handlers.
        update_currencies()
        update_history_cb()
        update_history_capgains_cb()
        update_fiat_address_cb()
        update_exchanges()
        ccy_combo.currentIndexChanged.connect(on_currency)
        hist_checkbox.stateChanged.connect(on_history)
        hist_capgains_checkbox.stateChanged.connect(on_history_capgains)
        fiat_address_checkbox.stateChanged.connect(on_fiat_address)
        ex_combo.currentIndexChanged.connect(on_exchange)
        # assemble the tabs
        gui_widgets = []
        gui_widgets.append((lang_label, lang_combo))
        gui_widgets.append((colortheme_label, colortheme_combo))
        gui_widgets.append((unit_label, unit_combo))
        gui_widgets.append((nz_label, nz))
        gui_widgets.append((msat_cb, None))
        gui_widgets.append((thousandsep_cb, None))
        invoices_widgets = []
        invoices_widgets.append((bolt11_fallback_cb, None))
        invoices_widgets.append((bip21_lightning_cb, None))
        tx_widgets = []
        tx_widgets.append((usechange_cb, None))
        tx_widgets.append((use_rbf_cb, None))
        tx_widgets.append((batch_rbf_cb, None))
        tx_widgets.append((preview_cb, None))
        tx_widgets.append((unconf_cb, None))
        tx_widgets.append((multiple_cb, None))
        tx_widgets.append((outrounding_cb, None))
        if len(choosers) > 1:
            tx_widgets.append((chooser_label, chooser_combo))
        tx_widgets.append((block_ex_label, block_ex_hbox_w))
        lightning_widgets = []
        lightning_widgets.append((recov_cb, None))
        lightning_widgets.append((trampoline_cb, None))
        lightning_widgets.append((instant_swaps_cb, None))
        lightning_widgets.append((remote_wt_cb, self.watchtower_url_e))
        fiat_widgets = []
        fiat_widgets.append((QLabel(_('Fiat currency')), ccy_combo))
        fiat_widgets.append((QLabel(_('Source')), ex_combo))
        fiat_widgets.append((QLabel(_('Show history rates')), hist_checkbox))
        fiat_widgets.append((QLabel(_('Show capital gains in history')), hist_capgains_checkbox))
        fiat_widgets.append((QLabel(_('Show Fiat balance for addresses')), fiat_address_checkbox))
        misc_widgets = []
        misc_widgets.append((updatecheck_cb, None))
        misc_widgets.append((filelogging_cb, None))
        misc_widgets.append((alias_label, self.alias_e))
        misc_widgets.append((qr_label, qr_combo))
        tabs_info = [
            (gui_widgets, _('Appearance')),
            (tx_widgets, _('Transactions')),
            (invoices_widgets, _('Invoices')),
            (lightning_widgets, _('Lightning')),
            (fiat_widgets, _('Fiat')),
            (misc_widgets, _('Misc')),
        ]
        for widgets, name in tabs_info:
            tab = QWidget()
            tab_vbox = QVBoxLayout(tab)
            grid = QGridLayout()
            for a,b in widgets:
                i = grid.rowCount()
                if b:
                    if a:
                        grid.addWidget(a, i, 0)
                    grid.addWidget(b, i, 1)
                else:
                    grid.addWidget(a, i, 0, 1, 2)
            tab_vbox.addLayout(grid)
            tab_vbox.addStretch(1)
            tabs.addTab(tab, name)
        vbox.addWidget(tabs)
        vbox.addStretch(1)
        vbox.addLayout(Buttons(CloseButton(self)))
        self.setLayout(vbox)
    @event_listener
    def on_event_alias_received(self):
        """Relay the network-thread alias event onto the Qt signal/slot system."""
        self.app.alias_received_signal.emit()
    def set_alias_color(self):
        """Color the OpenAlias field: green if validated, red otherwise."""
        if not self.config.get('alias'):
            self.alias_e.setStyleSheet("")
            return
        if self.wallet.contacts.alias_info:
            alias_addr, alias_name, validated = self.wallet.contacts.alias_info
            self.alias_e.setStyleSheet((ColorScheme.GREEN if validated else ColorScheme.RED).as_stylesheet(True))
        else:
            self.alias_e.setStyleSheet(ColorScheme.RED.as_stylesheet(True))
    def on_alias_edit(self):
        """Persist an edited OpenAlias and kick off async (re-)resolution."""
        self.alias_e.setStyleSheet("")
        alias = str(self.alias_e.text())
        self.config.set_key('alias', alias, True)
        if alias:
            self.wallet.contacts.fetch_openalias(self.config)
    def closeEvent(self, event):
        """Detach event listeners and signal connections before closing."""
        self.unregister_callbacks()
        try:
            self.app.alias_received_signal.disconnect(self.set_alias_color)
        except TypeError:
            pass  # 'method' object is not connected
        event.accept()
| {
"content_hash": "7c034d34630e4a34cbea56f13f2a3fc1",
"timestamp": "",
"source": "github",
"line_count": 565,
"max_line_length": 148,
"avg_line_length": 47.92920353982301,
"alnum_prop": 0.6125553914327917,
"repo_name": "pooler/electrum-ltc",
"id": "3e9db0f96958432e1268b6f1ef027d970602ec5e",
"size": "28245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "electrum_ltc/gui/qt/settings_dialog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "13024"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "Java",
"bytes": "2929"
},
{
"name": "Makefile",
"bytes": "2193"
},
{
"name": "NSIS",
"bytes": "7354"
},
{
"name": "Python",
"bytes": "5325268"
},
{
"name": "QML",
"bytes": "318745"
},
{
"name": "Ruby",
"bytes": "16856"
},
{
"name": "Shell",
"bytes": "105672"
},
{
"name": "kvlang",
"bytes": "70748"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import json
import itertools
def run_funcs(module_names, run_names, repeat):
    """Yield one timing record per (module, run) pair, *repeat* times.

    Each record carries the attempt index ('try'), the module and run names,
    plus whatever keys the ``timed()`` context manager collected (which take
    precedence on key collision, as before).
    """
    from .utils.importer import import_object
    from .utils.timing import timed
    for attempt in range(repeat):
        for module_name, run_name in itertools.product(module_names, run_names):
            prepared = import_object('.' + module_name, __name__).prepare(run_name)
            with timed() as timing_data:
                prepared()
            record = {'try': attempt, 'module': module_name, 'run': run_name}
            record.update(timing_data)
            yield record
def cli_run(output, **kwds):
    """
    Run benchmarks.
    """
    # NOTE: the docstring above is consumed at runtime by the argparse
    # sub-command help, so its text is kept verbatim.
    benchmark = [record for record in run_funcs(**kwds)]
    try:
        json.dump({'benchmark': benchmark, 'lang': 'python'}, output)
    finally:
        # Close the destination stream even if serialization fails.
        output.close()
def make_parser(doc=__doc__):
    """Build the argparse CLI: a ``run`` sub-command with csv-typed args."""
    import argparse

    class FormatterClass(argparse.RawDescriptionHelpFormatter,
                         argparse.ArgumentDefaultsHelpFormatter):
        pass

    parser = argparse.ArgumentParser(
        description=doc,
        formatter_class=FormatterClass)
    subparsers = parser.add_subparsers()

    def subp(command, func):
        # The first non-blank docstring line becomes the short help text.
        func_doc = func.__doc__
        stripped_lines = map(str.strip, (func_doc or '').splitlines())
        title = next(iter(filter(None, stripped_lines)), None)
        sub = subparsers.add_parser(
            command,
            formatter_class=FormatterClass,
            help=title,
            description=func_doc)
        sub.set_defaults(func=func)
        return sub

    def csv(x):
        return x.split(',')

    run_parser = subp('run', cli_run)
    run_parser.add_argument('module_names', type=csv)
    run_parser.add_argument('run_names', type=csv, nargs='?', default='default')
    run_parser.add_argument('--output', default='-', type=argparse.FileType('w'))
    run_parser.add_argument('--repeat', type=int, default=5)
    return parser
def main(args=None):
    """Parse *args* (default: sys.argv) and dispatch to the chosen sub-command."""
    parser = make_parser()
    ns = parser.parse_args(args)

    def dispatch(func, **kwds):
        # 'func' is set by set_defaults() on the selected sub-parser; the
        # remaining namespace entries become its keyword arguments.
        return func(**kwds)

    return dispatch(**vars(ns))
# Allow direct execution (``python cli.py`` / ``python -m ...``).
if __name__ == '__main__':
    main()
| {
"content_hash": "a3210f43a20eb48e9dad3ffdada4b886",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 76,
"avg_line_length": 25.602564102564102,
"alnum_prop": 0.5658487731597396,
"repo_name": "tkf/comparatist",
"id": "9b93fc8780bdb00edd7d27be4e8c93f23c9b4902",
"size": "1997",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/comparatist/cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2940"
},
{
"name": "Julia",
"bytes": "4898"
},
{
"name": "Makefile",
"bytes": "271"
},
{
"name": "Python",
"bytes": "22452"
},
{
"name": "Shell",
"bytes": "184"
}
],
"symlink_target": ""
} |
# Prefer setuptools (needed for entry_points/zip_safe); fall back to the
# stdlib distutils when setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Long description shown on PyPI: README followed by the changelog, with
# the ``.. :changelog:`` marker comment removed.
# Fix: the files were previously opened without ever being closed
# (resource leak / ResourceWarning); use context managers instead.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

setup(
    name='polygamy',
    version='0.1.2',
    description='Handle multiple SCM repositories easily.',
    long_description=readme + '\n\n' + history,
    author='Chris Trotman',
    author_email='chris@trotman.io',
    url='https://github.com/solarnz/polygamy',
    packages=[
        'polygamy',
    ],
    include_package_data=True,
    install_requires=[
        'blessings == 1.5.1',
        'gevent >= 1.0',
        'tabulate == 0.7.2',
    ],
    license="BSD",
    zip_safe=False,
    keywords='polygamy',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    test_suite='tests',
    entry_points={
        'console_scripts': [
            'polygamy = polygamy:main'
        ]
    },
)
| {
"content_hash": "3be4ee58d72a279a8f0e20c3f6d00117",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 66,
"avg_line_length": 27,
"alnum_prop": 0.5725308641975309,
"repo_name": "solarnz/polygamy",
"id": "ff5d458a2267f8891e13525ee2689c22af7dfea1",
"size": "1343",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "40341"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
} |
# Create your views here.
from django import forms
from django.core.paginator import Paginator
from django.core.paginator import InvalidPage
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from django.template.loader import get_template
# app specific files
from models import *
from forms import *
def create_book(request):
    """Show the book-creation form; on valid POST, save it and present a
    fresh empty form.

    NOTE: the template context is built from ``locals()``, so the local
    variable names here (``form``, ``t``, ``c``) are part of the template
    contract — do not rename them.
    """
    form = BookForm(request.POST or None)
    if form.is_valid():
        form.save()
        # Re-bind an empty form so the page is ready for another entry.
        form = BookForm()
    t = get_template('create_book.html')
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
def list_book(request):
    """Render a paginated list of all books, 10 per page.

    The page number comes from the ``page`` query parameter; non-numeric
    values fall back to page 1, out-of-range pages fall back to the last
    page.

    Fix: the pagination fallback used a bare ``except:`` which swallowed
    every exception (including KeyboardInterrupt and genuine bugs); it is
    narrowed to Django's ``InvalidPage`` (parent of EmptyPage and
    PageNotAnInteger), which is what ``Paginator.page`` raises.

    NOTE: the template context is built from ``locals()``, so the local
    variable names here are part of the template contract — do not rename.
    """
    list_items = Book.objects.all()
    paginator = Paginator(list_items ,10)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    try:
        list_items = paginator.page(page)
    except InvalidPage:
        # Out-of-range/invalid page: show the last available page.
        list_items = paginator.page(paginator.num_pages)
    t = get_template('list_book.html')
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
def view_book(request, id):
    """Render the detail page for the book with primary key ``id``.

    NOTE(review): an unknown id raises Book.DoesNotExist (a 500), not a 404
    — confirm whether that is intended.
    The template context is built from ``locals()``; do not rename locals.
    """
    book_instance = Book.objects.get(id = id)
    t=get_template('view_book.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
def edit_book(request, id):
    """Show an edit form bound to the book with primary key ``id``; save
    changes on valid POST and re-render the form.

    The template context is built from ``locals()``; do not rename locals.
    """
    book_instance = Book.objects.get(id=id)
    form = BookForm(request.POST or None, instance = book_instance)
    if form.is_valid():
        form.save()
    t=get_template('edit_book.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
| {
"content_hash": "86aedef34f252d5a51f94b621975286d",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 67,
"avg_line_length": 23.565217391304348,
"alnum_prop": 0.6482164821648216,
"repo_name": "yokiwhh/bookLab7",
"id": "382e340b1cce58d4d00a39d96722d5bf17b98f3a",
"size": "1626",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bookapp/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10670"
},
{
"name": "Python",
"bytes": "18373"
}
],
"symlink_target": ""
} |
from backend.api.base import BaseHandler
from backend.common.errors import SchemaError
class Handler(BaseHandler):
    """Private API handler for ``/s/path/<sid>``: validates the request
    against the handler schema and always replies with a JSON document.

    Fix: ``except ExcType, e`` is Python-2-only syntax (a SyntaxError on
    Python 3); rewritten as ``except ExcType as e``, which is valid on
    Python 2.6+ and 3.x. Behavior is unchanged.
    """

    # Request schema; None until a concrete schema is supplied — presumably
    # by subclasses or configuration consumed by validate_schema().
    _schema = None

    def get(self, *args, **kwargs):
        """Handle GET: validate the schema, then emit the JSON response.

        Errors never propagate to the framework — schema failures become a
        JSON error payload, anything else a JSON exception payload.
        """
        try:
            schema, validate = self.validate_schema()
            if not validate:
                raise SchemaError(schema.errors)
            self.get_json_response_and_finish()
        except SchemaError as e:
            self.get_json_error_response_and_finish(e)
        except Exception as e:
            self.get_json_exception_response_and_finish(e)
# URL routing table consumed by the application setup: one endpoint whose
# <sid> path segment is a lowercase-alphanumeric identifier.
handlers_list = [
    (r'/s/path/(?P<sid>[a-z0-9]+)/?', Handler)
]
| {
"content_hash": "bda5c561d71566b383046bf0c84bbfc5",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 58,
"avg_line_length": 27.681818181818183,
"alnum_prop": 0.6091954022988506,
"repo_name": "alejandrobernardis/python-slot-machines",
"id": "86f8395b9a338a6a8c4af2bb6b0aa838b8eb8ff7",
"size": "860",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/backend/backend/api/private/game.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "83505"
}
],
"symlink_target": ""
} |
"""Ashier: Template-based scripting for terminal interactions.
Ashier is a program that serves the same purpose as expect(1): it helps
users script terminal interactions. However, unlike expect, Ashier is
programming language agnostic and provides a readable template language
for terminal output matching. These features make scripted terminal
interactions simpler to create and easier to maintain.
This module contains unit tests for the linebuf module.
"""
__author__ = 'cklin@google.com (Chuan-kai Lin)'
import random
import re
import unittest
from .. import linebuf
class TestBuffer(unittest.TestCase):
  """Unit tests for linebuf.Buffer."""

  def testBaseline(self):
    """Tests for Buffer.baseline.

    Buffer.baseline should be updated only by calls to
    Buffer.UpdateBaseline() and should not be updated by calls to
    Buffer.AppendRawData().  Buffer.UpdateBaseline() should keep
    Buffer.baseline unchanged and throw an AssertionError exception
    when its argument is out of range (i.e., lower than current
    baseline or higher than the maximum line number that currently
    exists in the buffer).
    """
    buf = linebuf.Buffer()
    # A fresh buffer starts with baseline 1.
    self.assertEqual(buf.baseline, 1)
    self.assertRaises(AssertionError, buf.UpdateBaseline, 3)
    self.assertEqual(buf.baseline, 1)
    buf.UpdateBaseline(2)
    self.assertEqual(buf.baseline, 2)
    # Moving backwards or beyond the buffered lines must be rejected
    # without changing the baseline.
    self.assertRaises(AssertionError, buf.UpdateBaseline, 1)
    self.assertRaises(AssertionError, buf.UpdateBaseline, 3)
    self.assertEqual(buf.baseline, 2)
    buf.AppendRawData('a\n\rb\n\rccc')
    buf.UpdateBaseline(3)
    self.assertEqual(buf.baseline, 3)
    self.assertRaises(AssertionError, buf.UpdateBaseline, 2)
    self.assertRaises(AssertionError, buf.UpdateBaseline, 5)
    self.assertEqual(buf.baseline, 3)
    # Appending data alone never moves the baseline.
    buf.AppendRawData('c\n')
    self.assertEqual(buf.baseline, 3)

  def testBound(self):
    """Tests for Buffer.GetBound().

    Buffer.GetBound() should represent the upper line number limit
    (i.e., 1+ the maximum valid line number) of the Buffer object,
    where lines are separated by individual LF characters.
    """
    buf = linebuf.Buffer()
    self.assertEqual(buf.GetBound(), 2)
    buf.AppendRawData('a\r\nb\r\nccc')
    self.assertEqual(buf.GetBound(), 4)
    buf.AppendRawData('c\n')
    self.assertEqual(buf.GetBound(), 5)
    # Bare CRs do not terminate a line, so the bound stays put.
    buf.AppendRawData('\r\r')
    self.assertEqual(buf.GetBound(), 5)

  def testFragmentation(self):
    """Tests for Buffer input fragmentation handling.

    The data stored in a Buffer object should be the same regardless
    of how the input is fragmented across Buffer.AppendRawData()
    calls.  This test feeds a test string into a Buffer object in
    randomly generated segments and check that the output remains
    identical regardless of input string segmentation.
    """
    rawstring = ('Pack my\r\r\nred\r\nbox\nwith five\r\n'
                 'dozen\rquality\r\n\r\r\njugs.\r\n')
    expected_output = re.split('\r*\n', rawstring)[:-1]
    # Fixed seed keeps the 100 random segmentations reproducible.
    random.seed(1337)
    for unused_count in range(100):
      buf = linebuf.Buffer()
      source = rawstring
      output = []
      while source:
        take = random.randint(1, 8)
        buf.AppendRawData(source[:take])
        source = source[take:]
        # Drain every completed line as soon as it becomes available.
        while buf.baseline < buf.GetBound()-1:
          output.append(buf.GetLine(buf.baseline))
          buf.UpdateBaseline(buf.baseline+1)
      self.assertEqual(output, expected_output)
# Allow executing this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "18d7e9cfbe47b206bee847543efb9793",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 71,
"avg_line_length": 34.1764705882353,
"alnum_prop": 0.7053930005737234,
"repo_name": "google/ashier",
"id": "9f43cb0acbfa646615b520ee6ae7453dfa46c8a9",
"size": "4103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ashierlib/test/linebuf_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "63595"
}
],
"symlink_target": ""
} |
# SWIG-generated bootstrap: locate and load the compiled _param_LiveProcess
# C extension living next to this module.  NOTE(review): uses the long-
# deprecated 'imp' module (removed in Python 3.12); generated code, left as-is.
from sys import version_info
if version_info >= (2,6,0):
    def swig_import_helper():
        # Search only this package's directory for the extension module.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_param_LiveProcess', [dirname(__file__)])
        except ImportError:
            # Not found beside this file; fall back to a normal sys.path import.
            import _param_LiveProcess
            return _param_LiveProcess
        if fp is not None:
            try:
                _mod = imp.load_module('_param_LiveProcess', fp, pathname, description)
            finally:
                # Always close the file handle returned by find_module.
                fp.close()
            return _mod
    _param_LiveProcess = swig_import_helper()
    del swig_import_helper
else:
    import _param_LiveProcess
# Keep the module namespace clean after the version check.
del version_info
# Alias the builtin property() so generated classes below can use it even on
# ancient interpreters where it may be absent.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Dynamic variant: delegate with static=0 so new attributes are allowed.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Use new-style classes when the 'object' builtin exists; otherwise fall back
# to a plain old-style class (only relevant on pre-2.2 interpreters).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
def set_attr(self,name,value):
if (name == "thisown"): return self.this.own(value)
if hasattr(self,name) or (name == "this"):
set(self,name,value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import String_vector
import param_Process
import param_System
import enum_MemoryMode
import AbstractMemory_vector
import param_AbstractMemory
import range
import param_MemObject
import param_SimObject
# SWIG proxy for the C++ LiveProcess SimObject.  Instances are produced by
# the generated Params factory (LiveProcessParams.create), never constructed
# directly — the disabled __init__ below enforces that.
class LiveProcess(param_Process.Process):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
# Register the proxy class with the C extension so wrapped pointers map to it.
LiveProcess_swigregister = _param_LiveProcess.LiveProcess_swigregister
LiveProcess_swigregister(LiveProcess)
# SWIG proxy for the LiveProcessParams parameter struct.  Each field below is
# a property delegating to the C-extension getter/setter pair for that member.
class LiveProcessParams(param_Process.ProcessParams):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    # Factory: builds the LiveProcess SimObject configured by these params.
    def create(self): return _param_LiveProcess.LiveProcessParams_create(self)
    executable = _swig_property(_param_LiveProcess.LiveProcessParams_executable_get, _param_LiveProcess.LiveProcessParams_executable_set)
    uid = _swig_property(_param_LiveProcess.LiveProcessParams_uid_get, _param_LiveProcess.LiveProcessParams_uid_set)
    cmd = _swig_property(_param_LiveProcess.LiveProcessParams_cmd_get, _param_LiveProcess.LiveProcessParams_cmd_set)
    pid = _swig_property(_param_LiveProcess.LiveProcessParams_pid_get, _param_LiveProcess.LiveProcessParams_pid_set)
    simpoint = _swig_property(_param_LiveProcess.LiveProcessParams_simpoint_get, _param_LiveProcess.LiveProcessParams_simpoint_set)
    egid = _swig_property(_param_LiveProcess.LiveProcessParams_egid_get, _param_LiveProcess.LiveProcessParams_egid_set)
    euid = _swig_property(_param_LiveProcess.LiveProcessParams_euid_get, _param_LiveProcess.LiveProcessParams_euid_set)
    env = _swig_property(_param_LiveProcess.LiveProcessParams_env_get, _param_LiveProcess.LiveProcessParams_env_set)
    gid = _swig_property(_param_LiveProcess.LiveProcessParams_gid_get, _param_LiveProcess.LiveProcessParams_gid_set)
    ppid = _swig_property(_param_LiveProcess.LiveProcessParams_ppid_get, _param_LiveProcess.LiveProcessParams_ppid_set)
    cwd = _swig_property(_param_LiveProcess.LiveProcessParams_cwd_get, _param_LiveProcess.LiveProcessParams_cwd_set)
    def __init__(self):
        this = _param_LiveProcess.new_LiveProcessParams()
        # SWIG idiom: attach the new C++ object to this proxy instance.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _param_LiveProcess.delete_LiveProcessParams
    __del__ = lambda self : None;
# Register the params proxy class with the C extension.
LiveProcessParams_swigregister = _param_LiveProcess.LiveProcessParams_swigregister
LiveProcessParams_swigregister(LiveProcessParams)
| {
"content_hash": "b4b65c6f2acbcc1a1b819177ae7b1e2b",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 137,
"avg_line_length": 43,
"alnum_prop": 0.694587363655073,
"repo_name": "silkyar/570_Big_Little",
"id": "15c11f12908fde89202bc2733926a985aac3c012",
"size": "5063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/ARM/python/m5/internal/param_LiveProcess.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "232078"
},
{
"name": "C",
"bytes": "887097"
},
{
"name": "C++",
"bytes": "52497889"
},
{
"name": "D",
"bytes": "13736198"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "Java",
"bytes": "3096"
},
{
"name": "JavaScript",
"bytes": "78818"
},
{
"name": "Perl",
"bytes": "13199821"
},
{
"name": "Prolog",
"bytes": "977139"
},
{
"name": "Python",
"bytes": "3831426"
},
{
"name": "Ruby",
"bytes": "19404"
},
{
"name": "Scilab",
"bytes": "14370"
},
{
"name": "Shell",
"bytes": "16704"
},
{
"name": "Visual Basic",
"bytes": "2884"
},
{
"name": "XML",
"bytes": "16048"
}
],
"symlink_target": ""
} |
"""
Curious - An async Python 3.6+ library for Discord bots.
.. currentmodule:: curious
.. autosummary::
:toctree:
core
commands
dataclasses
ext.paginator
exc
util
"""
from __future__ import generator_stop # enforce generator stop
import sys
from pkg_resources import DistributionNotFound, get_distribution
# Resolve the installed package version from distribution metadata; fall back
# to a placeholder when running from a source checkout without metadata.
try:
    __version__ = get_distribution("discord-curious").version
except DistributionNotFound:
    __version__ = "0.0.0"
# User-Agent string sent to Discord; _fmt is deleted afterwards to keep the
# module namespace clean.
_fmt = "DiscordBot (https://github.com/Fuyukai/curious {0}) Python/{1[0]}.{1[1]}"
USER_AGENT = _fmt.format(__version__, sys.version_info)
del _fmt
from curious.core.client import BotType, Client
from curious.core.event import EventContext, event
from curious.core.gateway import open_websocket, GatewayHandler
from curious.core.state import State
from curious.dataclasses.appinfo import AppInfo
from curious.dataclasses.attachment import Attachment
from curious.dataclasses.bases import Dataclass, IDObject
from curious.dataclasses.channel import Channel, ChannelType
from curious.dataclasses.embed import Embed
from curious.dataclasses.emoji import Emoji
from curious.dataclasses.guild import ContentFilterLevel, Guild, GuildChannelWrapper, \
GuildEmojiWrapper, GuildRoleWrapper, MFALevel, NotificationLevel, VerificationLevel
from curious.dataclasses.invite import Invite, InviteChannel, InviteGuild, InviteMetadata
from curious.dataclasses.member import Member
from curious.dataclasses.message import Message
from curious.dataclasses.presence import Game, Presence, Status
from curious.dataclasses.reaction import Reaction
from curious.dataclasses.role import Role
from curious.dataclasses.search import SearchQuery, SearchResults
from curious.dataclasses.user import User
from curious.dataclasses.voice_state import VoiceState
from curious.dataclasses.webhook import Webhook
from curious.dataclasses.widget import Widget, WidgetChannel, WidgetGuild, WidgetMember
# for asks
# import multio
# multio.init('curio')
| {
"content_hash": "a36aca7aba3ad2c1eeccf160f4b83fbe",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 89,
"avg_line_length": 33.88135593220339,
"alnum_prop": 0.7968984492246123,
"repo_name": "SunDwarf/curious",
"id": "f91e8bdd2bda3b69f9671a61fccb7c5e06265a75",
"size": "2674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "curious/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "522950"
}
],
"symlink_target": ""
} |
"""System call wrapper.
Defines classes that wraps around direct calls via Python's Popen interface.
Also, AdbWrapper that wraps around the type of ADB calls that are used by the
other scripts.
"""
import logging
import subprocess
class SyscallWrapper:
    """Wraps subprocess invocations and collects their output and errors.

    A single instance can run many commands: every call_* method begins with
    reset(), and callers read the outcome from the public attributes
    (error_occured, error_message, result_final, error_final, return_code).
    NOTE: 'error_occured' keeps its historical misspelling because external
    code (e.g. AdbWrapper) reads it by that name.
    """

    def __init__(self, logger):
        """Creates a wrapper that logs through *logger*.

        Args:
          logger: A logging.Logger used for debug output.
        """
        self._logger = logger
        self.reset()

    def reset(self):
        """Clears all per-command state."""
        self.last_command = ""
        self.error_occured = False
        self.error_message = ""
        self.intermediate_result = []
        self.intermediate_error = []
        self.result_final = []
        # BUGFIX: error_final was previously created only on the nonzero-exit
        # path of call_returnable_command, so readers could hit AttributeError
        # after other commands or interrupt-style failures.
        self.error_final = []
        self.return_code = 0

    def call_returnable_command_ignore_output(self, cmd):
        """Makes calls that usually return but ignores all results/errors.

        Example of what this might be suitable for is 'adb logcat -c'.

        Args:
          cmd: The command to be executed.
        """
        self.reset()
        logger = self._logger
        logger.debug("cmd: {}".format(cmd))
        self.last_command = cmd
        cmd_pid = subprocess.Popen(cmd)
        # wait() blocks until the child exits; the previous busy-wait on
        # poll() spun a full CPU core for the duration of the command.
        cmd_pid.wait()

    def call_returnable_command(self, cmd):
        """Handles the type of calls that return.

        stdout lines are accumulated into result_final; on a nonzero exit
        code, stderr is drained into error_final and joined into
        error_message.

        NOTE(review): stdout is fully drained before stderr is read, so a
        command that fills the stderr pipe buffer could still deadlock —
        behavior unchanged from the original implementation.

        Args:
          cmd: The command to be executed.
        """
        self.reset()
        logger = self._logger
        logger.debug("cmd: {}".format(cmd))
        self.last_command = cmd
        cmd_pid = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        while True:
            self.return_code = cmd_pid.poll()
            try:
                line = cmd_pid.stdout.readline().decode("utf-8", "replace").strip()
                if line:
                    self.result_final.append(line)
            except KeyboardInterrupt:
                self.error_occured = True
                self.error_message = "Terminated by keyboard interrupt."
                break
            # Loop until the pipe is exhausted AND the process has exited.
            if not line and self.return_code is not None:
                break
        if self.return_code == 0:
            return
        # Nonzero exit: try to extract an error message from stderr.
        logger.debug("return code: {}".format(self.return_code))
        self.error_occured = True
        self.error_final = []
        while True:
            try:
                line = cmd_pid.stderr.readline().decode("utf-8", "replace").strip()
                if line:
                    self.error_final.append(line)
            except KeyboardInterrupt:
                # NOTE(review): this message is immediately overwritten by the
                # join below — preserved as-is to keep behavior unchanged.
                self.error_message = ("Error message extraction interrupted by keyboard"
                                      " interrupt:")
                break
            if not line:
                break
        self.error_message = "\n".join(self.error_final)
        return

    def call_non_returnable_command(self, cmd, terminate):
        """Handles the type of calls that do not return (e.g. adb logcat, tail).

        These type of calls are much more complicated, and rely upon correct
        implementation of a terminating function to tell us when to stop.

        TODO(billy): Consider accepting and handling a timeout value that
        terminates the execution of the command regardless of state so that we
        don't risk looping indefinitely.

        Args:
          cmd: The command to be executed.
          terminate: A function called as terminate(line, logger) for each
              output line; when it returns a truthy result the process is
              killed and that result becomes the single entry of
              result_final.  (The original docstring described a different
              signature; this matches the actual call below.)
        """
        self.reset()
        logger = self._logger
        logger.debug("cmd: {}".format(cmd))
        self.last_command = cmd
        cmd_pid = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        while cmd_pid.poll() is None:
            try:
                line = cmd_pid.stdout.readline().decode("utf-8", "replace").strip()
                if line:
                    self.intermediate_result.append(line)
                    result = terminate(line, logger)
                    if result:
                        self.error_occured = False
                        logger.debug("Found terminating condition... Result: %s", result)
                        cmd_pid.kill()
                        self.result_final.append(result)
                        break
            except KeyboardInterrupt:
                self.error_occured = True
                self.error_message = "Execution interrupted by keyboard."
                cmd_pid.kill()
                break
        return
class AdbWrapper:
    """A class that wraps around various ADB calls.

    Every command runs through a shared SyscallWrapper whose state is reset
    per call, so results must be read (e.g. via get_result()) before issuing
    the next command.
    """

    BASE_ADB_COMMAND = ["adb"]

    def __init__(self, serial_number, logger=None):
        """Initializes a wrapper bound to one device.

        Args:
          serial_number: Serial number of the target device as reported by
              'adb devices'.
          logger: An optional logger object; a default WARNING-level logger
              is created when omitted.
        """
        if not logger:
            self._logger = AdbWrapper.set_up_default_logger()
        else:
            self._logger = logger
        # BUGFIX: was SyscallWrapper(logger) — when the caller omitted the
        # logger argument this passed None, and the first logger.debug()
        # inside SyscallWrapper raised AttributeError.
        self.syscall_wrapper = SyscallWrapper(self._logger)
        self.device_serial_number = serial_number
        self.adb_command = AdbWrapper.BASE_ADB_COMMAND + ["-s", serial_number]
        self.adb_shell_command = self.adb_command + ["shell"]

    @staticmethod
    def set_up_default_logger():
        """Sets up the default logger for this class.

        Logging level is set to WARNING.  (The docstring previously claimed
        ERROR, which did not match the code.)

        Returns:
          A logger object.
        """
        logger = logging.getLogger(__name__)
        logger.setLevel(logging.WARNING)
        s_handler = logging.StreamHandler()
        s_format = logging.Formatter(
            "%(levelname)s:%(filename)s:%(funcName)s(%(lineno)d): %(message)s")
        s_handler.setFormatter(s_format)
        logger.addHandler(s_handler)
        return logger

    def get_result(self):
        """Returns the result lines of the most recently executed command."""
        return self.syscall_wrapper.result_final

    @staticmethod
    def start_server(logger=None):
        """Calls 'adb start-server' to start ADB server.

        Args:
          logger: An optional logger object used for logging.

        Returns:
          A boolean indicating the success of this operation.
        """
        if not logger:
            logger = AdbWrapper.set_up_default_logger()
        cmd = AdbWrapper.BASE_ADB_COMMAND[:]
        cmd.append("start-server")
        logger.debug("cmd: %s", cmd)
        sw = SyscallWrapper(logger)
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("Failed to start adb server: %s", sw.error_message)
            return False
        return True

    @staticmethod
    def devices(logger=None):
        """Calls 'adb devices -l' to list connected devices via ADB.

        Args:
          logger: An optional logger object used for logging.

        Returns:
          A list of DeviceInfo containing all connected devices.
          <code>None</code> is returned if error occurs.
        """
        if not logger:
            logger = AdbWrapper.set_up_default_logger()
        cmd = AdbWrapper.BASE_ADB_COMMAND[:]
        cmd.extend(["devices", "-l"])
        logger.debug("cmd: %s", cmd)
        sw = SyscallWrapper(logger)
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            # BUGFIX: was sw.error_final, which is only assigned on the
            # nonzero-exit path and may not exist on other failure modes;
            # error_message is always set when error_occured is True.
            logger.error("Failure: %s", sw.error_message)
            return None
        if len(sw.result_final) < 2:
            logger.warning("No devices connected!")
            return []
        connected_devices = []
        # The first output line is the 'List of devices attached' header.
        for line in sw.result_final[1:]:
            logger.debug("line: %s", line)
            device = DeviceInfo()
            line_components = line.split()
            device.serial_number = line_components[0]
            if "unauthorized" in line:
                logger.warning("ADB has not been authorized for device with serial "
                               "number %s", device.serial_number)
                device.unauthorized = True
                connected_devices.append(device)
                continue
            for component in line_components:
                if component.find("product:") == 0:
                    device.product_name = component.split(":")[1].strip()
                elif component.find("model:") == 0:
                    device.model_name = component.split(":")[1].strip()
                elif component.find("device:") == 0:
                    device.device_name = component.split(":")[1].strip()
                elif component.find("transport_id:") == 0:
                    device.transport_id = component.split(":")[1].strip()
            connected_devices.append(device)
        return connected_devices

    def am_start(self, package_name, component_name,
                 action_name=None, extra_string=None):
        """Calls 'adb shell am start ...'.

        Args:
          package_name: The package name to be invoked.
          component_name: The component name in the package to be directed to.
          action_name: The action name of the intent (usually in the form of
              action.intent.SOME_ACTION)
          extra_string: A (key, value) pair passed along via '--es'.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        cmd = self.adb_shell_command[:]
        cmd.extend(["am", "start"])
        if action_name:
            cmd.extend(["-a", action_name])
        if extra_string:
            cmd.extend(["--es", extra_string[0], extra_string[1]])
        cmd.extend(["-n", "{}/{}".format(package_name, component_name)])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("%s failed: %s", cmd, sw.error_message)
            return False
        return True

    def logcat_clear(self):
        """Calls 'adb logcat -c' to clear the device log, ignoring output."""
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["logcat", "-c"])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command_ignore_output(cmd)
        return

    def logcat_find(self, patterns, terminating_function):
        """Helper function to find certain patterns in logcat.

        Args:
          patterns: A list containing regex/patterns to be passed to
              "adb logcat" for logcat level filtering.
          terminating_function: Called as terminating_function(line, logger)
              for each logcat line; a truthy return value kills the logcat
              process and becomes this method's result.

        Returns:
          The result produced by terminating_function, or None on error or
          when no result was produced.
        """
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["logcat"])
        if patterns:
            cmd.extend(patterns)
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_non_returnable_command(cmd, terminating_function)
        if sw.error_occured:
            logger.error("Failed to find pattern in logcat: %s", sw.error_message)
            return None
        # ROBUSTNESS: guard against an empty result (terminating function
        # never matched) instead of raising IndexError.
        if not sw.result_final:
            logger.error("logcat terminated without a matching result.")
            return None
        return sw.result_final[0]

    def install(self, path_to_apk):
        """Calls 'adb install APK'.

        Args:
          path_to_apk: A string representing the path to where the APK resides.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["install", path_to_apk])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("ADB install failed: %s", sw.error_message)
            return False
        return True

    def uninstall(self, package_name):
        """Calls 'adb uninstall package_name' to delete the package from device.

        Args:
          package_name: The name of the package to be deleted.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["uninstall", package_name])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("ADB uninstall %s failed: %s", package_name,
                         sw.error_message)
            return False
        return True

    def shell(self, cmd):
        """Runs an arbitrary command list via 'adb shell'.

        Args:
          cmd: A list of command tokens to execute on the device.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        # Avoid rebinding the 'cmd' parameter (clearer in tracebacks/logs).
        full_cmd = self.adb_shell_command + cmd
        logger.debug("cmd: %s", full_cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(full_cmd)
        if sw.error_occured:
            logger.error("%s failed with message: %s", full_cmd, sw.error_message)
            return False
        return True

    def pull(self, source_path, target_path):
        """Calls 'adb pull' to download something from the device.

        Args:
          source_path: The path on device to pull from.
          target_path: The path on host to download results to.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["pull", source_path, target_path])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("Pulling from %s failed: %s", source_path, sw.error_message)
            return False
        return True

    def push(self, source_path, target_path):
        """Calls 'adb push' to upload something to the device.

        Args:
          source_path: The path on host containing dir or file to upload.
          target_path: The path on device indicating where to upload to.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["push", source_path, target_path])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("%s failed: %s", cmd, sw.error_message)
            return False
        return True

    def backup(self, backup_filepath, package_name):
        """Calls 'adb backup -f' to pull result files.

        Note that this is not a generic adb backup implementation.

        Args:
          backup_filepath: The path to save the backup file to.
          package_name: The name of the package to back-up.

        Returns:
          A boolean indicating the success of the operation.
        """
        logger = self._logger
        cmd = self.adb_command[:]
        cmd.extend(["backup", "-f", backup_filepath, package_name])
        logger.debug("cmd: %s", cmd)
        sw = self.syscall_wrapper
        sw.call_returnable_command(cmd)
        if sw.error_occured:
            logger.error("%s failed: %s", cmd, sw.error_message)
            return False
        return True
class DeviceInfo:
    """A value object holding basic information about a physical device.

    The fields were previously class-level attributes serving as shared
    defaults; they are now initialized per instance, and a __repr__ is
    provided for debuggability.  DeviceInfo() still takes no arguments,
    and all attribute names/defaults are unchanged.
    """

    def __init__(self):
        self.serial_number = ""
        self.product_name = ""
        self.model_name = ""
        self.device_name = ""
        self.transport_id = ""
        self.unauthorized = False

    def __repr__(self):
        return ("DeviceInfo(serial_number={!r}, product_name={!r}, "
                "model_name={!r}, device_name={!r}, transport_id={!r}, "
                "unauthorized={!r})").format(
                    self.serial_number, self.product_name, self.model_name,
                    self.device_name, self.transport_id, self.unauthorized)
| {
"content_hash": "55cdae6526d52fb17e40f38b98d6ab9d",
"timestamp": "",
"source": "github",
"line_count": 445,
"max_line_length": 80,
"avg_line_length": 30.925842696629214,
"alnum_prop": 0.6339921523034443,
"repo_name": "android/security-certification-resources",
"id": "6df7fd1733470978242b825b56656152e6bc1540",
"size": "14365",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uraniborg/scripts/python/syscall_wrapper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AIDL",
"bytes": "7454"
},
{
"name": "Java",
"bytes": "1012053"
},
{
"name": "Python",
"bytes": "108037"
},
{
"name": "Shell",
"bytes": "1497"
}
],
"symlink_target": ""
} |
# Static metadata parsed by Ansible tooling; values must stay literal.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_sslkeyandcertificate
author: Gaurav Rastogi (@grastogi23) <grastogi@avinetworks.com>
short_description: Module for setup of SSLKeyAndCertificate Avi RESTful Object
description:
- This module is used to configure SSLKeyAndCertificate object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
ca_certs:
description:
- Ca certificates in certificate chain.
certificate:
description:
- Sslcertificate settings for sslkeyandcertificate.
required: true
certificate_base64:
description:
- States if the certificate is base64 encoded.
- Field introduced in 18.1.2, 18.2.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.9"
type: bool
certificate_management_profile_ref:
description:
- It is a reference to an object of type certificatemanagementprofile.
created_by:
description:
- Creator name.
dynamic_params:
description:
- Dynamic parameters needed for certificate management profile.
enckey_base64:
description:
- Encrypted private key corresponding to the private key (e.g.
- Those generated by an hsm such as thales nshield).
enckey_name:
description:
- Name of the encrypted private key (e.g.
- Those generated by an hsm such as thales nshield).
format:
description:
- Format of the key/certificate file.
- Enum options - SSL_PEM, SSL_PKCS12.
- Field introduced in 18.1.2, 18.2.1.
- Default value when not specified in API or module is interpreted by Avi Controller as SSL_PEM.
version_added: "2.9"
hardwaresecuritymodulegroup_ref:
description:
- It is a reference to an object of type hardwaresecuritymodulegroup.
key:
description:
- Private key.
key_base64:
description:
- States if the private key is base64 encoded.
- Field introduced in 18.1.2, 18.2.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.9"
type: bool
key_params:
description:
- Sslkeyparams settings for sslkeyandcertificate.
key_passphrase:
description:
- Passphrase used to encrypt the private key.
- Field introduced in 18.1.2, 18.2.1.
version_added: "2.9"
name:
description:
- Name of the object.
required: true
status:
description:
- Enum options - ssl_certificate_finished, ssl_certificate_pending.
- Default value when not specified in API or module is interpreted by Avi Controller as SSL_CERTIFICATE_FINISHED.
tenant_ref:
description:
- It is a reference to an object of type tenant.
type:
description:
- Enum options - ssl_certificate_type_virtualservice, ssl_certificate_type_system, ssl_certificate_type_ca.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Create a SSL Key and Certificate
avi_sslkeyandcertificate:
controller: 10.10.27.90
username: admin
password: AviNetworks123!
key: |
-----BEGIN PRIVATE KEY-----
....
-----END PRIVATE KEY-----
certificate:
self_signed: true
certificate: |
-----BEGIN CERTIFICATE-----
....
-----END CERTIFICATE-----
type: SSL_CERTIFICATE_TYPE_VIRTUALSERVICE
name: MyTestCert
"""
RETURN = '''
obj:
description: SSLKeyAndCertificate (api/sslkeyandcertificate) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, avi_ansible_api, HAS_AVI)
except ImportError:
HAS_AVI = False
def main():
    """Entry point: build the module argument spec and drive the Avi API.

    Builds the SSLKeyAndCertificate argument specification, merges in the
    common Avi connection arguments, then delegates the create/update/delete
    logic to avi_ansible_api.  'key' and 'key_passphrase' are marked as
    sensitive so they are never logged.
    """
    module_args = dict(
        state=dict(default='present', choices=['absent', 'present']),
        avi_api_update_method=dict(default='put', choices=['put', 'patch']),
        avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
        ca_certs=dict(type='list'),
        certificate=dict(type='dict', required=True),
        certificate_base64=dict(type='bool'),
        certificate_management_profile_ref=dict(type='str'),
        created_by=dict(type='str'),
        dynamic_params=dict(type='list'),
        enckey_base64=dict(type='str'),
        enckey_name=dict(type='str'),
        format=dict(type='str'),
        hardwaresecuritymodulegroup_ref=dict(type='str'),
        key=dict(type='str', no_log=True),
        key_base64=dict(type='bool'),
        key_params=dict(type='dict'),
        key_passphrase=dict(type='str', no_log=True),
        name=dict(type='str', required=True),
        status=dict(type='str'),
        tenant_ref=dict(type='str'),
        type=dict(type='str'),
        url=dict(type='str'),
        uuid=dict(type='str'),
    )
    module_args.update(avi_common_argument_spec())
    module = AnsibleModule(argument_spec=module_args,
                           supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) or requests is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'sslkeyandcertificate',
                           set(['key_passphrase', 'key']))


if __name__ == '__main__':
    main()
| {
"content_hash": "2359c004f74c93963ccd862fc78916f2",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 125,
"avg_line_length": 35.135416666666664,
"alnum_prop": 0.6019863622887637,
"repo_name": "thaim/ansible",
"id": "8b96fae24c188cc12b4b1ed1a4e60176b4ca4b97",
"size": "7083",
"binary": false,
"copies": "26",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/network/avi/avi_sslkeyandcertificate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
} |
"""Tests For miscellaneous util methods used with share."""
import ddt
import mock
from manila.common import constants
from manila.share import utils as share_utils
from manila import test
@ddt.ddt
class ShareUtilsTestCase(test.TestCase):
    """Unit tests for the host-string helpers in manila.share.utils.

    Host strings follow the 'host[@backend[#pool]]' format; extract_host()
    pulls out a requested level, append_host() attaches a pool suffix, and
    get_active_replica() selects the active entry from a replica list.
    """
    # --- extract_host: default level is 'backend' ---
    def test_extract_host_without_pool(self):
        host = 'Host@Backend'
        self.assertEqual(
            'Host@Backend', share_utils.extract_host(host))
    def test_extract_host_only_return_host(self):
        host = 'Host@Backend'
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'host'))
    def test_extract_host_only_return_pool(self):
        host = 'Host@Backend'
        self.assertIsNone(
            share_utils.extract_host(host, 'pool'))
    def test_extract_host_only_return_backend(self):
        host = 'Host@Backend'
        self.assertEqual(
            'Host@Backend', share_utils.extract_host(host, 'backend'))
    def test_extract_host_missing_backend_and_pool(self):
        host = 'Host'
        # Default level is 'backend'
        self.assertEqual(
            'Host', share_utils.extract_host(host))
    def test_extract_host_only_return_backend_name(self):
        host = 'Host@Backend#Pool'
        self.assertEqual(
            'Backend', share_utils.extract_host(host, 'backend_name'))
    def test_extract_host_only_return_backend_name_index_error(self):
        # No '@Backend' segment present, so 'backend_name' cannot be split out.
        host = 'Host#Pool'
        self.assertRaises(IndexError,
                          share_utils.extract_host,
                          host, 'backend_name')
    def test_extract_host_missing_backend(self):
        host = 'Host#Pool'
        self.assertEqual(
            'Host', share_utils.extract_host(host))
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'host'))
    def test_extract_host_missing_backend_only_return_backend(self):
        host = 'Host#Pool'
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'backend'))
    def test_extract_host_missing_backend_only_return_pool(self):
        host = 'Host#Pool'
        self.assertEqual(
            'Pool', share_utils.extract_host(host, 'pool'))
        self.assertEqual(
            'Pool', share_utils.extract_host(host, 'pool', True))
    def test_extract_host_missing_pool(self):
        host = 'Host@Backend'
        self.assertIsNone(
            share_utils.extract_host(host, 'pool'))
    def test_extract_host_missing_pool_use_default_pool(self):
        host = 'Host@Backend'
        self.assertEqual(
            '_pool0', share_utils.extract_host(host, 'pool', True))
    def test_extract_host_with_default_pool(self):
        host = 'Host'
        # Default_pool_name doesn't work for level other than 'pool'
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'host', True))
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'host', False))
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'backend', True))
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'backend', False))
    def test_extract_host_with_pool(self):
        # Fully qualified host string: every level should be extractable.
        host = 'Host@Backend#Pool'
        self.assertEqual(
            'Host@Backend', share_utils.extract_host(host))
        self.assertEqual(
            'Host', share_utils.extract_host(host, 'host'))
        self.assertEqual(
            'Host@Backend', share_utils.extract_host(host, 'backend'),)
        self.assertEqual(
            'Pool', share_utils.extract_host(host, 'pool'))
        self.assertEqual(
            'Pool', share_utils.extract_host(host, 'pool', True))
    # --- append_host ---
    def test_append_host_with_host_and_pool(self):
        host = 'Host'
        pool = 'Pool'
        expected = 'Host#Pool'
        self.assertEqual(expected,
                         share_utils.append_host(host, pool))
    def test_append_host_with_host(self):
        host = 'Host'
        pool = None
        expected = 'Host'
        self.assertEqual(expected,
                         share_utils.append_host(host, pool))
    def test_append_host_with_pool(self):
        host = None
        pool = 'pool'
        expected = None
        self.assertEqual(expected,
                         share_utils.append_host(host, pool))
    def test_append_host_with_no_values(self):
        host = None
        pool = None
        expected = None
        self.assertEqual(expected,
                         share_utils.append_host(host, pool))
    # --- get_active_replica ---
    def test_get_active_replica_success(self):
        replica_list = [{'id': '123456',
                         'replica_state': constants.REPLICA_STATE_IN_SYNC},
                        {'id': '654321',
                         'replica_state': constants.REPLICA_STATE_ACTIVE},
                        ]
        replica = share_utils.get_active_replica(replica_list)
        self.assertEqual('654321', replica['id'])
    def test_get_active_replica_not_exist(self):
        replica_list = [{'id': '123456',
                         'replica_state': constants.REPLICA_STATE_IN_SYNC},
                        {'id': '654321',
                         'replica_state': constants.REPLICA_STATE_OUT_OF_SYNC},
                        ]
        replica = share_utils.get_active_replica(replica_list)
        self.assertIsNone(replica)
class NotifyUsageTestCase(test.TestCase):
    """Tests for share_utils.notify_about_share_usage."""

    @mock.patch('manila.share.utils._usage_from_share')
    @mock.patch('manila.share.utils.CONF')
    @mock.patch('manila.share.utils.rpc')
    def test_notify_about_share_usage(self, mock_rpc, mock_conf, mock_usage):
        # With no explicit host kwarg, the notifier should use CONF.host.
        mock_conf.host = 'host1'
        output = share_utils.notify_about_share_usage(mock.sentinel.context,
                                                      mock.sentinel.share,
                                                      mock.sentinel.
                                                      share_instance,
                                                      'test_suffix')
        self.assertIsNone(output)
        mock_usage.assert_called_once_with(mock.sentinel.share,
                                           mock.sentinel.share_instance)
        mock_rpc.get_notifier.assert_called_once_with('share',
                                                      'host1')
        # The notification event type is built from the 'share.' prefix plus
        # the suffix passed in.
        mock_rpc.get_notifier.return_value.info.assert_called_once_with(
            mock.sentinel.context,
            'share.test_suffix',
            mock_usage.return_value)

    @mock.patch('manila.share.utils._usage_from_share')
    @mock.patch('manila.share.utils.CONF')
    @mock.patch('manila.share.utils.rpc')
    def test_notify_about_share_usage_with_kwargs(self, mock_rpc, mock_conf,
                                                  mock_usage):
        mock_conf.host = 'host1'
        # extra_usage_info entries are expected to be forwarded to
        # _usage_from_share as keyword args, and the explicit host kwarg
        # should override CONF.host.
        output = share_utils.notify_about_share_usage(mock.sentinel.context,
                                                      mock.sentinel.share,
                                                      mock.sentinel.
                                                      share_instance,
                                                      'test_suffix',
                                                      extra_usage_info={
                                                          'a': 'b', 'c': 'd'},
                                                      host='host2')
        self.assertIsNone(output)
        mock_usage.assert_called_once_with(mock.sentinel.share,
                                           mock.sentinel.share_instance,
                                           a='b', c='d')
        mock_rpc.get_notifier.assert_called_once_with('share',
                                                      'host2')
        mock_rpc.get_notifier.return_value.info.assert_called_once_with(
            mock.sentinel.context,
            'share.test_suffix',
            mock_usage.return_value)
| {
"content_hash": "4615e763be4cdccda76e23b930cd25b2",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 79,
"avg_line_length": 40.11734693877551,
"alnum_prop": 0.5340200941116622,
"repo_name": "bswartz/manila",
"id": "cc7f89d2beb1abc653e855f979d5c550d3f78198",
"size": "8533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/share/test_share_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "9952105"
},
{
"name": "Shell",
"bytes": "106606"
}
],
"symlink_target": ""
} |
"""Wraps toco interface with python lazy loader."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# We need to import pywrap_tensorflow prior to the toco wrapper.
# pylint: disable=invalid-import-order,g-bad-import-order
from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
from tensorflow.python import _pywrap_toco_api
# TODO(b/137402359): Remove lazy loading wrapper
def wrapped_toco_convert(model_flags_str, toco_flags_str, input_data_str,
                         debug_info_str, enable_mlir_converter):
  """Wraps TocoConvert with lazy loader."""
  # extended_return is pinned to False; every other argument is forwarded
  # verbatim to the native converter entry point.
  extended_return = False
  return _pywrap_toco_api.TocoConvert(model_flags_str, toco_flags_str,
                                      input_data_str, extended_return,
                                      debug_info_str, enable_mlir_converter)
def wrapped_get_potentially_supported_ops():
  """Wraps TocoGetPotentiallySupportedOps with lazy loader."""
  # Thin pass-through kept so callers never touch the native module directly.
  supported_ops = _pywrap_toco_api.TocoGetPotentiallySupportedOps()
  return supported_ops
def wrapped_experimental_mlir_quantize(input_data_str):
  """Wraps experimental mlir quantize model."""
  # Forward the serialized model bytes straight to the native quantizer.
  quantized_model = _pywrap_toco_api.ExperimentalMlirQuantizeModel(
      input_data_str)
  return quantized_model
| {
"content_hash": "1cd26988ceb4414d12c3f7468b6aa54a",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 80,
"avg_line_length": 34.85294117647059,
"alnum_prop": 0.7358649789029535,
"repo_name": "gunan/tensorflow",
"id": "2d3357819a43ed54fe9002d2c904d372f341661f",
"size": "1874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/lite/python/wrap_toco.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5003"
},
{
"name": "Batchfile",
"bytes": "45924"
},
{
"name": "C",
"bytes": "774953"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "77908225"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "104215"
},
{
"name": "Go",
"bytes": "1841471"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "962443"
},
{
"name": "Jupyter Notebook",
"bytes": "556650"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1479029"
},
{
"name": "Makefile",
"bytes": "58603"
},
{
"name": "Objective-C",
"bytes": "104667"
},
{
"name": "Objective-C++",
"bytes": "297830"
},
{
"name": "PHP",
"bytes": "23994"
},
{
"name": "Pascal",
"bytes": "3739"
},
{
"name": "Pawn",
"bytes": "17039"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "39476740"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7459"
},
{
"name": "Shell",
"bytes": "650007"
},
{
"name": "Smarty",
"bytes": "34649"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
from oslo_log import log as logging
import trove.common.apischema as apischema
from trove.common.auth import admin_context
from trove.common.i18n import _
from trove.common import wsgi
from trove.extensions.mgmt.upgrade.models import UpgradeMessageSender
# Module-level logger, named after this module per oslo.log convention.
LOG = logging.getLogger(__name__)
class UpgradeController(wsgi.Controller):
    """Controller for the guest-agent upgrade extension."""

    schemas = apischema.upgrade

    @admin_context
    def create(self, req, body, tenant_id, instance_id):
        """Dispatch an upgrade notification for a guest agent instance."""
        # Log the incoming request before doing any work.
        LOG.info(_("Sending upgrade notifications\nreq : '%(req)s'\n"
                   "Admin tenant_id: %(tenant_id)s"),
                 {"tenant_id": tenant_id, "req": req})

        context = req.environ.get(wsgi.CONTEXT_KEY)
        payload = body['upgrade']

        # Build the notification sender from the optional upgrade fields
        # and fire it.
        sender = UpgradeMessageSender.create(
            context,
            instance_id,
            payload.get('instance_version'),
            payload.get('location'),
            payload.get('metadata'))
        sender()

        # 202 Accepted: the notification was dispatched, not completed.
        return wsgi.Result(None, 202)
| {
"content_hash": "19f86b59783aef7b6ad8e1160b2b7d09",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 71,
"avg_line_length": 30.083333333333332,
"alnum_prop": 0.6583564173591875,
"repo_name": "zhangg/trove",
"id": "d4eba1944d947199b4edf4c5dbb08a5806447712",
"size": "1719",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trove/extensions/mgmt/upgrade/service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4546016"
},
{
"name": "Shell",
"bytes": "145524"
}
],
"symlink_target": ""
} |
"""Serial algorithm for eigh, employing canonical (fixed trace) purification."""
import functools
import jax
from jax import lax
import jax.numpy as jnp
import numpy as np
from distla_core.linalg.eigh.serial import purify
from distla_core.linalg.polar.serial import polar
from distla_core.utils import misc
@functools.partial(jax.jit, static_argnums=(3, 4))
def split_spectrum(P, H, V, k, precision):
  """
  Computes projections of the matrix H into the column and null spaces
  of the projector P.

  Returns the projected matrices along with copies of V, updated to now
  include the isometries effecting the projections.

  Args:
    P: The `N x N` Hermitian projector into the subspace of `H`'s `k` smallest
      eigenvalues.
    H: The `N x N` Hermitian matrix to project.
    V: Matrix of isometries to be updated; may be None, in which case the
      isometries are returned without being composed with anything.
    k: The number of eigenvalues to be placed in the subspace of small
      eigenvalues.
    precision: The matmul precision.
  Returns:
    H_minus: The `k x k` matrix sharing `H`'s `k` smallest eigenvalues.
    V_minus: `V` times the isometry mapping `H` to `H_minus`.
    H_plus: The `N-k x N-k` matrix sharing `H`'s `N-k` largest eigenvalues.
    V_plus: `V` times the isometry mapping `H` to `H_plus`.
  """
  # Isometries onto P's column space (V_minus) and null space (V_plus).
  V_minus, V_plus = purify.subspace(P, k, precision, "complete")
  H_minus = misc.similarity_transform(H, V_minus, precision)
  H_plus = misc.similarity_transform(H, V_plus, precision)
  # Compose with the accumulated isometries, if any.
  if V is not None:
    V_minus = jnp.dot(V, V_minus, precision=precision)
    V_plus = jnp.dot(V, V_plus, precision=precision)
  return H_minus, V_minus, H_plus, V_plus
def _combine_eigenblocks(out_minus, out_plus):
"""
Concatenates H_minus with H_plus, and V_minus with V_plus.
"""
H_minus, V_minus = out_minus
H_plus, V_plus = out_plus
H = np.hstack((H_minus, H_plus))
V = np.hstack((V_minus, V_plus))
return H, V
def _eigh_work(H, V, precision):
"""
The main work loop performing the symmetric eigendecomposition of an
`N x N` Hermitian matrix `H`.
Each step recursively computes a projector into the space of eigenvalues
above and beneath the `N // 2`'th eigenvalue.
The result of the projections into and out of
that space, along with the isometries accomplishing these, are then computed.
This is performed recursively until the projections have size 128, at
which point a standard eigensolver is used. The results are then composed.
A future implementation will use the Jax rather than the NumPy version of
`eigh`, once the fast ASIC version of the former is added.
Args:
H: The Hermitian input.
V: Stores the isometries projecting H into its subspaces.
precision: The matmul precision.
Returns:
H, V: The result of the projection.
"""
N = H.shape[0]
if N <= 128:
H, Vk = np.linalg.eigh(H) # TODO: replace with jnp.linalg.eigh
if V is not None:
Vk = jnp.dot(V, Vk, precision=precision)
return H, Vk
k = N // 2
P, _, errs = purify.canonically_purify(H, k, precision=precision)
H_minus, V_minus, H_plus, V_plus = split_spectrum(P, H, V, k, precision)
out_minus = _eigh_work(H_minus, V_minus, precision)
out_plus = _eigh_work(H_plus, V_plus, precision)
return _combine_eigenblocks(out_minus, out_plus)
def eigh(H, precision=lax.Precision.HIGHEST):
  """
  Computes the eigendecomposition of the symmetric/Hermitian matrix H.

  Args:
    H: The Hermitian input. Hermiticity is not enforced.
    precision: The matmul precision.
  Returns:
    evals, eVecs: The *unsorted* eigenvalues and eigenvectors.
  """
  rows, cols = H.shape
  if rows != cols:
    raise TypeError(f"Input H of shape {H.shape} must be square.")
  # Small matrices go straight to the dense eigensolver; larger ones are
  # split recursively via _eigh_work.
  if rows <= 128:
    return np.linalg.eigh(H)
  return _eigh_work(H, None, precision)
def svd(A, precision=lax.Precision.HIGHEST):
  """
  Computes the SVD of the input matrix A.

  Args:
    A: The input matrix.
    precision: The matmul precision.
  Returns:
    U, S, V: Such that A = (U * S) @ V.conj().T
  """
  # Polar decomposition A = Up @ H with Hermitian H; eigendecomposing H
  # gives the singular values and right vectors, and Up @ V the left ones.
  unitary_factor, hermitian_factor, _, _, _ = polar.polar(
      A, precision=precision)
  S, V = eigh(hermitian_factor, precision=precision)
  U = jnp.dot(unitary_factor, V, precision=precision)
  return U, S, V
| {
"content_hash": "2328dc2e402e81f2d6ff4123f7118d70",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 80,
"avg_line_length": 32.503937007874015,
"alnum_prop": 0.686531007751938,
"repo_name": "google/distla_core",
"id": "a31144875e458945d7755013c413d700d66dce8d",
"size": "4812",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "distla/distla_core/distla_core/linalg/eigh/serial/eigh_canonical.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1317325"
},
{
"name": "Shell",
"bytes": "5454"
}
],
"symlink_target": ""
} |
"""
A collection of fonts and the functions necessary for accessing them.
A common problem in drawing libraries is finding and loading fonts. The
usual functions for these (in PIL, aggdraw, matplotlib) require a specific
single path. If this path is hardcoded in, portability issues can arise.
This module solves this problem by encompassing a group of freely available
fonts, the necessary functions to list and get the paths leading to them::
> import fonts
> theFPath = fonts.getFontPath ("vera.ttf")
'C:\\Documents and Settings\agapow\My Documents\...\fonts\vera.ttf'
> fonts.gettruetypeFonts()
['cmex10.ttf', 'cmr10.ttf', ... 'VeraSeBd.ttf']
Note that the truetype fonts are made available via Bitstream, Inc. See
``COPYRIGHT.TXT`` for more details.
"""
# TODO: more fonts
# TODO: default font?
# TODO: change name style
# Docstrings in this package are written in reStructuredText.
__docformat__ = 'restructuredtext'
### IMPORTS ###
import os
import os.path
import exceptions
# Explicit public API. The previous value ([''] — a single empty string)
# made `from ... import *` fail with an AttributeError, since no module
# attribute is named ''.
__all__ = [
    'getFontPath',
    'listFontsByExtension',
    'listTruetypeFonts',
]


### CONSTANTS & DEFINES ###

# To locate (and load or open) the fonts in this module, we need to get the
# path of this file and then of the module.
__ourPath = globals() ['__file__']
__MODULEPATH = os.path.abspath (os.path.dirname (__ourPath))
# The bundled font files live in the package's 'data' subdirectory.
_DATAPATH = os.path.join (__MODULEPATH, 'data')
### IMPLEMENTATION ###
def getFontPath (name):
	"""
	Return the full path of the named bundled font.

	:Parameters:
		name
			Filename of the font, e.g. ``'vera.ttf'``.

	:Returns:
		The absolute path to the font file in the package data directory.

	Raises ``ValueError`` if no such font file exists.
	"""
	thePath = os.path.join (_DATAPATH, name)
	if (os.path.exists (thePath)):
		return thePath
	else:
		# Use the builtin ValueError: the Python 2 `exceptions` module does
		# not exist in Python 3, and `exceptions.ValueError` was the very
		# same class as the builtin, so this is backward compatible.
		raise ValueError ("font '%s' cannot be found or loaded" % name)
def listFontsByExtension (ext):
	"""
	What fonts are available that end in this extension?

	Note that this is case-sensitive: '18.PIL' is different to '18.pil'.
	"""
	availableFiles = os.listdir (_DATAPATH)
	return [fileName for fileName in availableFiles if fileName.endswith (ext)]
def listTruetypeFonts ():
	"""
	List the bundled TrueType ('.ttf') font files.
	"""
	return listFontsByExtension ('.ttf')
### END #######################################################################
| {
"content_hash": "9a5e87ba0cc4176f8a5ec2d0db8b98b5",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 79,
"avg_line_length": 27.294117647058822,
"alnum_prop": 0.6821120689655172,
"repo_name": "agapow/relais.webviz",
"id": "cd4d21fb967d2a53da0c5363fa8c93a25c4fb19c",
"size": "1903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "relais/webviz/pilfonts/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "79208"
}
],
"symlink_target": ""
} |
from typing import Optional
# third party
from nacl.signing import VerifyKey
# relative
from ......grid import GridURL
from .....common.serde.serializable import serializable
from ....domain_msg_registry import DomainMessageRegistry
from ....network_msg_registry import NetworkMessageRegistry
from ....node_service import NodeServiceInterface
from ...permissions.user_permissions import NoRestriction
from ...permissions.user_permissions import UserIsOwner
from ..generic_payload.syft_message import NewSyftMessage as SyftMessage
from ..generic_payload.syft_message import ReplyPayload
from ..generic_payload.syft_message import RequestPayload
from .node_credentials import NodeCredentials
# ExchangeCredentials Messages
# Step 1: InitiateExchangeCredentialsWithNodeMessage
# Step 2: ExchangeCredentialsWithNodeMessage
@serializable(recursive_serde=True)
class InitiateExchangeCredentialsWithNodeMessage(
    SyftMessage, DomainMessageRegistry, NetworkMessageRegistry
):
    """Step 1 of the credential exchange: ask this node to contact a peer."""

    permissions = [NoRestriction | UserIsOwner]  # UserIsOwner not working

    # Pydantic Inner class to define expected request payload fields.
    class Request(RequestPayload):
        """Payload fields and types used during a Request."""

        target_node_url: str

    # Pydantic Inner class to define expected reply payload fields.
    class Reply(ReplyPayload):
        """Payload fields and types used during a Response."""

        message: str = "Node credentials exchanged."

    request_payload_type = Request
    reply_payload_type = Reply

    def run(  # type: ignore
        self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None
    ) -> ReplyPayload:  # type: ignore
        """Validates the request parameters and sends a ExchangeCredentialsWithNodeMessage.

        Args:
            node (NodeServiceInterface): Node either Domain or Network.
            verify_key (Optional[VerifyKey], optional): User signed verification key. Defaults to None.

        Raises:
            InvalidNodeCredentials: If the credentials are invalid

        Returns:
            ReplyPayload: Message on successful exchange.
        """
        # TODO: get the client from the node and use a hashmap of uuid and / or url
        # to cache existing client objects to target nodes
        # relative
        from ......grid.client.client import connect

        # Normalize the user-supplied URL; as_container_host rewrites
        # localhost-style hosts so containerized deployments can reach them.
        peer_url = (
            GridURL.from_url(self.payload.target_node_url)
            .with_path("/api/v1")
            .as_container_host(container_host=node.settings.CONTAINER_HOST)
        )

        # Sign outgoing messages with this node's own key so the peer can
        # verify them against the credentials we send.
        peer_client = connect(url=peer_url, timeout=10, user_key=node.signing_key)

        own_credentials = node.get_credentials()

        # This triggers ExchangeCredentialsWithNodeMessage on the peer (below).
        signed_response = peer_client.networking.exchange_credentials_with_node(
            credentials=own_credentials
        )

        # Since we're getting back the SignedMessage it can't hurt to check
        # once more.
        if not signed_response.is_valid:
            raise Exception(
                "Response was signed by a fake key or was corrupted in transit."
            )

        peer_credentials = NodeCredentials(
            **signed_response.message.payload.credentials  # type: ignore
        )
        # Confirm the credentials were signed by the key they claim.
        peer_credentials.validate(key=signed_response.verify_key)

        # Persist the peer's NodeCredentials.
        node.node.add_or_update_node_credentials(credentials=peer_credentials)
        return self.Reply()
@serializable(recursive_serde=True)
class ExchangeCredentialsWithNodeMessage(
    SyftMessage, DomainMessageRegistry, NetworkMessageRegistry
):
    """Step 2 of the credential exchange: receive, validate, and reply."""

    permissions = [NoRestriction]

    # Pydantic Inner class to define expected request payload fields.
    class Request(RequestPayload):
        """Payload fields and types used during a Request."""

        credentials: dict

    # Pydantic Inner class to define expected reply payload fields.
    class Reply(ReplyPayload):
        """Payload fields and types used during a Response."""

        credentials: dict
        message: str = "Node credentials validated and added."

    request_payload_type = Request
    reply_payload_type = Reply

    def run(  # type: ignore
        self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None
    ) -> ReplyPayload:  # type: ignore
        """Validates the request parameters and exchanges credentials.

        Args:
            node (NodeServiceInterface): Node either Domain or Network.
            verify_key (Optional[VerifyKey], optional): User signed verification key. Defaults to None.

        Raises:
            InvalidNodeCredentials: If the credentials are invalid

        Returns:
            ReplyPayload: Message on successful user creation.
        """
        incoming_credentials = NodeCredentials(**self.payload.credentials)

        # The key we store, compared against the node table, must belong to
        # the holder of the private key that signed this message.
        incoming_credentials.validate(key=verify_key)
        node.node.add_or_update_node_credentials(
            credentials=incoming_credentials)

        # Answer with this node's own NodeCredentials so the exchange is
        # symmetric.
        own_credentials = node.get_credentials()
        return self.Reply(**{"credentials": own_credentials})
| {
"content_hash": "364aab5b6f541f5022773dd969bd61e2",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 103,
"avg_line_length": 36.52258064516129,
"alnum_prop": 0.6963434022257552,
"repo_name": "OpenMined/PySyft",
"id": "9f770d00f5ad5b3bac8c0427e0d862a2e79409fa",
"size": "5670",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "packages/syft/src/syft/core/node/common/node_service/node_credential/node_credential_messages.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2084"
},
{
"name": "Cap'n Proto",
"bytes": "1377"
},
{
"name": "Dockerfile",
"bytes": "9740"
},
{
"name": "HCL",
"bytes": "4438"
},
{
"name": "JavaScript",
"bytes": "85898"
},
{
"name": "Jupyter Notebook",
"bytes": "33167760"
},
{
"name": "Makefile",
"bytes": "7605"
},
{
"name": "Mako",
"bytes": "510"
},
{
"name": "PowerShell",
"bytes": "161"
},
{
"name": "Python",
"bytes": "3710174"
},
{
"name": "Shell",
"bytes": "52371"
},
{
"name": "TypeScript",
"bytes": "346493"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function, unicode_literals
from future.builtins import *
from unittest import TestCase
from ... import SpannerModelRegistry
from .helper import TestModelA, TestModelB, TestModelC, TestModelD
class SpannerModelTests(TestCase):
    """Tests for SpannerModelRegistry ordering and DDL generation."""

    def test_get_registered_models_in_correct_order(self):
        # NOTE(review): the expected order suggests parents precede their
        # interleaved children (A before B/D before C) — confirm against
        # the registry's sort logic.
        sorted_models = list(SpannerModelRegistry.get_registered_models_in_correct_order())
        self.assertEqual(len(sorted_models), 4)
        self.assertEqual(sorted_models[0], TestModelA)
        self.assertEqual(sorted_models[1], TestModelB)
        self.assertEqual(sorted_models[2], TestModelD)
        self.assertEqual(sorted_models[3], TestModelC)

    def test_stmt_create(self):
        ddl_statements = SpannerModelRegistry.create_table_statements()
        self.assertEqual(len(ddl_statements), 10)
        # ModelA
        self.assertEqual(ddl_statements[0], """CREATE TABLE `model_a` (
`id_a` INT64 NOT NULL,
`field_int_not_null` INT64 NOT NULL,
`field_int_null` INT64 NULL,
`field_string_not_null` INT64 NOT NULL,
`field_string_null` STRING(200) NULL
) PRIMARY KEY (`id_a` );""")
        # interleave index test
        # fixme: move to own test
        self.assertEqual(ddl_statements[3], "CREATE INDEX `interleaved` ON `model_b` (`id_a` , `idb_b` DESC, `value_field_x` , `value_field_y` ), INTERLEAVE IN `model_a`;")
        # ModelD
        self.assertEqual(ddl_statements[6], """CREATE TABLE `model_d` (
`id_a` INT64 NOT NULL,
`id_b` INT64 NOT NULL,
`value_field_x` INT64 NULL,
`value_field_y` INT64 NULL,
`value_field_z` STRING(5) NULL,
`id_d` INT64 NOT NULL
) PRIMARY KEY (`id_a` , `id_b` ) INTERLEAVE IN `model_a ` ON DELETE CASCADE;""")

    def test_prio_dict(self):
        # Invert the priority dict {prio: [models]} into {model: prio} for
        # easier assertions below.
        prio_dict = SpannerModelRegistry.get_registered_models_prio_dict()
        prio_dict_lookup = {}
        for i in range(0, 10):
            for cls in prio_dict[i]:
                prio_dict_lookup[cls] = i
        self.assertEqual(prio_dict_lookup[TestModelA], 0)
        self.assertEqual(prio_dict_lookup[TestModelB], 1)
        self.assertEqual(prio_dict_lookup[TestModelD], 1)
        self.assertEqual(prio_dict_lookup[TestModelC], 2)

    def test_stmt_delete(self):
        # Drop statements must come back in the same registry order as the
        # creates.
        ddl_statements = SpannerModelRegistry.delete_table_statements()
        self.assertEqual(len(ddl_statements), 4)
        self.assertEqual(ddl_statements[0], 'DROP TABLE `model_a`')
        self.assertEqual(ddl_statements[1], 'DROP TABLE `model_b`')
        self.assertEqual(ddl_statements[2], 'DROP TABLE `model_d`')
        self.assertEqual(ddl_statements[3], 'DROP TABLE `model_c`')
| {
"content_hash": "ee5897e61f2e32c0ecd88c8fabdd673b",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 173,
"avg_line_length": 39.696969696969695,
"alnum_prop": 0.6706106870229007,
"repo_name": "AndreCimander/ezspanner",
"id": "993d314487ef506d32154f9670815e9ddaee44fb",
"size": "2644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ezspanner/ezspanner/tests/v1/test_model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75872"
}
],
"symlink_target": ""
} |
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseBadRequest, JsonResponse
from django.shortcuts import get_object_or_404, render
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.views.decorators.http import require_POST
from django.views.decorators.vary import vary_on_headers
from wagtail.admin.utils import PermissionPolicyChecker
from wagtail.core.models import Collection
from wagtail.images import get_image_model
from wagtail.images.fields import ALLOWED_EXTENSIONS
from wagtail.images.forms import get_image_form
from wagtail.images.permissions import permission_policy
from wagtail.search.backends import get_search_backends
# Checker bound to the image permission policy; used by the view decorators
# below to gate access.
permission_checker = PermissionPolicyChecker(permission_policy)
def get_image_edit_form(ImageModel):
    """Build an edit form class for ImageModel with the file and focal
    point fields excluded (those are handled by the uploader itself)."""
    base_form = get_image_form(ImageModel)

    class ImageEditForm(base_form):
        class Meta(base_form.Meta):
            model = ImageModel
            exclude = (
                'file',
                'focal_point_x',
                'focal_point_y',
                'focal_point_width',
                'focal_point_height',
            )

    return ImageEditForm
@permission_checker.require('add')
@vary_on_headers('X-Requested-With')
def add(request):
    """Multi-image upload view.

    GET renders the uploader page; POST (AJAX only) accepts a single file
    in 'files[]', saves it as a new image, and returns JSON containing
    either an inline edit form for the saved image or the validation
    errors.
    """
    Image = get_image_model()
    ImageForm = get_image_form(Image)

    collections = permission_policy.collections_user_has_permission_for(request.user, 'add')
    if len(collections) > 1:
        collections_to_choose = Collection.order_for_display(collections)
    else:
        # no need to show a collections chooser
        collections_to_choose = None

    if request.method == 'POST':
        if not request.is_ajax():
            return HttpResponseBadRequest("Cannot POST to this view without AJAX")

        if not request.FILES:
            return HttpResponseBadRequest("Must upload a file")

        # Build a form for validation
        form = ImageForm({
            'title': request.FILES['files[]'].name,
            'collection': request.POST.get('collection'),
        }, {
            'file': request.FILES['files[]'],
        }, user=request.user)

        if form.is_valid():
            # Save it
            image = form.save(commit=False)
            image.uploaded_by_user = request.user
            image.file_size = image.file.size
            # Hash the uploaded bytes, rewinding before and after so the
            # file remains readable for subsequent consumers.
            image.file.seek(0)
            image._set_file_hash(image.file.read())
            image.file.seek(0)
            image.save()

            # Success! Send back an edit form for this image to the user
            return JsonResponse({
                'success': True,
                'image_id': int(image.id),
                'form': render_to_string('wagtailimages/multiple/edit_form.html', {
                    'image': image,
                    'form': get_image_edit_form(Image)(
                        instance=image, prefix='image-%d' % image.id, user=request.user
                    ),
                }, request=request),
            })
        else:
            # Validation error
            return JsonResponse({
                'success': False,
                # https://github.com/django/django/blob/stable/1.6.x/django/forms/util.py#L45
                'error_message': '\n'.join(['\n'.join([force_text(i) for i in v]) for k, v in form.errors.items()]),
            })
    else:
        form = ImageForm(user=request.user)

    return render(request, 'wagtailimages/multiple/add.html', {
        'max_filesize': form.fields['file'].max_upload_size,
        'help_text': form.fields['file'].help_text,
        'allowed_extensions': ALLOWED_EXTENSIONS,
        'error_max_file_size': form.fields['file'].error_messages['file_too_large_unknown_size'],
        'error_accepted_file_types': form.fields['file'].error_messages['invalid_image'],
        'collections': collections_to_choose,
    })
@require_POST
def edit(request, image_id, callback=None):
    """AJAX endpoint updating a single image uploaded via the multi-uploader."""
    Image = get_image_model()
    EditForm = get_image_edit_form(Image)

    image = get_object_or_404(Image, id=image_id)

    # Reject non-AJAX requests and unauthorized users outright.
    if not request.is_ajax():
        return HttpResponseBadRequest("Cannot POST to this view without AJAX")

    if not permission_policy.user_has_permission_for_instance(request.user, 'change', image):
        raise PermissionDenied

    form = EditForm(
        request.POST, request.FILES, instance=image, prefix='image-' + image_id, user=request.user
    )

    if not form.is_valid():
        # Send the bound form back so the client can render the errors.
        return JsonResponse({
            'success': False,
            'image_id': int(image_id),
            'form': render_to_string('wagtailimages/multiple/edit_form.html', {
                'image': image,
                'form': form,
            }, request=request),
        })

    form.save()

    # Reindex the image to make sure all tags are indexed
    for backend in get_search_backends():
        backend.add(image)

    return JsonResponse({
        'success': True,
        'image_id': int(image_id),
    })
@require_POST
def delete(request, image_id):
    """AJAX endpoint removing an image uploaded via the multi-uploader."""
    image = get_object_or_404(get_image_model(), id=image_id)

    # Reject non-AJAX requests and unauthorized users outright.
    if not request.is_ajax():
        return HttpResponseBadRequest("Cannot POST to this view without AJAX")

    if not permission_policy.user_has_permission_for_instance(request.user, 'delete', image):
        raise PermissionDenied

    image.delete()

    response_payload = {'success': True, 'image_id': int(image_id)}
    return JsonResponse(response_payload)
| {
"content_hash": "82e62d074bb15222764d8da7991c28ee",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 116,
"avg_line_length": 34.30246913580247,
"alnum_prop": 0.6149001259672485,
"repo_name": "nealtodd/wagtail",
"id": "bd2740f16a7f9fc37b365439f7d9ab61aa28c4f9",
"size": "5557",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wagtail/images/views/multiple.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "190511"
},
{
"name": "Dockerfile",
"bytes": "703"
},
{
"name": "HTML",
"bytes": "371011"
},
{
"name": "JavaScript",
"bytes": "262163"
},
{
"name": "Makefile",
"bytes": "992"
},
{
"name": "Python",
"bytes": "3564287"
},
{
"name": "Shell",
"bytes": "8289"
}
],
"symlink_target": ""
} |
from __future__ import annotations
from typing import TYPE_CHECKING, Optional
import logging
import math
from ezdxf.math import Vec2, UCS, NULLVEC
from ezdxf.lldxf import const
from ezdxf.entities import DimStyleOverride, Dimension
from .dim_base import (
BaseDimensionRenderer,
get_required_defpoint,
compile_mtext,
)
if TYPE_CHECKING:
from ezdxf.eztypes import GenericLayoutType
# Public API of this module.
__all__ = ["OrdinateDimension"]

# Module logger shared by the ezdxf dimension renderers.
logger = logging.getLogger("ezdxf")
class OrdinateDimension(BaseDimensionRenderer):
# Required defpoints:
# defpoint = origin (group code 10)
# defpoint2 = feature location (group code 13)
# defpoint3 = end of leader (group code 14)
# user text location is ignored (group code 11) and replaced by default
# location calculated by the ezdxf renderer:
def __init__(
self,
dimension: Dimension,
ucs: Optional[UCS] = None,
override: Optional[DimStyleOverride] = None,
):
# The local coordinate system is defined by origin and the
# horizontal_direction in OCS:
self.origin_ocs: Vec2 = get_required_defpoint(dimension, "defpoint")
self.feature_location_ocs: Vec2 = get_required_defpoint(
dimension, "defpoint2"
)
self.end_of_leader_ocs: Vec2 = get_required_defpoint(
dimension, "defpoint3"
)
# Horizontal direction in clockwise orientation, see DXF reference
# for group code 51:
self.horizontal_dir = -dimension.dxf.get("horizontal_direction", 0.0)
self.rotation = math.radians(self.horizontal_dir)
self.local_x_axis = Vec2.from_angle(self.rotation)
self.local_y_axis = self.local_x_axis.orthogonal()
self.x_type = bool( # x-type is set!
dimension.dxf.get("dimtype", 0) & const.DIM_ORDINATE_TYPE
)
super().__init__(dimension, ucs, override)
# Measurement directions can be opposite to local x- or y-axis
self.leader_vec_ocs = self.end_of_leader_ocs - self.feature_location_ocs
leader_x_vec = self.local_x_axis.project(self.leader_vec_ocs)
leader_y_vec = self.local_y_axis.project(self.leader_vec_ocs)
try:
self.measurement_direction: Vec2 = leader_x_vec.normalize()
except ZeroDivisionError:
self.measurement_direction = Vec2(1, 0)
try:
self.measurement_orthogonal: Vec2 = leader_y_vec.normalize()
except ZeroDivisionError:
self.measurement_orthogonal = Vec2(0, 1)
if not self.x_type:
self.measurement_direction, self.measurement_orthogonal = (
self.measurement_orthogonal,
self.measurement_direction,
)
self.update_measurement()
if self.tol.has_limits:
self.tol.update_limits(self.measurement.value)
# Text width and -height is required first, text location and -rotation
# are not valid yet:
self.text_box = self.init_text_box()
# Set text location and rotation:
self.measurement.text_location = self.get_default_text_location()
self.measurement.text_rotation = self.get_default_text_rotation()
# Update text box location and -rotation:
self.text_box.center = self.measurement.text_location
self.text_box.angle = self.measurement.text_rotation
self.geometry.set_text_box(self.text_box)
# Update final text location in the DIMENSION entity:
self.dimension.dxf.text_midpoint = self.measurement.text_location
def get_default_text_location(self) -> Vec2:
if self.x_type:
text_vertical_shifting_dir = -self.local_x_axis
else:
text_vertical_shifting_dir = self.local_y_axis
# user text location is not supported and ignored:
return (
self.end_of_leader_ocs
+ self.measurement_orthogonal * (self.text_box.width * 0.5)
+ text_vertical_shifting_dir
* self.measurement.text_vertical_distance()
)
def get_default_text_rotation(self) -> float:
# user text rotation is not supported and ignored:
return (90.0 if self.x_type else 0.0) + self.horizontal_dir
def update_measurement(self) -> None:
feature_location_vec: Vec2 = self.feature_location_ocs - self.origin_ocs
# ordinate measurement is always absolute:
self.measurement.update(
self.local_x_axis.project(feature_location_vec).magnitude
if self.x_type
else self.local_y_axis.project(feature_location_vec).magnitude
)
def get_defpoints(self) -> list[Vec2]:
return [
self.origin_ocs,
self.feature_location_ocs,
self.end_of_leader_ocs,
]
def transform_ucs_to_wcs(self) -> None:
"""Transforms dimension definition points into WCS or if required into
OCS.
"""
def from_ucs(attr, func):
point = dxf.get(attr, NULLVEC)
dxf.set(attr, func(point))
dxf = self.dimension.dxf
ucs = self.geometry.ucs
from_ucs("defpoint", ucs.to_wcs)
from_ucs("defpoint2", ucs.to_wcs)
from_ucs("defpoint3", ucs.to_wcs)
from_ucs("text_midpoint", ucs.to_ocs)
# Horizontal direction in clockwise orientation, see DXF reference
# for group code 51:
dxf.horizontal_direction = -ucs.to_ocs_angle_deg(self.horizontal_dir)
def render(self, block: GenericLayoutType) -> None:
    """Main method to create dimension geometry of basic DXF entities in the
    associated BLOCK layout.

    Args:
        block: target BLOCK for rendering

    """
    super().render(block)
    self.add_ordinate_leader()
    measurement = self.measurement
    if measurement.text:
        # MTEXT content requires DXF R2000 or later:
        content = (
            compile_mtext(measurement, self.tol)
            if self.geometry.supports_dxf_r2000
            else measurement.text
        )
        self.add_measurement_text(
            content, measurement.text_location, measurement.text_rotation
        )
    self.geometry.add_defpoints(self.get_defpoints())
def add_ordinate_leader(self) -> None:
    """Add the 3-segment ordinate leader to the dimension BLOCK.

    Leader layout, where ``x`` is the feature location::

        /---1---TEXT
        x----0----/

    """
    # DXF attributes come from the first extension line, not from the
    # dimension line!
    dxfattribs = self.extension_lines.dxfattribs(1)
    # The ordinate leader is normal to the measurement direction; this is
    # also the text direction:
    normal = self.measurement_orthogonal
    leg_size = self.arrows.arrow_size * 2.0
    # d0 = distance from feature location (x) to the 1st upward junction:
    d0 = normal.project(self.leader_vec_ocs).magnitude - 2.0 * leg_size
    feature = self.feature_location_ocs
    leader_end = self.end_of_leader_ocs
    vertices = [
        feature + normal * self.extension_lines.offset,
        feature + normal * max(leg_size, d0),
        leader_end - normal * leg_size,
        leader_end,
    ]
    if self.measurement.vertical_placement != 0:
        # Extend the last segment by the text width when the text is not
        # centered on the leader end point:
        vertices[3] += normal * self.text_box.width
    for start, end in zip(vertices[:-1], vertices[1:]):
        self.add_line(start, end, dxfattribs=dxfattribs)
def add_measurement_text(
    self, dim_text: str, pos: Vec2, rotation: float
) -> None:
    """Add measurement text to dimension BLOCK.

    Args:
        dim_text: dimension text
        pos: text location
        rotation: text rotation in degrees

    """
    self.add_text(
        dim_text,
        pos=pos,
        rotation=rotation,
        dxfattribs=self.measurement.dxfattribs(),
    )
| {
"content_hash": "491e956a8c9d85c13ced9b07f34b11d4",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 80,
"avg_line_length": 37.729468599033815,
"alnum_prop": 0.6261203585147247,
"repo_name": "mozman/ezdxf",
"id": "e534d69d196b18b4c26ef6fb440e5a03dd24af76",
"size": "7875",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ezdxf/render/dim_ordinate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5745"
},
{
"name": "CSS",
"bytes": "3565"
},
{
"name": "Common Lisp",
"bytes": "727"
},
{
"name": "Cython",
"bytes": "111923"
},
{
"name": "HTML",
"bytes": "1417"
},
{
"name": "JavaScript",
"bytes": "11132"
},
{
"name": "Python",
"bytes": "6336553"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class IdssrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``idssrc`` property of ``box`` traces."""

    def __init__(self, plotly_name="idssrc", parent_name="box", **kwargs):
        # Default edit_type/role may be overridden by the caller:
        kwargs.setdefault("edit_type", "none")
        kwargs.setdefault("role", "info")
        super(IdssrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
| {
"content_hash": "63269a90acfd3d5bd0a2918dbf1cfa46",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 74,
"avg_line_length": 35.916666666666664,
"alnum_prop": 0.5986078886310905,
"repo_name": "plotly/python-api",
"id": "cb8e8d0141a5f394bab4487ab27a14edd3bf7425",
"size": "431",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/box/_idssrc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
"""Signing Model Objects
This module contains classes that encapsulate data about the signing process.
"""
import os.path
class CodeSignedProduct(object):
    """Represents a build product that will be signed with `codesign(1)`."""

    def __init__(self,
                 path,
                 identifier,
                 options=None,
                 requirements=None,
                 identifier_requirement=True,
                 sign_with_identifier=False,
                 entitlements=None,
                 verify_options=None):
        """A build product to be codesigned.

        Args:
            path: Path to the product, relative to a work directory that
                contains the build products.
            identifier: Unique identifier set when code signing. Only passed
                explicitly via `--identifier` when |sign_with_identifier| is
                True.
            options: Flags for `codesign --options`, from |CodeSignOptions|.
            requirements: Additional `--requirements` string for `codesign`,
                joined with a space to
                |config.CodeSignConfig.codesign_requirements_basic|. See
                |CodeSignedProduct.requirements_string()|.
            identifier_requirement: When True, a designated identifier
                requirement based on |identifier| is inserted into the
                requirements string; when False, no identifier-based
                designated requirement is generated.
            sign_with_identifier: When True, the identifier is passed to the
                `codesign` command; when False, `codesign` infers it.
            entitlements: File name of the entitlements file used for
                signing; expected to live in |Paths.packaging_dir|.
            verify_options: Flags for `codesign --verify`, from
                |VerifyOptions|.
        """
        self.path = path
        self.identifier = identifier
        if not CodeSignOptions.valid(options):
            raise ValueError('Invalid CodeSignOptions: {}'.format(options))
        self.options = options
        self.requirements = requirements
        self.identifier_requirement = identifier_requirement
        self.sign_with_identifier = sign_with_identifier
        self.entitlements = entitlements
        if not VerifyOptions.valid(verify_options):
            raise ValueError('Invalid VerifyOptions: {}'.format(verify_options))
        self.verify_options = verify_options

    def requirements_string(self, config):
        """Produces a full requirements string for the product.

        Args:
            config: A |config.CodeSignConfig| object.

        Returns:
            A string for designated requirements of the product, which can be
            passed to `codesign --requirements`.
        """
        # An ad-hoc signing identity ('-') appends a hash to the identifier,
        # which would violate the identifier_requirement and most other
        # requirements, so no requirements are enforced in that case.
        if config.identity == '-':
            return ''

        parts = []
        # NOTE: the designated identifier requirement derived from
        # |identifier| is intentionally not emitted here.
        if self.requirements:
            parts.append(self.requirements)
        if config.codesign_requirements_basic:
            parts.append(config.codesign_requirements_basic)
        return ' '.join(parts)

    def __repr__(self):
        return ('CodeSignedProduct(identifier={}, options={}, '
                'path={})'.format(self.identifier, self.options, self.path))
def make_enum(class_name, options):
    """Makes a new class type for an enum.

    Args:
        class_name: Name of the new type to make.
        options: A dictionary of enum options to use. The keys will become
            attributes on the class, and the values will be wrapped in a tuple
            so that the options can be joined together with `+`.

    Returns:
        A new class for the enum.
    """

    @classmethod
    def valid(cls, opts_to_check):
        """Tests if the specified |opts_to_check| are valid.

        Args:
            options: Iterable of option strings.

        Returns:
            True if all the options are valid, False if otherwise.
        """
        if opts_to_check is None:
            return True
        known_values = options.values()
        return all(opt in known_values for opt in opts_to_check)

    members = {'valid': valid}
    for name, value in options.items():
        assert type(name) is str
        assert type(value) is str
        # Values are stored as 1-tuples so options concatenate with `+`:
        members[name] = (value,)
    return type(class_name, (object,), members)
"""Enum for the options that can be specified when validating the results of
code signing.
These options are passed to `codesign --verify` after the
|CodeSignedProduct| has been signed.
"""
VerifyOptions = make_enum(
'signing.model.VerifyOptions', {
'DEEP': '--deep',
'STRICT': '--strict',
'NO_STRICT': '--no-strict',
'IGNORE_RESOURCES': '--ignore-resources',
})
CodeSignOptions = make_enum(
'signing.model.CodeSignOptions', {
'RESTRICT': 'restrict',
'LIBRARY_VALIDATION': 'library',
'HARDENED_RUNTIME': 'runtime',
'KILL': 'kill',
})
# Specify the components of HARDENED_RUNTIME that are also available on
# older macOS versions.
CodeSignOptions.FULL_HARDENED_RUNTIME_OPTIONS = (
CodeSignOptions.HARDENED_RUNTIME + CodeSignOptions.RESTRICT +
CodeSignOptions.LIBRARY_VALIDATION + CodeSignOptions.KILL)
class Distribution(object):
    """A Distribution represents a final, signed, and potentially channel-
    customized Chrome product.

    Channel customization refers to modifying parts of the app bundle structure
    to have different file names, internal identifiers, and assets.
    """

    def __init__(self,
                 channel=None,
                 branding_code=None,
                 app_name_fragment=None,
                 packaging_name_fragment=None,
                 product_dirname=None,
                 creator_code=None,
                 channel_customize=False,
                 package_as_dmg=True,
                 package_as_pkg=False,
                 inflation_kilobytes=0):
        """Creates a new Distribution object. All arguments are optional.

        Args:
            channel: The release channel for the product.
            branding_code: A branding code helps track how users acquired the
                product from various marketing channels.
            app_name_fragment: If present, this string fragment is appended to
                the |config.CodeSignConfig.app_product|. This renames the binary
                and outer app bundle.
            packaging_name_fragment: If present, this is appended to the
                |config.CodeSignConfig.packaging_basename| to help differentiate
                different |branding_code|s.
            product_dirname: If present, this string value is set in the app's
                Info.plist with the key "CrProductDirName". This key influences
                the browser's default user-data-dir location.
            creator_code: If present, this will set a new macOS creator code
                in the Info.plist "CFBundleSignature" key and in the PkgInfo
                file. If this is not specified, the original values from the
                build products will be kept.
            channel_customize: If True, then the product will be modified in
                several ways:
                - The |channel| will be appended to the
                  |config.CodeSignConfig.base_bundle_id|.
                - The product will be renamed with |app_name_fragment|.
                - Different assets will be used for icons in the app.
            package_as_dmg: If True, then a .dmg file will be created containing
                the product.
            package_as_pkg: If True, then a .pkg file will be created containing
                the product.
            inflation_kilobytes: If non-zero, a blob of this size will be
                inserted into the DMG. Incompatible with package_as_pkg = True.
        """
        if channel_customize:
            # Side-by-side channels must have a distinct names and creator
            # codes, as well as keep their user data in separate locations.
            assert channel
            assert app_name_fragment
            assert product_dirname
            # NOTE(review): creator_code assert deliberately disabled here;
            # confirm against the upstream signing scripts.
            # assert creator_code
        self.channel = channel
        self.branding_code = branding_code
        self.app_name_fragment = app_name_fragment
        self.packaging_name_fragment = packaging_name_fragment
        self.product_dirname = product_dirname
        self.creator_code = creator_code
        self.channel_customize = channel_customize
        self.package_as_dmg = package_as_dmg
        self.package_as_pkg = package_as_pkg
        self.inflation_kilobytes = inflation_kilobytes
        # inflation_kilobytes are only inserted into DMGs
        assert not self.inflation_kilobytes or self.package_as_dmg

    def brandless_copy(self):
        """Derives and returns a copy of this Distribution object, identical
        except for not having a branding code.

        This is useful in the case where a non-branded app bundle needs to be
        created with otherwise the same configuration.
        """
        # NOTE(review): inflation_kilobytes is not forwarded, so the copy gets
        # the default 0 — presumably intentional, verify against callers.
        return Distribution(self.channel, None, self.app_name_fragment,
                            self.packaging_name_fragment, self.product_dirname,
                            self.creator_code, self.channel_customize,
                            self.package_as_dmg, self.package_as_pkg)

    def to_config(self, base_config):
        """Produces a derived |config.CodeSignConfig| for the Distribution.

        Args:
            base_config: The base CodeSignConfig to derive.

        Returns:
            A new CodeSignConfig instance that uses information in the
            Distribution to alter various properties of the |base_config|.
        """
        # Capture the Distribution for the closure used by the nested class
        # properties below ("self" would be shadowed inside them).
        this = self

        class DistributionCodeSignConfig(base_config.__class__):

            @property
            def base_config(self):
                return base_config

            @property
            def distribution(self):
                return this

            @property
            def app_product(self):
                # Channel-customized builds rename the product, e.g.
                # "<product> <fragment>".
                if this.channel_customize:
                    return '{} {}'.format(base_config.app_product,
                                          this.app_name_fragment)
                return base_config.app_product

            @property
            def base_bundle_id(self):
                # Channel-customized builds get a per-channel bundle id.
                base_bundle_id = base_config.base_bundle_id
                if this.channel_customize:
                    return base_bundle_id + '.' + this.channel
                return base_bundle_id

            @property
            def provisioning_profile_basename(self):
                profile = base_config.provisioning_profile_basename
                if profile and this.channel_customize:
                    return '{}_{}'.format(profile, this.app_name_fragment)
                return profile

            @property
            def packaging_basename(self):
                if this.packaging_name_fragment:
                    # Spaces are removed from the product name for packaging.
                    return '{}-{}-{}'.format(
                        self.app_product.replace(' ', ''), self.version,
                        this.packaging_name_fragment)
                return super(DistributionCodeSignConfig,
                             self).packaging_basename

        return DistributionCodeSignConfig(base_config.identity,
                                          base_config.installer_identity,
                                          base_config.notary_user,
                                          base_config.notary_password,
                                          base_config.notary_asc_provider)
class Paths(object):
    """Paths holds the three file path contexts for signing operations.

    The input directory always remains un-modified.
    The output directory is where final, signed products are stored.
    The work directory is set by internal operations.
    """

    def __init__(self, input, output, work):
        self._input = os.path.abspath(input)
        self._output = os.path.abspath(output)
        # |work| is optional; only canonicalize it when it is truthy.
        self._work = os.path.abspath(work) if work else work

    @property
    def input(self):
        return self._input

    @property
    def output(self):
        return self._output

    @property
    def work(self):
        return self._work

    def packaging_dir(self, config):
        """Returns the path to the product packaging directory, which contains
        scripts and assets used in signing.

        Args:
            config: The |config.CodeSignConfig| object.

        Returns:
            Path to the packaging directory.
        """
        return os.path.join(self.input, '{}_Packaging'.format(config.product))

    def replace_work(self, new_work):
        """Creates a new Paths with the same input and output directories, but
        with |work| set to |new_work|."""
        return Paths(self.input, self.output, new_work)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return (self._input, self._output, self._work) == (
            other._input, other._output, other._work)

    def __repr__(self):
        return 'Paths(input={}, output={}, work={})'.format(
            self.input, self.output, self.work)
| {
"content_hash": "a3bab1e920cdd6c49f4984337ab9aea0",
"timestamp": "",
"source": "github",
"line_count": 357,
"max_line_length": 80,
"avg_line_length": 39.15126050420168,
"alnum_prop": 0.5966945696501396,
"repo_name": "ric2b/Vivaldi-browser",
"id": "efb25b8bf1b8edb4ba156733a74959735b4d8141",
"size": "14139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chromium/chrome/installer/mac/signing/model.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
master_doc = "index"
project = "variadic"
author = '<a href="http://vincent-jacques.net/contact">Vincent Jacques</a>'
copyright = ('2015 {} <script>var jacquev6_ribbon_github="{}"</script>'.format(author, project) +
'<script src="https://jacquev6.github.io/ribbon.js"></script>')
extensions = []
nitpicky = True
# nitpick_ignore
# https://github.com/bitprophet/alabaster
# html_theme_path
extensions.append("alabaster")
html_theme = "alabaster"
html_sidebars = {
"**": ["about.html", "searchbox.html"],
}
html_theme_options = {
"github_user": "jacquev6",
"github_repo": project,
"travis_button": True,
}
# http://sphinx-doc.org/ext/autodoc.html
extensions.append("sphinx.ext.autodoc")
# autoclass_content
autodoc_member_order = "bysource"
# autodoc_default_flags
# autodoc_docstring_signature
# autodoc_mock_imports
add_module_names = False
add_class_names = False
# http://sphinx-doc.org/ext/githubpages.html
extensions.append("sphinx.ext.githubpages")
# http://sphinx-doc.org/ext/doctest.html
extensions.append("sphinx.ext.doctest")
# doctest_path
doctest_global_setup = "from variadic import variadic"
# doctest_global_cleanup
# doctest_test_doctest_blocks
| {
"content_hash": "1be5bd45aa65c1f303ff4b41459017b6",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 97,
"avg_line_length": 25.51063829787234,
"alnum_prop": 0.7114261884904087,
"repo_name": "jacquev6/variadic",
"id": "baeffbbafa175b1bff3d34a873364cff1d363b2f",
"size": "1285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12091"
},
{
"name": "Shell",
"bytes": "3523"
}
],
"symlink_target": ""
} |
import tensorflow as tf
import numpy as np
def conv_relu(input, kernel_shape, bias_shape):
    """Build a conv2d + ReLU layer in the current variable scope.

    Creates (or reuses, depending on the active scope's reuse flag)
    variables named "weights" and "biases".
    """
    weights = tf.get_variable(
        "weights", kernel_shape,
        initializer=tf.random_normal_initializer())
    biases = tf.get_variable(
        "biases", bias_shape,
        initializer=tf.constant_initializer(0.0))
    conv = tf.nn.conv2d(
        input, weights, strides=[1, 1, 1, 1], padding='SAME')
    return tf.nn.relu(conv + biases)
def my_image_filter(input_images):
    """Apply two stacked conv_relu layers, each in its own variable scope."""
    with tf.variable_scope("conv1") as scope:
        # Parenthesized single-argument print behaves identically under
        # Python 2:
        print("%s:%s" % (scope.name, scope.reuse))
        # Variables created here will be named "conv1/weights", "conv1/biases".
        relu1 = conv_relu(input_images, [5, 5, 32, 32], [32])
    with tf.variable_scope("conv2") as scope:
        print("%s:%s" % (scope.name, scope.reuse))
        # Variables created here will be named "conv2/weights", "conv2/biases".
        return conv_relu(relu1, [5, 5, 32, 32], [32])
explanation = """
get_variable()就像是在map里面按key取value,其行为被它所属的variable_scope的reuse这个属性控制。
reuse有三种取值,False就只能新建而且不能重名,True就只能用已有的不能新建,None则如果已有就用已有的,没有就新建。
而且整个tensorflow/python/ops/variable_scope.py的代码里(包括 tf.variable_scope() 这个函数),reuse的默认值都是None,按理来说就不会报重复。
但有个潜规则A:每层variable_scope都从上一层variable_scope继承reuse的值,具体实现在 _pure_variable_scope()函数里:
reuse = reuse or old.reuse # Re-using is inherited by sub-scopes.
还有个潜规则B:最顶层的variable_scope被显式初始化为reuse=True,具体实现在get_variable_scope()函数里:
scope = VariableScope(False)
所以,Tensorflow的默认行为就变成了不允许重名,必须新建。
上面说的是实现。
而从意图上看,这应该是故意这么做的,为了避免shared by accident。
"""
print explanation
shape = [5,5,32,32]
image1 = image2 = np.zeros(shape, dtype="float32")
print "\nCASE: default value of `reuse` is False and forbids variable sharing.\n"
global_scope = tf.get_variable_scope()
print "%s:%s" % ('global_scope', global_scope.reuse)
try:
result1 = my_image_filter(image1)
result2 = my_image_filter(image2)
except Exception as e:
print "\nThe following exception is expected:\n"
print e
print "\nCASE: Setting reuse to None allows creation and variable sharing.\n"
global_scope._reuse = None
result1 = my_image_filter(image1)
result2 = my_image_filter(image2)
print "\nNo exception raised, variables reused."
print "\nCASE: Setting reuse to True forbids creation.\n"
with tf.variable_scope("another_root_scope") as scope:
try:
scope._reuse = True
result1 = my_image_filter(image1)
result2 = my_image_filter(image2)
except Exception as e:
print "\nThe following exception is expected:\n"
print e
| {
"content_hash": "c437fd73a13e0703c7b694615a42bb7e",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 104,
"avg_line_length": 31.53012048192771,
"alnum_prop": 0.703859380970577,
"repo_name": "utensil/julia-playground",
"id": "7312bac4331218a9842215f9a1caa9c0f6736102",
"size": "3069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dl/hello_shared_variables.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "49"
},
{
"name": "Jupyter Notebook",
"bytes": "8245805"
},
{
"name": "Python",
"bytes": "33235"
},
{
"name": "Shell",
"bytes": "753"
}
],
"symlink_target": ""
} |
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = r'''
---
module: nxos_pim_interface
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages PIM interface configuration.
description:
- Manages PIM interface configuration settings.
author:
- Jason Edelman (@jedelman8)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- When C(state=default), supported params will be reset to a default state.
These include C(dr_prio), C(hello_auth_key), C(hello_interval), C(jp_policy_out),
C(jp_policy_in), C(jp_type_in), C(jp_type_out), C(border), C(neighbor_policy),
C(neighbor_type).
- The C(hello_auth_key) param is not idempotent.
- C(hello_auth_key) only supports clear text passwords.
- When C(state=absent), pim interface configuration will be set to defaults and pim-sm
will be disabled on the interface.
- PIM must be enabled on the device to use this module.
- This module is for Layer 3 interfaces.
options:
interface:
description:
- Full name of the interface such as Ethernet1/33.
type: str
required: true
sparse:
description:
- Enable/disable sparse-mode on the interface.
type: bool
default: no
dr_prio:
description:
- Configures priority for PIM DR election on interface.
type: str
hello_auth_key:
description:
- Authentication for hellos on this interface.
type: str
hello_interval:
description:
- Hello interval in milliseconds for this interface.
type: int
jp_policy_out:
description:
- Policy for join-prune messages (outbound).
type: str
jp_policy_in:
description:
- Policy for join-prune messages (inbound).
type: str
jp_type_out:
description:
- Type of policy mapped to C(jp_policy_out).
type: str
choices: [ prefix, routemap ]
jp_type_in:
description:
- Type of policy mapped to C(jp_policy_in).
type: str
choices: [ prefix, routemap ]
border:
description:
- Configures interface to be a boundary of a PIM domain.
type: bool
default: no
neighbor_policy:
description:
- Configures a neighbor policy for filtering adjacencies.
type: str
neighbor_type:
description:
- Type of policy mapped to neighbor_policy.
type: str
choices: [ prefix, routemap ]
state:
description:
- Manages desired state of the resource.
type: str
choices: [ present, default ]
default: present
'''
EXAMPLES = r'''
- name: Ensure PIM is not running on the interface
nxos_pim_interface:
interface: eth1/33
state: absent
- name: Ensure the interface has pim-sm enabled with the appropriate priority and hello interval
nxos_pim_interface:
interface: eth1/33
dr_prio: 10
hello_interval: 40
state: present
- name: Ensure join-prune policies exist
nxos_pim_interface:
interface: eth1/33
jp_policy_in: JPIN
jp_policy_out: JPOUT
jp_type_in: routemap
jp_type_out: routemap
- name: Ensure defaults are in place
nxos_pim_interface:
interface: eth1/33
state: default
'''
RETURN = r'''
commands:
description: command sent to the device
returned: always
type: list
sample: ["interface eth1/33", "ip pim neighbor-policy test",
"ip pim neighbor-policy test"]
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.nxos.nxos import get_config, load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.network.nxos.nxos import get_interface_type
from ansible.module_utils.six import string_types
# Maps module params to the NX-OS command template that configures them.
# Params mapped to '' have no direct command of their own: the *_type params
# only select how the corresponding policy command is rendered.
PARAM_TO_COMMAND_KEYMAP = {
    'interface': '',
    'sparse': 'ip pim sparse-mode',
    'dr_prio': 'ip pim dr-priority {0}',
    'hello_interval': 'ip pim hello-interval {0}',
    'hello_auth_key': 'ip pim hello-authentication ah-md5 {0}',
    'border': 'ip pim border',
    'jp_policy_out': 'ip pim jp-policy prefix-list {0} out',
    'jp_policy_in': 'ip pim jp-policy prefix-list {0} in',
    'jp_type_in': '',
    'jp_type_out': '',
    'neighbor_policy': 'ip pim neighbor-policy prefix-list {0}',
    'neighbor_type': '',
}

# Device default values; used to suppress no-op commands (see fix_delta())
# and to build the reset state (see get_pim_interface_defaults()).
PARAM_TO_DEFAULT_KEYMAP = {
    'dr_prio': '1',
    'hello_interval': '30000',
    'sparse': False,
    'border': False,
    'hello_auth_key': False,
}
def execute_show_command(command, module, text=False):
    """Run a single show command on the device.

    Args:
        command: CLI command string to execute.
        module: the AnsibleModule instance.
        text: request plain-text output instead of structured JSON.

    Returns:
        The device output as returned by run_commands().
    """
    output_format = 'text' if text else 'json'
    return run_commands(module, [{'command': command, 'output': output_format}])
def flatten_list(command_lists):
    """Flatten one nesting level: list items are expanded in place, all
    other items are kept as-is."""
    flat = []
    for item in command_lists:
        flat.extend(item if isinstance(item, list) else [item])
    return flat
def local_existing(gexisting):
    """Split the meta keys 'jp_bidir' and 'isauth' out of the facts dict.

    Returns a (existing, jp_bidir, isauth) tuple. The two meta keys are
    removed from the dict only when both are truthy.
    """
    jp_bidir, isauth = False, False
    if gexisting:
        jp_bidir = gexisting.get('jp_bidir')
        isauth = gexisting.get('isauth')
        if jp_bidir and isauth:
            del gexisting['jp_bidir']
            del gexisting['isauth']
    return gexisting, jp_bidir, isauth
def get_interface_mode(interface, intf_type, module):
    """Determine whether an interface operates at layer2 or layer3.

    Returns 'layer2', 'layer3' or 'unknown'.
    """
    body = execute_show_command('show interface {0}'.format(interface), module)
    try:
        intf_table = body[0]['TABLE_interface']['ROW_interface']
    except (KeyError, AttributeError, IndexError):
        return 'unknown'

    if intf_type in ('loopback', 'svi'):
        return 'layer3'
    if intf_type in ('ethernet', 'portchannel'):
        eth_mode = str(intf_table.get('eth_mode', 'layer3'))
        if eth_mode in ('access', 'trunk'):
            return 'layer2'
        if eth_mode == 'routed':
            return 'layer3'
        # Any other reported mode is passed through unchanged:
        return eth_mode
    return 'unknown'
def get_pim_interface(module, interface):
    """Parse PIM settings for |interface| from the running-config.

    Args:
        module: the AnsibleModule instance.
        interface: full interface name, e.g. "Ethernet1/33".

    Returns:
        Dict of discovered PIM interface facts. 'jp_bidir' is True when a
        single bidirectional jp-policy (no in/out keyword) is configured,
        'isauth' when hello authentication is enabled.
    """
    pim_interface = {}
    body = get_config(module, flags=['interface {0}'.format(interface)])

    # Default all policy-related facts so absent config maps to None/False:
    pim_interface['neighbor_type'] = None
    pim_interface['neighbor_policy'] = None
    pim_interface['jp_policy_in'] = None
    pim_interface['jp_policy_out'] = None
    pim_interface['jp_type_in'] = None
    pim_interface['jp_type_out'] = None
    pim_interface['jp_bidir'] = False
    pim_interface['isauth'] = False

    if body:
        all_lines = body.splitlines()

        for each in all_lines:
            if 'jp-policy' in each:
                policy_name = \
                    re.search(r'ip pim jp-policy(?: prefix-list)? (\S+)(?: \S+)?', each).group(1)
                if 'prefix-list' in each:
                    ptype = 'prefix'
                else:
                    ptype = 'routemap'
                if 'out' in each:
                    pim_interface['jp_policy_out'] = policy_name
                    pim_interface['jp_type_out'] = ptype
                elif 'in' in each:
                    pim_interface['jp_policy_in'] = policy_name
                    pim_interface['jp_type_in'] = ptype
                else:
                    # No direction keyword: one bidirectional policy.
                    pim_interface['jp_policy_in'] = policy_name
                    pim_interface['jp_policy_out'] = policy_name
                    pim_interface['jp_bidir'] = True
            elif 'neighbor-policy' in each:
                pim_interface['neighbor_policy'] = \
                    re.search(r'ip pim neighbor-policy(?: prefix-list)? (\S+)', each).group(1)
                if 'prefix-list' in each:
                    pim_interface['neighbor_type'] = 'prefix'
                else:
                    pim_interface['neighbor_type'] = 'routemap'
            elif 'ah-md5' in each:
                pim_interface['isauth'] = True
            elif 'sparse-mode' in each:
                pim_interface['sparse'] = True
            elif 'border' in each:
                pim_interface['border'] = True
            elif 'hello-interval' in each:
                # Bug fix: search the current line, not the whole config
                # body, so the value cannot come from an unrelated line.
                pim_interface['hello_interval'] = \
                    re.search(r'ip pim hello-interval (\d+)', each).group(1)
            elif 'dr-priority' in each:
                # Bug fix: search the current line (was: whole body).
                pim_interface['dr_prio'] = \
                    re.search(r'ip pim dr-priority (\d+)', each).group(1)

    return pim_interface
def fix_delta(delta, existing):
    """Drop proposed values that merely restate a device default.

    A key is removed from |delta| when its proposed value equals the known
    device default while the device did not report the setting at all.
    """
    for key in ('dr_prio', 'hello_interval', 'sparse', 'border'):
        if (key in delta
                and delta.get(key) == PARAM_TO_DEFAULT_KEYMAP.get(key)
                and existing.get(key) is None):
            del delta[key]
    return delta
def config_pim_interface(delta, existing, jp_bidir, isauth):
    """Build the CLI commands that apply |delta| to a PIM interface.

    Args:
        delta: dict of proposed changes (param name -> value).
        existing: dict of current PIM facts (see get_pim_interface()).
        jp_bidir: True when one bidirectional jp-policy is configured.
        isauth: True when hello authentication is currently enabled.

    Returns:
        List of CLI command strings.
    """
    command = None
    commands = []

    delta = fix_delta(delta, existing)

    # A bidirectional policy must be removed before directional in/out
    # policies can be applied:
    if jp_bidir:
        if delta.get('jp_policy_in') or delta.get('jp_policy_out'):
            if existing.get('jp_type_in') == 'prefix':
                command = 'no ip pim jp-policy prefix-list {0}'.format(existing.get('jp_policy_in'))
            else:
                command = 'no ip pim jp-policy {0}'.format(existing.get('jp_policy_in'))
        if command:
            commands.append(command)

    for k, v in delta.items():
        if k in ['dr_prio', 'hello_interval', 'hello_auth_key', 'border',
                 'sparse']:
            if v:
                command = PARAM_TO_COMMAND_KEYMAP.get(k).format(v)
            elif k == 'hello_auth_key':
                # Falsy hello_auth_key: remove authentication only if it is
                # currently enabled.
                if isauth:
                    command = 'no ip pim hello-authentication ah-md5'
            else:
                # Falsy value for the other params negates the command:
                command = 'no ' + PARAM_TO_COMMAND_KEYMAP.get(k).format(v)

            # NOTE(review): |command| is appended here but not reset to
            # None, so a stale value may carry into later iterations —
            # verify against upstream before changing.
            if command:
                commands.append(command)
        elif k in ['neighbor_policy', 'jp_policy_in', 'jp_policy_out',
                   'neighbor_type']:
            # The *_policy and *_type params arrive as separate delta keys;
            # both render the same command, preferring the proposed values
            # and falling back to existing ones.
            if k in ['neighbor_policy', 'neighbor_type']:
                temp = delta.get('neighbor_policy') or existing.get(
                    'neighbor_policy')
                if delta.get('neighbor_type') == 'prefix':
                    command = PARAM_TO_COMMAND_KEYMAP.get(k).format(temp)
                elif delta.get('neighbor_type') == 'routemap':
                    command = 'ip pim neighbor-policy {0}'.format(temp)
                elif existing.get('neighbor_type') == 'prefix':
                    command = PARAM_TO_COMMAND_KEYMAP.get(k).format(temp)
                elif existing.get('neighbor_type') == 'routemap':
                    command = 'ip pim neighbor-policy {0}'.format(temp)
            elif k in ['jp_policy_in', 'jp_type_in']:
                temp = delta.get('jp_policy_in') or existing.get(
                    'jp_policy_in')
                if delta.get('jp_type_in') == 'prefix':
                    command = PARAM_TO_COMMAND_KEYMAP.get(k).format(temp)
                elif delta.get('jp_type_in') == 'routemap':
                    command = 'ip pim jp-policy {0} in'.format(temp)
                elif existing.get('jp_type_in') == 'prefix':
                    command = PARAM_TO_COMMAND_KEYMAP.get(k).format(temp)
                elif existing.get('jp_type_in') == 'routemap':
                    command = 'ip pim jp-policy {0} in'.format(temp)
            elif k in ['jp_policy_out', 'jp_type_out']:
                temp = delta.get('jp_policy_out') or existing.get(
                    'jp_policy_out')
                if delta.get('jp_type_out') == 'prefix':
                    command = PARAM_TO_COMMAND_KEYMAP.get(k).format(temp)
                elif delta.get('jp_type_out') == 'routemap':
                    command = 'ip pim jp-policy {0} out'.format(temp)
                elif existing.get('jp_type_out') == 'prefix':
                    command = PARAM_TO_COMMAND_KEYMAP.get(k).format(temp)
                elif existing.get('jp_type_out') == 'routemap':
                    command = 'ip pim jp-policy {0} out'.format(temp)
            # NOTE: this can append the same command twice when both the
            # *_policy and *_type keys are present in |delta| (see the
            # duplicated sample in RETURN).
            if command:
                commands.append(command)
                command = None

    return commands
def get_pim_interface_defaults():
    """Return the default values used when resetting a PIM interface.

    Keys whose default is None are skipped; note that False defaults
    (sparse, border, hello_auth_key) are kept.
    """
    keys = ('dr_prio', 'border', 'sparse', 'hello_interval', 'hello_auth_key')
    return dict(
        (key, PARAM_TO_DEFAULT_KEYMAP.get(key))
        for key in keys
        if PARAM_TO_DEFAULT_KEYMAP.get(key) is not None
    )
def default_pim_interface_policies(existing, jp_bidir):
    """Build the commands that remove configured PIM policies.

    Args:
        existing: dict of current PIM facts (see get_pim_interface()).
        jp_bidir: True when a single bidirectional jp-policy is configured.

    Returns:
        List of CLI commands removing jp-policies and the neighbor-policy.
    """
    commands = []

    if jp_bidir:
        if existing.get('jp_policy_in') or existing.get('jp_policy_out'):
            # Bug fix: the original only built a command for prefix-list
            # policies, leaving |command| unbound (UnboundLocalError) for
            # route-map policies. Handle both forms, mirroring
            # config_pim_interface().
            if existing.get('jp_type_in') == 'prefix':
                command = 'no ip pim jp-policy prefix-list {0}'.format(
                    existing.get('jp_policy_in'))
            else:
                command = 'no ip pim jp-policy {0}'.format(
                    existing.get('jp_policy_in'))
            if command:
                commands.append(command)
    else:
        command = None
        for k in existing:
            if k == 'jp_policy_in':
                if existing.get('jp_policy_in'):
                    if existing.get('jp_type_in') == 'prefix':
                        command = 'no ip pim jp-policy prefix-list {0} in'.format(
                            existing.get('jp_policy_in')
                        )
                    else:
                        command = 'no ip pim jp-policy {0} in'.format(
                            existing.get('jp_policy_in')
                        )
            elif k == 'jp_policy_out':
                if existing.get('jp_policy_out'):
                    if existing.get('jp_type_out') == 'prefix':
                        command = 'no ip pim jp-policy prefix-list {0} out'.format(
                            existing.get('jp_policy_out')
                        )
                    else:
                        command = 'no ip pim jp-policy {0} out'.format(
                            existing.get('jp_policy_out')
                        )
            if command:
                commands.append(command)
                command = None

    # The neighbor-policy is removed regardless of the jp-policy form:
    if existing.get('neighbor_policy'):
        commands.append('no ip pim neighbor-policy')

    return commands
def config_pim_interface_defaults(existing, jp_bidir, isauth):
    """Build the command list that restores a PIM interface to defaults."""
    commands = []
    # Defaults that differ from the current state must be (re)applied:
    defaults = get_pim_interface_defaults()
    delta = dict(set(defaults.items()).difference(existing.items()))
    if delta:
        commands = config_pim_interface(delta, existing, jp_bidir, isauth)
    # Configured policies are removed in addition:
    commands.extend(default_pim_interface_policies(existing, jp_bidir))
    return commands
def main():
    """Ansible module entry point: reconcile PIM interface config on NX-OS."""
    argument_spec = dict(
        interface=dict(type='str', required=True),
        sparse=dict(type='bool', default=False),
        dr_prio=dict(type='str'),
        hello_auth_key=dict(type='str'),
        hello_interval=dict(type='int'),
        jp_policy_out=dict(type='str'),
        jp_policy_in=dict(type='str'),
        jp_type_out=dict(type='str', choices=['prefix', 'routemap']),
        jp_type_in=dict(type='str', choices=['prefix', 'routemap']),
        border=dict(type='bool', default=False),
        neighbor_policy=dict(type='str'),
        neighbor_type=dict(type='str', choices=['prefix', 'routemap']),
        state=dict(type='str', default='present', choices=['absent', 'default', 'present']),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    results = {'changed': False, 'commands': [], 'warnings': warnings}
    state = module.params['state']
    interface = module.params['interface']
    jp_type_in = module.params['jp_type_in']
    jp_type_out = module.params['jp_type_out']
    jp_policy_in = module.params['jp_policy_in']
    jp_policy_out = module.params['jp_policy_out']
    neighbor_policy = module.params['neighbor_policy']
    neighbor_type = module.params['neighbor_type']
    hello_interval = module.params['hello_interval']
    intf_type = get_interface_type(interface)
    # PIM is a Layer 3 feature; bail out early on switchport interfaces.
    if get_interface_mode(interface, intf_type, module) == 'layer2':
        module.fail_json(msg='this module only works on Layer 3 interfaces.')
    # Policy parameters come in pairs: each policy name requires its type.
    if jp_policy_in:
        if not jp_type_in:
            module.fail_json(msg='jp_type_in required when using jp_policy_in.')
    if jp_policy_out:
        if not jp_type_out:
            module.fail_json(msg='jp_type_out required when using jp_policy_out.')
    if neighbor_policy:
        if not neighbor_type:
            module.fail_json(msg='neighbor_type required when using neighbor_policy.')
    get_existing = get_pim_interface(module, interface)
    existing, jp_bidir, isauth = local_existing(get_existing)
    args = PARAM_TO_COMMAND_KEYMAP.keys()
    # Keep only the supplied params that map to device commands.
    proposed = dict((k, v) for k, v in module.params.items()
                    if v is not None and k in args)
    if hello_interval:
        # The module takes seconds; convert to the value pushed to the device
        # (multiplied by 1000, i.e. milliseconds).
        proposed['hello_interval'] = str(proposed['hello_interval'] * 1000)
    # delta = proposed settings not already present on the device.
    delta = dict(set(proposed.items()).difference(existing.items()))
    commands = []
    if state == 'present':
        if delta:
            command = config_pim_interface(delta, existing, jp_bidir, isauth)
            if command:
                commands.append(command)
    elif state == 'default' or state == 'absent':
        # 'absent' and 'default' are handled identically: reset to defaults.
        defaults = config_pim_interface_defaults(existing, jp_bidir, isauth)
        if defaults:
            commands.append(defaults)
    if commands:
        # All generated commands run under the interface context.
        commands.insert(0, ['interface {0}'.format(interface)])
    cmds = flatten_list(commands)
    if cmds:
        results['changed'] = True
        if not module.check_mode:
            load_config(module, cmds)
        # NOTE(review): this pops index 0 whenever 'configure' appears
        # anywhere in cmds, assuming it can only be the first entry --
        # confirm that assumption holds for load_config's output.
        if 'configure' in cmds:
            cmds.pop(0)
    results['commands'] = cmds
    module.exit_json(**results)
# Standard Ansible module entry point: only run when executed directly.
if __name__ == '__main__':
    main()
| {
"content_hash": "544259c94bb0d6cd20e3f7f90c5833e1",
"timestamp": "",
"source": "github",
"line_count": 516,
"max_line_length": 100,
"avg_line_length": 34.87984496124031,
"alnum_prop": 0.5718413157017447,
"repo_name": "SergeyCherepanov/ansible",
"id": "227f2179b6d9c199af56e22e29358537a6e89d53",
"size": "18135",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "ansible/ansible/modules/network/nxos/nxos_pim_interface.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "824"
}
],
"symlink_target": ""
} |
+++ src/cryptography/hazmat/bindings/openssl/engine.py
@@ -49,7 +49,6 @@ int ENGINE_init(ENGINE *);
int ENGINE_finish(ENGINE *);
void ENGINE_load_openssl(void);
void ENGINE_load_dynamic(void);
-void ENGINE_load_cryptodev(void);
void ENGINE_load_builtin_engines(void);
void ENGINE_cleanup(void);
ENGINE *ENGINE_get_default_RSA(void);
| {
"content_hash": "c6869cd0396cfdb2172078674d71e35a",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 54,
"avg_line_length": 37.666666666666664,
"alnum_prop": 0.7345132743362832,
"repo_name": "radare/void-packages",
"id": "9f05a871fad1f8c96bdd6a56138cdeab7c1aaec3",
"size": "423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "srcpkgs/python-cryptography/patches/patch-src_cryptography_hazmat_bindings_openssl_engine.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "101787"
},
{
"name": "C++",
"bytes": "3195"
},
{
"name": "Groff",
"bytes": "119286"
},
{
"name": "JavaScript",
"bytes": "649"
},
{
"name": "Makefile",
"bytes": "625"
},
{
"name": "Perl",
"bytes": "9397"
},
{
"name": "Python",
"bytes": "2914"
},
{
"name": "Shell",
"bytes": "541932"
},
{
"name": "VimL",
"bytes": "175"
}
],
"symlink_target": ""
} |
from haystack import indexes
from .models import Ruling
class RulingIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index over :class:`Ruling` records."""
    # Primary search document, rendered from a template (use_template=True).
    text = indexes.CharField(document=True, use_template=True)
    file_reference = indexes.CharField(model_attr='file_reference')
    date = indexes.DateTimeField(model_attr='date', null=True)
    # faceted=True enables drill-down filtering on these fields.
    court = indexes.CharField(model_attr='court', faceted=True)
    jurisdiction = indexes.CharField(model_attr='jurisdiction', faceted=True)
    granted = indexes.CharField(model_attr='granted', faceted=True)
    subject = indexes.CharField(model_attr='subject')
    content = indexes.CharField(model_attr='content')
    def get_model(self):
        """Return the model class this index covers."""
        return Ruling
    def index_queryset(self, using=None):
        """Used when the entire index for model is updated."""
        return self.get_model().objects.all()
| {
"content_hash": "f085dc1b4a25437c783b3eec7b750552",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 77,
"avg_line_length": 37.77272727272727,
"alnum_prop": 0.7184115523465704,
"repo_name": "netzwerkrecherche/auskunftsrecht",
"id": "2423c408d9b10b024477df4eba3c260d25f5a5f4",
"size": "831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rulings/search_indexes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "45"
},
{
"name": "HTML",
"bytes": "14107"
},
{
"name": "Python",
"bytes": "16127"
}
],
"symlink_target": ""
} |
# Smoke test against the live Harvest Media service: walks every library,
# album and track reachable with the configured API key and prints each
# track name.  Python 2 script (uses the print statement).
import harvestmedia
from harvestmedia.api.config import Config
from harvestmedia.api.library import Library
from harvestmedia.api.client import Client
# NOTE(review): credentials are hard-coded and checked in -- rotate this key
# and load it from the environment before reusing this script.
api_key = 'e1d5d645d2d984e499e816a7a314dfbd610149f124c3373455c37ad75ab3ffccf444a04a10953b62'
webservice_url = 'https://service.harvestmedia.net/HMP-WS.svc'
client = Client(api_key=api_key, debug_level='DEBUG')
libraries = Library.query.get_libraries(client)
for library in libraries:
    albums = library.get_albums()
    for album in albums:
        tracks = album.get_tracks()
        for track in tracks:
            print track.name
| {
"content_hash": "89cd9efb567e47b5caac968b6bfef534",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 92,
"avg_line_length": 28.5,
"alnum_prop": 0.7929824561403509,
"repo_name": "ralfonso/harvestmedia",
"id": "4e9c23668c28a6dc1f04f9205f37ad0e0f2a634d",
"size": "570",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "remote_tests/hmtest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "129422"
}
],
"symlink_target": ""
} |
from functools import wraps
from flask import request, Response, current_app
def check_auth(username, password, roles):
    """Validate credentials against the app config and role requirements.

    Returns True when auth is unconfigured (neither admin_user nor
    admin_pass set), or when the username/password match and the user
    holds an acceptable role ('admin' always qualifies; roles=None
    means any authenticated user qualifies).
    """
    expected_user = current_app.config.get('admin_user')
    expected_pass = current_app.config.get('admin_pass')
    if expected_user is None and expected_pass is None:
        # Authentication is not configured at all: allow everything.
        return True
    user_roles = get_roles(username)
    if username != expected_user or password != expected_pass:
        return False
    if 'admin' in user_roles or roles is None:
        return True
    return any(r in roles for r in user_roles)
def authenticate():
    """Return the 401 response sent when credentials are missing or bad."""
    body = '{"msg": "unauthorized"}'
    headers = {}
    return Response(body, 401, headers)
def get_roles(username):
    """Return the roles held by *username*.

    Stub implementation: every user is treated as an administrator.
    """
    roles = ['admin']
    return roles
def is_allowed(roles=None):
    """Return True when the current request may proceed for *roles*.

    When both admin_user and admin_pass are configured, the request must
    carry valid basic-auth credentials; otherwise everything is allowed.
    """
    conf = current_app.config
    auth_configured = 'admin_user' in conf and 'admin_pass' in conf
    if not auth_configured:
        return True
    creds = request.authorization
    if not creds:
        return False
    return check_auth(creds.username, creds.password, roles)
class requires_auth(object):
    """Decorator enforcing basic auth (with optional role list) on a view."""
    def __init__(self, roles=None):
        self.roles = roles
    def __call__(self, f):
        @wraps(f)
        def decorated(*args, **kwargs):
            # Short-circuit to the 401 response when the check fails.
            if is_allowed(self.roles):
                return f(*args, **kwargs)
            return authenticate()
        return decorated
| {
"content_hash": "790b22d602ea94fb7c04e75c24b1b12f",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 62,
"avg_line_length": 27.5,
"alnum_prop": 0.5716363636363636,
"repo_name": "rcbops/opencenter",
"id": "f00ea439edc8ff2dac2fe541869495cde4e92254",
"size": "2683",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opencenter/webapp/auth.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "439541"
},
{
"name": "Shell",
"bytes": "5134"
}
],
"symlink_target": ""
} |
import datetime
import pytz
import web
import config
# Module-wide handle to the practice's SQLite database file.
db = web.database(dbn='sqlite', db='dp.sqlite')
# =================================================================
# datetime
def current_time():
    """Return the current time as a timezone-aware UTC datetime."""
    return datetime.datetime.now(pytz.utc)
def local_time():
    """Return the current time in the configured local timezone."""
    return datetime.datetime.now(config.tz)
def input_date(s):
    """Parse a user-entered date string (config.date_fmt) into aware UTC."""
    return config.tz.localize(datetime.datetime.strptime(s, config.date_fmt)).astimezone(pytz.utc)
def display_date(dt):
    """Format an aware datetime for display using the local date format."""
    return dt.astimezone(config.tz).strftime(config.date_fmt)
def input_datetime(s):
    """Parse a user-entered datetime string (config.datetime_fmt) into aware UTC."""
    return config.tz.localize(datetime.datetime.strptime(s, config.datetime_fmt)).astimezone(pytz.utc)
def display_datetime(dt):
    """Format an aware datetime for display using the local datetime format."""
    return dt.astimezone(config.tz).strftime(config.datetime_fmt)
def store_datetime(dt):
    """Serialize a datetime to the string format stored in the database."""
    return dt.strftime(config.db_fmt)
def load_datetime(s):
    """Deserialize a database timestamp string into an aware UTC datetime."""
    return pytz.utc.localize(datetime.datetime.strptime(s, config.db_fmt))
# datetime
# =================================================================
# pt
def pt_name(pt, first='firstname'):
    """Return the display name of a patient row, or '' for a missing patient.

    NOTE(review): the ``first`` parameter is currently unused; it is kept
    for backward compatibility with existing callers.
    """
    return pt.name if pt else ''
def pt_name_search(q):
    """Find patients by numeric id or by name substring match.

    A query that parses as an integer is treated as a patient id and yields
    a one-element list (empty if no such patient exists).  Otherwise every
    whitespace-separated word must appear somewhere in the patient's name.
    """
    try:
        patient_id = int(q)
    except ValueError:
        # Not an id: build "name like '%w1%' and name like '%w2%' ..."
        fragments = []
        for word in q.split():
            if fragments:
                fragments.append(' and ')
            fragments.append('name like ')
            fragments.append(web.db.sqlquote('%%%s%%' % word))
        query = web.db.SQLQuery(fragments)
        return list(db.select('patient', where=query))
    else:
        pt = get_pt(patient_id)
        if not pt:
            return list()
        return [get_pt(patient_id)]
def get_pt(id):
    """Return the patient row with the given id, or None if absent."""
    matches = db.where('patient', id=id)
    try:
        return matches[0]
    except IndexError:
        return None
def get_family(resparty):
    """Return all patients sharing the given responsible-party id."""
    return db.where('patient', resparty=resparty)
def update_pt(f, resparty):
    """Insert or update a patient record from form *f*; return its row id.

    Bug fix: the original called ``model.display_date``/``model.input_date``
    but this *is* model.py and it never imports itself, so that line raised
    NameError.  The local helpers are now called directly.
    """
    d = dict((k, f[k].get_value()) for k in ('name', 'notes'))
    d['id'] = f.id.get_value() or None
    d['resparty'] = resparty
    # Map the single-letter form choice onto the stored value.
    d['gender'] = dict(f='female', m='male')[f.gender.get_value()[0]]
    # Round-trip through the local date helpers (was: model.display_date(...)).
    d['birthday'] = display_date(input_date(f.birthday.get_value()))
    db.query('insert or replace into patient (id, name, resparty, birthday, gender, notes) values ($id, $name, $resparty, $birthday, $gender, $notes)', d)
    row = db.query('select last_insert_rowid() as id')[0]
    if d['id'] is None and d['resparty'] is None:
        # Brand-new patient with no responsible party: they are their own.
        db.update('patient', where='id=%d' % row.id, resparty=row.id)
    return row.id
def get_latest_address(patientid):
    """Return the newest 'address' journal entry for a patient.

    Falls back recursively to the responsible party's address when the
    patient has none; returns None when nobody in the chain has one.
    """
    rows = db.where('journal', kind='address', patientid=patientid,
                    order='ts DESC')
    try:
        return rows[0]
    except IndexError:
        pt = get_pt(patientid)
        if pt.resparty == pt.id:
            # Patient is their own responsible party: end of the chain.
            return None
        return get_latest_address(pt.resparty)
# pt
# =================================================================
# journal
class new_handlers (web.storage):
    """Dispatch table of per-kind detail handlers used by new_journal().

    Each static method receives the freshly created journal row id plus the
    submitted form and persists the kind-specific detail record; handlers
    are looked up by journal-kind name via getattr() in new_journal().
    """
    @staticmethod
    def address(journalid, form):
        # Address entries carry no extra detail row.
        pass
    @staticmethod
    def email(journalid, form):
        # Email entries carry no extra detail row.
        pass
    @staticmethod
    def phone(journalid, form):
        # Phone entries carry no extra detail row.
        pass
    @staticmethod
    def contact(journalid, form):
        """Store the free-text details of a contact event."""
        db.insert('contact', journalid=journalid, details=form.details.get_value())
    @staticmethod
    def progress(journalid, form):
        """Store a SOAP-style progress note (subjective/objective/assessment/plan)."""
        db.insert('progress',
                  journalid=journalid,
                  sub=form.sub.get_value(),
                  obj=form.obj.get_value(),
                  ass=form.ass.get_value(),
                  pln=form.pln.get_value())
    @staticmethod
    def plan(journalid, form):
        """Store an insurance plan; secondaryto is optional (empty = primary)."""
        secondaryto = form.secondaryto.get_value()
        if secondaryto:
            secondaryto = int(secondaryto)
        else:
            secondaryto = None
        # we already know this names a unique patient after form validation
        insured = pt_name_search(form.insured.get_value())[0]
        db.insert('plan',
                  journalid=journalid,
                  secondaryto=secondaryto,
                  carrierid=int(form.carrier.get_value()),
                  insuredid=insured.id,
                  relationship=form.relationship.get_value(),
                  groupnum=form.groupnum.get_value(),
                  idnum=form.idnum.get_value(),
                  employer=form.employer.get_value(),
                  deductible=float(form.deductible.get_value()),
                  maximum=float(form.maximum.get_value()),
                  prevent=int(form.prevent.get_value()),
                  basic=int(form.basic.get_value()),
                  major=int(form.major.get_value()),
                  notes=form.notes.get_value())
    @staticmethod
    def claim(journalid, form):
        """File a claim; defaults to the patient's primary plan when none given."""
        if form.planid.get_value():
            planid = form.planid.get_value()
        else:
            planid = get_primary_plan_for_pt(int(form.patientid.get_value())).journalid
        claimid = db.insert('claim',
                            journalid=journalid,
                            preauth=False,
                            planid=planid,
                            filed=store_datetime(current_time()),
                            closed=None,
                            notes=form.notes.get_value())
        # Attach every posted-but-unfiled treatment to this new claim.
        db.update('tx',
                  where='journalid is not null and claimid is null',
                  claimid=claimid)
    @staticmethod
    def Rx(journalid, form):
        """Store a prescription (dispense amount, sig, refill count)."""
        db.insert('rx',
                  journalid=journalid,
                  disp=form.disp.get_value(),
                  sig=form.sig.get_value(),
                  refills=form.refills.get_value())
    @staticmethod
    def doc(journalid, form):
        """Save an uploaded document under upload/<journalid>.<ext>.

        NOTE(review): 'magic' and 'mimetypes' are not imported at the top of
        this file -- this handler raises NameError as written; confirm and
        add the imports (python-magic is a third-party package).
        """
        filedir = 'upload'
        data = form.file.get_value()
        mime = magic.from_buffer(data, mime=True)
        ext = mimetypes.guess_extension(mime) #includes the leading dot
        fout = open('%s/%s%s' % (filedir, journalid, ext), 'wb')
        fout.write(data)
        fout.close()
    @staticmethod
    def appointment(journalid, form):
        """Store appointment details and move the journal ts to the slot time."""
        # TODO should appointments in the past be legal? how to fail?
        # ... transactions!
        dt = input_datetime(form.ts.get_value())
        db.insert('appointment',
                  journalid=journalid,
                  duration=int(form.duration.get_value()),
                  kind=form.kind.get_value(),
                  status=form.status.get_value(),
                  notes=form.notes.get_value())
        db.update('journal', where=('id=%d' % journalid), ts=store_datetime(dt))
def new_journal(pt, kind, f):
    """Create a journal row for patient *pt* and run the kind-specific handler.

    Returns the new journal row id.
    """
    journalid = db.insert(
        'journal',
        patientid=pt.id,
        ts=store_datetime(current_time()),
        kind=kind,
        summary=f.summary.get_value())
    # Dispatch to the matching static method on new_handlers.
    handler = getattr(new_handlers, kind)
    handler(journalid, f)
    return journalid
def get_journal(patientid, **kw):
    """Return a patient's journal entries, newest first, each annotated with
    the running account balance as of that entry.

    Optional keyword arguments ``limit`` and ``offset`` page the result.

    Bug fix: the original collected limit/offset into a local dict and then
    never used it, so paging silently did nothing.  They are now appended
    to the query (SQLite requires a LIMIT clause before OFFSET, so a bare
    offset gets LIMIT -1, i.e. unbounded).
    """
    limit = kw.pop('limit', None)
    offset = kw.pop('offset', None)
    if len(kw):
        raise ValueError('cannot handle keyword arguments other than limit and offset')
    # this query just smells expensive
    sql = ('select *, (select sum(money) from journal '
           'where patientid=jj.patientid and ts <= jj.ts) as balance '
           'from journal as jj where patientid=%d order by ts desc' % patientid)
    if offset is not None and limit is None:
        limit = -1  # SQLite: OFFSET is only legal after a LIMIT clause
    if limit is not None:
        sql += ' limit %d' % limit
    if offset is not None:
        sql += ' offset %d' % offset
    return db.query(sql).list()
def get_journal_entry(journalid):
    """Return the journal row with the given id (IndexError if absent)."""
    return db.where('journal', id=journalid)[0]
def get_contact(journalid):
    """Return the contact detail row for a journal entry (IndexError if absent)."""
    return db.where('contact', journalid=journalid)[0]
def get_progress(journalid):
    """Return the progress-note row for a journal entry (IndexError if absent)."""
    return db.where('progress', journalid=journalid)[0]
def get_Rx(journalid):
    """Return the prescription row for a journal entry (IndexError if absent).

    The table name case ('Rx' here vs 'rx' at insert time) differs; SQLite
    table names are case-insensitive, so both resolve to the same table.
    """
    return db.where('Rx', journalid=journalid)[0]
def get_appointment(journalid):
    """Return the appointment row for a journal entry (IndexError if absent)."""
    return db.where('appointment', journalid=journalid)[0]
def get_posted_tx(journalid):
    """Return the list of treatments posted under the given journal entry."""
    return db.where('tx', journalid=journalid).list()
def post_appointment(appt, journal, txids):
    """Post an appointment: total its treatment fees into a new 'tx' journal
    entry, attach the given treatments to it, and mark the appointment posted.
    """
    total_fee = db.query(
        'select sum(fee) as fee from tx where appointmentid=%d' % appt.journalid
    )[0].fee
    posted_id = db.insert('journal',
                          patientid=journal.patientid,
                          ts=store_datetime(current_time()),
                          kind='tx',
                          summary=journal.summary,
                          money=total_fee)
    id_list = ','.join(map(str, txids))
    db.update('tx', where='id in (%s)' % id_list, journalid=posted_id)
    db.update('appointment', where='journalid=%d' % journal.id, status='posted')
# journal
# =================================================================
# txplan
def get_txplan(patientid):
    """Return all planned treatments (tx rows) for a patient."""
    return db.where('tx', patientid=patientid)
def tx_status(tx):
    """Render a treatment row's lifecycle flags as a comma-separated string.

    A non-null journalid means posted, appointmentid means scheduled,
    claimid means filed; order is fixed (posted, scheduled, filed).
    """
    flag_labels = (('journalid', 'posted'),
                   ('appointmentid', 'scheduled'),
                   ('claimid', 'filed'))
    parts = [label for attr, label in flag_labels if getattr(tx, attr)]
    return ', '.join(parts)
def new_tx(patientid, **kw):
    """Insert a treatment row for a patient; extra columns via **kw. Returns id."""
    return db.insert('tx', patientid=patientid, **kw)
def get_tx_for_appointment(appointmentid):
    """List treatments attached to an appointment plus all unassigned ones.

    Ordered appointmentid DESC then id, so in SQLite the assigned rows come
    ahead of the unassigned (NULL appointmentid) rows.
    """
    Q = web.db.SQLQuery
    P = web.db.SQLParam
    return db.select('tx',
                     where=Q(['appointmentid=',
                              P(appointmentid),
                              ' or appointmentid is null']),
                     order='appointmentid DESC, id')
# txplan
# =================================================================
# appointment
def update_appt(journalid, form):
    """Apply appointment-form edits to both the detail and journal rows."""
    db.update(
        'appointment',
        where='journalid=%d' % journalid,
        duration=int(form.duration.get_value()),
        kind=form.kind.get_value(),
        notes=form.notes.get_value())
    new_ts = input_datetime(form.ts.get_value())
    db.update(
        'journal',
        where='id=%d' % journalid,
        ts=store_datetime(new_ts),
        summary=form.summary.get_value())
def appts_on_day(dt):
    """Return all appointments whose timestamp falls on the local day of *dt*.

    Fixes: removed leftover debug ``print`` statements, and added the
    missing spaces before the ``and`` keywords in the generated SQL so each
    placeholder is cleanly separated from the following token.

    NOTE(review): microseconds of *dt* are not zeroed, so the window edges
    carry dt's microsecond value -- confirm callers pass midnight-ish values.
    """
    start_day = dt.replace(hour=0, minute=0, second=0).astimezone(pytz.utc)
    end_day = (dt + datetime.timedelta(seconds=86400)).replace(
        hour=0, minute=0, second=0).astimezone(pytz.utc)
    Q = web.db.SQLQuery
    P = web.db.SQLParam
    return db.select(['journal', 'appointment'],
                     where=Q(['journal.kind=', P('appointment'),
                              ' and ts>', P(store_datetime(start_day)),
                              ' and ts<', P(store_datetime(end_day)),
                              ' and journal.id=appointment.journalid']),
                     order='ts DESC').list()
def new_appt(patientid, dt, **kw):
    """Create journal + appointment rows, snapping *dt* down to 10 minutes.

    Returns the appointment row id; extra appointment columns via **kw
    ('summary' is routed to the journal row instead, defaulting to 'test').
    """
    snapped = dt.replace(second=0, microsecond=0,
                         minute=dt.minute - dt.minute % 10).astimezone(pytz.utc)
    summary = kw.pop('summary', 'test')
    journalid = db.insert('journal',
                          patientid=patientid,
                          ts=store_datetime(snapped),
                          kind='appointment',
                          summary=summary)
    return db.insert('appointment', journalid=journalid, **kw)
def appt_tx_set(appointmentid, txs):
    """Make *txs* exactly the set of treatments linked to an appointment.

    Bug fix: the original interpolated ``str(tuple(txs))`` into the SQL,
    which yields ``(5,)`` for a single id and ``()`` for none -- both
    invalid SQL.  The id list is now built with join() (ids coerced to
    int as a safety measure) and the re-link step is skipped entirely
    when *txs* is empty.
    """
    # First detach everything currently linked to this appointment.
    db.update('tx',
              where='appointmentid = %d' % appointmentid,
              appointmentid=None)
    if txs:
        id_list = ','.join(str(int(t)) for t in txs)
        db.update('tx',
                  where='id in (%s)' % id_list,
                  appointmentid=appointmentid)
# appointment
# =================================================================
# carriers
def get_carriers():
    """Return all insurance carriers, sorted by name."""
    return db.select('carrier', order='name ASC')
def get_carrier(id):
    """Return the carrier row with the given id (IndexError if absent)."""
    return db.where('carrier', id=id)[0]
def new_carrier(form):
    """Insert an insurance carrier from the carrier form; return its row id."""
    return db.insert('carrier',
                     name=form.name.get_value(),
                     address=form.address.get_value(),
                     phone=form.phone.get_value(),
                     web=form.web.get_value(),
                     eclaim=form.eclaim.get_value())
# carriers
# =================================================================
# plans
def get_plan(id):
    """Return the joined journal+plan row for the plan with journalid *id*."""
    return db.select(['journal', 'plan'], where='plan.journalid=journal.id and journalid=%d' % id)[0]
def get_primary_plan_for_pt(patientid):
    """Return the most recent primary insurance plan for *patientid*.

    Bug fix: the original query never referenced *patientid*, so it returned
    the newest primary plan of *any* patient.  The query now filters on
    journal.patientid.  Raises IndexError when the patient has no primary
    plan (unchanged from the original's no-rows behavior).
    """
    plan = db.select(['journal', 'plan'],
                     where='plan.journalid=journal.id and '
                           'plan.secondaryto is null and '
                           'journal.patientid=%d' % int(patientid),
                     order='ts DESC', limit=1)[0]
    return plan
# plans
# =================================================================
# claims
def get_claim(claimid):
    """Return the claim row keyed by its journalid (IndexError if absent)."""
    return db.where('claim', journalid=claimid)[0]
def get_tx_for_claim(claimid):
    """Return the list of treatments attached to the given claim."""
    return db.where('tx', claimid=claimid).list()
def new_payment_for_pt(pt, summary, amount):
    """Record a 'payment' journal entry for patient *pt*; return the row id.

    NOTE(review): *amount* is stored as-is in the money column; confirm the
    sign convention matches the running-balance sum used by get_journal.
    """
    journalid = db.insert('journal',
                          patientid=pt.id,
                          ts=store_datetime(current_time()),
                          kind='payment',
                          summary=summary,
                          money=amount)
    return journalid
# claims
# =================================================================
| {
"content_hash": "0a5bd8be850403f10bb9e4bbfef231ad",
"timestamp": "",
"source": "github",
"line_count": 400,
"max_line_length": 195,
"avg_line_length": 31.9975,
"alnum_prop": 0.5423079928119384,
"repo_name": "homey1337/efnpractice",
"id": "faabcc00c018f12465816ed939119c6b93a91ca9",
"size": "12799",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "44388"
}
],
"symlink_target": ""
} |
# Heroku-specific Django settings overlay for fabrydb.
import os
# from global_settings import PROJECT_PATH
# Debug is disabled for this deployment; flip DEBUG to True locally when
# testing error handling and messages.
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Override this to match the application endpoint
FORCE_SCRIPT_NAME = ''
# Non-restricted email port for development, run in a terminal:
# python -m smtpd -n -c DebuggingServer localhost:1025
EMAIL_PORT = 1025
EMAIL_SUBJECT_PREFIX = '[fabrydb Local] '
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES = {}
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = '/app/_site/static'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
# STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Staticfiles_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage'
# This is used as a "seed" for various hashing algorithms. This must be set to
# a very long random string (40+ characters).
# NOTE(review): placeholder value checked in -- this MUST be overridden with
# a real random secret (e.g. from the environment) before production use.
SECRET_KEY = 'secret'
| {
"content_hash": "bee4cbbfb3bae99680aed9ab06c8c93e",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 85,
"avg_line_length": 29.127659574468087,
"alnum_prop": 0.747991234477721,
"repo_name": "glabilloy/fabrydb",
"id": "d8c9ba372ff1a896209ecf9da8bb4196e6ca7db1",
"size": "1369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabrydb/conf/local_settings_heroku.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "82646"
},
{
"name": "HTML",
"bytes": "681369"
},
{
"name": "JavaScript",
"bytes": "3285402"
},
{
"name": "Makefile",
"bytes": "997"
},
{
"name": "Python",
"bytes": "56716"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Drops FoiMessage.original and redefines DeliveryStatus.status choices.
    # NOTE: appears Django-generated -- keep operations in sync with the
    # migration graph; limit manual edits to comments.
    dependencies = [
        ('foirequest', '0023_remove_foimessage_is_postal'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='foimessage',
            name='original',
        ),
        migrations.AlterField(
            model_name='deliverystatus',
            name='status',
            field=models.CharField(blank=True, choices=[('unknown', 'unknown'), ('sending', 'sending'), ('sent', 'sent'), ('received', 'received'), ('read', 'read'), ('deferred', 'deferred'), ('bounced', 'bounced'), ('expired', 'expired'), ('failed', 'failed')], max_length=32),
        ),
    ]
| {
"content_hash": "7a7fa798d499aaccbd262b40d303254f",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 278,
"avg_line_length": 33.40909090909091,
"alnum_prop": 0.5795918367346938,
"repo_name": "stefanw/froide",
"id": "9e0fdffd41a49c291ce29c17da3e3c80a71b25e9",
"size": "809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "froide/foirequest/migrations/0024_auto_20180710_1025.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17807"
},
{
"name": "HTML",
"bytes": "161162"
},
{
"name": "Java",
"bytes": "287939"
},
{
"name": "JavaScript",
"bytes": "1325034"
},
{
"name": "Makefile",
"bytes": "329"
},
{
"name": "Python",
"bytes": "1642783"
},
{
"name": "Shell",
"bytes": "1621"
}
],
"symlink_target": ""
} |
# Configuration template for the Discord YouTube-checker bot.
# Fill in each placeholder before running.
discord_bot_token = 'YOUR DISCORD BOT TOKEN'
bot_description = 'ENTER THE BOT DESCRIPTION HERE'
# Channel on which you want to post the videos.
# In Discord: enable Developer Mode, right-click the channel and copy its ID.
discord_channel = ''
yt_api_key = 'ENTER YOUR YT API KEY HERE'
# YouTube channel IDs to watch (the 'UC...' part of the channel URL).
# Separate multiple IDs with commas.
yt_channel_id = ['',  # First ID
                 ''   # Second ID, etc...
                 ]
# Same channel IDs as above, mapped to a per-channel value.
dict_channels = {'ENTER CHANNEL ID HERE': '',  # First Channel ID
                 'ENTER CHANNEL ID HERE': '',  # Second Channel ID, etc...
                 }
# Seconds to wait between search intervals.
sleep_time = 60
| {
"content_hash": "fa7aa9919d6c6895f3ddb6e20c2c6f5e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 74,
"avg_line_length": 34.23809523809524,
"alnum_prop": 0.6258692628650904,
"repo_name": "Projekt95/Discord-YT-Checker",
"id": "c6b9ea1bd06e8fe43c584f714c0fe449edc96c13",
"size": "768",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Data/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3000"
}
],
"symlink_target": ""
} |
import curses
class Colors(object):
    """Registers and exposes the curses color pairs used by the UI."""
    def register(self):
        """Registers curses color pairs for later usage"""
        # Pair 1: header text (red on black).
        curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
        # Pair 2: the currently selected menu entry (green on black).
        curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK)
    def color_header(self):
        """returns the curses color pair intended for header usage"""
        return curses.color_pair(1)
    def color_selected_menu_entry(self):
        """returns the curses color pair intended for selected color entries"""
        return curses.color_pair(2)
| {
"content_hash": "5743d7eefecf60f859d07a44cbd56dd5",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 79,
"avg_line_length": 29.85,
"alnum_prop": 0.6549413735343383,
"repo_name": "omgwtflaserguns/matomatpy",
"id": "feb28ab0d91c6e33ab9e2af67feedc85d8408da9",
"size": "597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "matomat/ui/colors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19025"
}
],
"symlink_target": ""
} |
"""Virtual filesystem module based on pyfsntfs."""
import stat
from typing import Any, Callable, Dict, Iterable, Optional, Text, Type
import pyfsntfs
from grr_response_client import client_utils
from grr_response_client.vfs_handlers import base as vfs_base
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
# Caches pyfsntfs.volume instances, keyed by the serialized base pathspec.
MOUNT_CACHE = utils.TimeBasedCache()
# NTFS file attribute flag bits; see
# https://github.com/libyal/libfsntfs/blob/master/documentation/New%20Technologies%20File%20System%20(NTFS).asciidoc#file_attribute_flags
FILE_ATTRIBUTE_READONLY = 0x00000001
FILE_ATTRIBUTE_HIDDEN = 0x00000002
def _GetAlternateDataStreamCaseInsensitive(
    fd: pyfsntfs.file_entry, name: Text) -> Optional[pyfsntfs.data_stream]:
  """Finds an alternate data stream by case-insensitive name.

  Returns None when no stream of the file entry matches.
  """
  wanted = name.lower()
  for stream in fd.alternate_data_streams:
    if stream.name.lower() == wanted:
      return stream
  return None
class NTFSFile(vfs_base.VFSHandler):
  """VFSHandler implementation based on pyfsntfs."""
  supported_pathtype = rdf_paths.PathSpec.PathType.NTFS
  def __init__(self,
               base_fd: Optional[vfs_base.VFSHandler],
               handlers: Dict[Any, Type[vfs_base.VFSHandler]],
               pathspec: Optional[rdf_paths.PathSpec] = None,
               progress_callback: Optional[Callable[[], None]] = None):
    """Opens a file entry (and optionally a data stream) inside an NTFS volume.

    base_fd is either another NTFSFile directory (nested open) or a raw
    file-like handler over the volume image.  Raises IOError when the
    path/stream cannot be opened, ValueError when base_fd is missing.
    """
    super().__init__(
        base_fd,
        handlers=handlers,
        pathspec=pathspec,
        progress_callback=progress_callback)
    # self.pathspec is initialized to a copy of base_fd
    if base_fd is None:
      raise ValueError("NTFS driver must have a file base.")
    elif isinstance(base_fd, NTFSFile) and base_fd.IsDirectory():
      # Nested open relative to an already-open NTFS directory: reuse its
      # volume and merge the relative path into the last pathspec component.
      self.volume = base_fd.volume
      last_path = utils.JoinPath(self.pathspec.last.path, pathspec.path)
      # Replace the last component with this one.
      self.pathspec.Pop(-1)
      self.pathspec.Append(pathspec)
      self.pathspec.last.path = last_path
    elif not base_fd.IsDirectory():
      # base_fd is a raw volume image: open (or fetch a cached) pyfsntfs
      # volume keyed by the serialized base pathspec.
      cache_key = base_fd.pathspec.SerializeToBytes()
      try:
        self.volume = MOUNT_CACHE.Get(cache_key)
      except KeyError:
        self.volume = pyfsntfs.volume()
        self.volume.open_file_object(base_fd)
        MOUNT_CACHE.Put(cache_key, self.volume)
      self.pathspec.Append(pathspec)
    elif base_fd.IsDirectory():
      raise IOError("Base must be a file.")
    self.fd = None
    self.data_stream = None
    # Try to open by "inode" number.
    if pathspec is not None and pathspec.HasField("inode"):
      # The lower 48 bits of the file_reference are the MFT index.
      mft_index = pathspec.inode & ((1 << 48) - 1)
      self.fd = self.volume.get_file_entry(mft_index)
      # If the file_reference changed, then the MFT entry points now to
      # a different file. Reopen it by path.
      if self.fd is not None and self.fd.file_reference != pathspec.inode:
        self.fd = None
    # Try to open by path
    if self.fd is None:
      path = self.pathspec.last.path
      path = path.replace("/", "\\")
      self.fd = self.volume.get_file_entry_by_path(path)
    if self.fd is None:
      raise IOError("Failed to open {}".format(path))
    # Determine data stream
    if pathspec is not None and pathspec.HasField("stream_name"):
      if pathspec.path_options == rdf_paths.PathSpec.Options.CASE_LITERAL:
        self.data_stream = self.fd.get_alternate_data_stream_by_name(
            pathspec.stream_name)
      else:
        self.data_stream = _GetAlternateDataStreamCaseInsensitive(
            self.fd, pathspec.stream_name)
      if self.data_stream is None:
        raise IOError("Failed to open data stream {} in {}.".format(
            pathspec.stream_name, path))
      self.pathspec.last.stream_name = self.data_stream.name
    else:
      if self.fd.has_default_data_stream():
        self.data_stream = self.fd
    # self.pathspec will be used for future access to this file.
    # The name is now literal, so disable case-insensitive lookup (expensive).
    self.pathspec.last.path_options = rdf_paths.PathSpec.Options.CASE_LITERAL
    # Access the file by file_reference, to skip path lookups.
    self.pathspec.last.inode = self.fd.file_reference
    if not self.IsDirectory():
      if self.data_stream is not None:
        self.size = self.data_stream.get_size()
      else:
        self.size = 0
  def Stat(self,
           ext_attrs: bool = False,
           follow_symlink: bool = True) -> rdf_client_fs.StatEntry:
    """Returns a StatEntry for the opened entry/stream (args are ignored)."""
    return self._Stat(self.fd, self.data_stream, self.pathspec.Copy())
  def Read(self, length: int) -> bytes:
    """Reads up to *length* bytes from the current offset and advances it."""
    self._CheckIsFile()
    self.data_stream.seek(self.offset)
    data = self.data_stream.read(length)
    self.offset += len(data)
    return data
  def IsDirectory(self) -> bool:
    """True when the opened entry has a directory-entries index."""
    return self.fd.has_directory_entries_index()
  def ListFiles(self,
                ext_attrs: bool = False) -> Iterable[rdf_client_fs.StatEntry]:
    """Yields a StatEntry per child, plus one per alternate data stream."""
    del ext_attrs  # Unused.
    self._CheckIsDirectory()
    for entry in self.fd.sub_file_entries:
      pathspec = self.pathspec.Copy()
      pathspec.last.path = utils.JoinPath(pathspec.last.path, entry.name)
      pathspec.last.inode = entry.file_reference
      pathspec.last.options = rdf_paths.PathSpec.Options.CASE_LITERAL
      data_stream = entry if entry.has_default_data_stream() else None
      yield self._Stat(entry, data_stream, pathspec.Copy())
      # Create extra entries for alternate data streams
      for data_stream in entry.alternate_data_streams:
        pathspec.last.stream_name = data_stream.name
        yield self._Stat(entry, data_stream, pathspec.Copy())
  def ListNames(self) -> Iterable[Text]:
    """Yields the names of the directory's children."""
    self._CheckIsDirectory()
    for entry in self.fd.sub_file_entries:
      yield entry.name
  def _CheckIsDirectory(self) -> None:
    """Raises IOError unless the opened entry is a directory."""
    if not self.IsDirectory():
      raise IOError("{} is not a directory".format(
          self.pathspec.CollapsePath()))
  def _CheckIsFile(self) -> None:
    """Raises IOError unless the opened entry is a regular file."""
    if self.IsDirectory():
      raise IOError("{} is not a file".format(self.pathspec.CollapsePath()))
  def _Stat(
      self,
      entry: pyfsntfs.file_entry,
      data_stream: pyfsntfs.data_stream,
      pathspec: rdf_paths.PathSpec,
  ) -> rdf_client_fs.StatEntry:
    """Builds a StatEntry from an NTFS file entry and optional data stream."""
    st = rdf_client_fs.StatEntry()
    st.pathspec = pathspec
    st.st_atime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_access_time())
    st.st_mtime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_modification_time())
    st.st_btime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_creation_time())
    st.st_ctime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_entry_modification_time())
    if entry.has_directory_entries_index():
      st.st_mode = stat.S_IFDIR
    else:
      st.st_mode = stat.S_IFREG
      if data_stream is not None:
        st.st_size = data_stream.get_size()
    # Synthesize POSIX permission bits from the NTFS attribute flags:
    # always executable; writable unless READONLY; readable unless HIDDEN.
    flags = entry.file_attribute_flags
    st.st_mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    if (flags & FILE_ATTRIBUTE_READONLY) == 0:
      st.st_mode |= stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
    if (flags & FILE_ATTRIBUTE_HIDDEN) == 0:
      st.st_mode |= stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
    return st
  @classmethod
  def Open(
      cls,
      fd: Optional[vfs_base.VFSHandler],
      component: rdf_paths.PathSpec,
      handlers: Dict[Any, Type[vfs_base.VFSHandler]],
      pathspec: Optional[rdf_paths.PathSpec] = None,
      progress_callback: Optional[Callable[[], None]] = None
  ) -> Optional[vfs_base.VFSHandler]:
    """Factory hook: resolves top-level NTFS pathspecs to a raw device first."""
    # A Pathspec which starts with NTFS means we need to resolve the mount
    # point at runtime.
    if (fd is None and
        component.pathtype == rdf_paths.PathSpec.PathType.NTFS and
        pathspec is not None):
      # We are the top level handler. This means we need to check the system
      # mounts to work out the exact mount point and device we need to
      # open. We then modify the pathspec so we get nested in the raw
      # pathspec.
      raw_pathspec, corrected_path = client_utils.GetRawDevice(component.path)  # pytype: disable=attribute-error
      # Insert the raw device before the component in the pathspec and correct
      # the path
      component.path = corrected_path
      pathspec.Insert(0, component)
      pathspec.Insert(0, raw_pathspec)
      # Allow incoming pathspec to be given in the local system path
      # conventions.
      for component in pathspec:
        if component.path:
          component.path = client_utils.LocalPathToCanonicalPath(component.path)
      # We have not actually opened anything in this iteration, but modified the
      # pathspec. Next time we should be able to open it properly.
      return fd
    # If an inode is specified, just use it directly.
    # This is necessary so that component.path is ignored.
    elif component.HasField("inode"):
      return NTFSFile(
          fd, handlers, component, progress_callback=progress_callback)
    else:
      return super(NTFSFile, cls).Open(
          fd=fd,
          component=component,
          handlers=handlers,
          pathspec=pathspec,
          progress_callback=progress_callback)
| {
"content_hash": "ef347db7528f0a599f9a2a28d244a778",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 137,
"avg_line_length": 36.91235059760956,
"alnum_prop": 0.6700485698866703,
"repo_name": "google/grr",
"id": "779e4c457ab06829185cfb516c1a84b32d55cded",
"size": "9287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grr/client/grr_response_client/vfs_handlers/ntfs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "12697"
},
{
"name": "C++",
"bytes": "54814"
},
{
"name": "Dockerfile",
"bytes": "1822"
},
{
"name": "HCL",
"bytes": "8451"
},
{
"name": "HTML",
"bytes": "366783"
},
{
"name": "JavaScript",
"bytes": "13088"
},
{
"name": "Jupyter Notebook",
"bytes": "199216"
},
{
"name": "Makefile",
"bytes": "3244"
},
{
"name": "PowerShell",
"bytes": "531"
},
{
"name": "Python",
"bytes": "8844725"
},
{
"name": "Roff",
"bytes": "444"
},
{
"name": "SCSS",
"bytes": "105120"
},
{
"name": "Shell",
"bytes": "48663"
},
{
"name": "Standard ML",
"bytes": "8172"
},
{
"name": "TypeScript",
"bytes": "2139377"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: re-declares County.state as a
    # ForeignKey to stateInfo.State with a default pk of 1.

    dependencies = [
        ('stateInfo', '0003_auto_20151006_1818'),
    ]

    operations = [
        migrations.AlterField(
            model_name='county',
            name='state',
            field=models.ForeignKey(default=1, to='stateInfo.State'),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "8ba392425d1f7f0d76110adcb10a5ddd",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 69,
"avg_line_length": 22.789473684210527,
"alnum_prop": 0.5935334872979214,
"repo_name": "antoineclaval/ruralpowerproject",
"id": "ff21df5a48b702ebc49140674e33e911ce2b0a23",
"size": "457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ruralpowerproject/stateInfo/migrations/0004_auto_20151006_1950.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2614"
},
{
"name": "HTML",
"bytes": "23995"
},
{
"name": "JavaScript",
"bytes": "3257"
},
{
"name": "PLpgSQL",
"bytes": "75440"
},
{
"name": "Python",
"bytes": "58310"
}
],
"symlink_target": ""
} |
class ParserError(Exception):
    """Base class for all scenario-parser errors."""
class ParserJSONLoadingError(ParserError):
    """Raised when the scenario JSON text cannot be decoded.

    Attributes:
        json_decode_error: the underlying json.JSONDecodeError (or similar).
        msg: human-readable description including the decoder's message.
    """

    def __init__(self, json_decode_error):
        self.json_decode_error = json_decode_error
        self.msg = 'Failed Loading Scenario JSON\n' + \
            str(json_decode_error)
        # Fix: forward the message to Exception so str(e)/e.args work;
        # previously Exception.__init__ was never called and str(e) was ''.
        super(ParserJSONLoadingError, self).__init__(self.msg)
class ParserJSONValidationError(ParserError):
    """Raised when the scenario JSON fails schema validation.

    Attributes:
        validation_error: the underlying validation error object; its
            `.message` attribute is exposed as `msg`.
    """

    def __init__(self, validation_error):
        self.validation_error = validation_error
        self.msg = validation_error.message
        # Fix: forward the message to Exception so str(e)/e.args work;
        # previously Exception.__init__ was never called and str(e) was ''.
        super(ParserJSONValidationError, self).__init__(self.msg)
| {
"content_hash": "675efb733b5a965e4cbb15bad9d8ad6e",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 55,
"avg_line_length": 30.6,
"alnum_prop": 0.6710239651416122,
"repo_name": "shlomihod/scenario",
"id": "f2f0623a363a4c1e540d24bfbd9318095854a94e",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scenario/parser/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8315"
},
{
"name": "HTML",
"bytes": "5323"
},
{
"name": "JavaScript",
"bytes": "108108"
},
{
"name": "Python",
"bytes": "42391"
}
],
"symlink_target": ""
} |
from util import crypto
from util import time_
from util import urls
TOKEN_EXPIRY_SECS = 60 * 60 * 24 * 7 # 1 week
def generate_unsubscribe_url(email):
    """Build an absolute /reminder/unsubscribe URL carrying a signed token."""
    params = dict(email=email, token=generate_email_token(email))
    relative = urls.append_params('/reminder/unsubscribe', params)
    return urls.absurl(relative)
def verify_email_token(token, email):
    """Return True iff `token` is a valid, unexpired token for `email`.

    Tokens are produced by generate_email_token(); the decrypted payload has
    the form '<email>|||<unix timestamp>'.  Any decryption *or parsing*
    failure yields False rather than raising.
    """
    email = email.strip().lower()
    try:
        msg = crypto.decrypt(token)
        # Parsing is inside the try: previously a payload without '|||' or
        # with a non-numeric timestamp raised IndexError/ValueError through
        # to the caller.  Also narrowed from a bare `except:` so
        # KeyboardInterrupt/SystemExit still propagate.
        parts = msg.split('|||')
        token_email, token_timestamp = parts[0], int(parts[1])
    except Exception:
        return False
    return (token_email == email
            and time_.current_timestamp() - token_timestamp < TOKEN_EXPIRY_SECS)
def generate_email_token(email):
    """Return an encrypted token binding the normalized email to 'now'."""
    normalized = email.strip().lower()
    payload = '%s|||%d' % (normalized, time_.current_timestamp())
    return crypto.encrypt(payload)
| {
"content_hash": "901771b6d1916aae913e9b5f7c669ed4",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 76,
"avg_line_length": 30.925925925925927,
"alnum_prop": 0.6550898203592814,
"repo_name": "jlgoldman/writetogov",
"id": "e8cbdc641ab7bd65b82b0850b7b2a282d31cec3a",
"size": "835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "logic/reminder_logic.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "7583"
},
{
"name": "HTML",
"bytes": "50704"
},
{
"name": "JavaScript",
"bytes": "23391"
},
{
"name": "Python",
"bytes": "132147"
},
{
"name": "Shell",
"bytes": "745"
}
],
"symlink_target": ""
} |
import maya.cmds as cmds
import tempfile
import base64
import os
from maya.app.general.mayaMixin import MayaQWidgetDockableMixin
import maya.OpenMayaUI as OpenMayaUI
import PySide2.QtCore as QtCore
import PySide2.QtGui as QtGui
import PySide2.QtWidgets as QtWidgets
import shiboken2
import mtoa.ui.arnoldmenu as arnoldmenu
class QGet(QtCore.QObject):
    """Helper for locating Maya/Arnold Qt widgets by window title or object name."""

    def __init__(self, parent=None):
        super(QGet, self).__init__(parent=parent)
        ptr = OpenMayaUI.MQtUtil.mainWindow()
        mayaMainWindow = shiboken2.wrapInstance(long(ptr), QtWidgets.QMainWindow)
        # Stored as the *callable*: call self.allWidgets() for a fresh snapshot.
        self.allWidgets = QtWidgets.QApplication.allWidgets
        self.mayaMainWindow = mayaMainWindow
        self.QRenderView = None
        self.QRenderViewControl = None
        self.widget = None

    def _printInfo(self, obj):
        """Dump identifying/debug info about a widget to stdout."""
        print('objectName:')
        print(obj.objectName())
        print('windowTitle:')
        print(obj.windowTitle())
        print('Type:')
        print(type(obj))
        print('dir():')
        print(dir(obj))
        print('children():')
        print(obj.children())
        print('parent:')
        print(obj.parent())

    def getQRenderView(self, printInfo=False, query=False):
        """Find (and unless query=True, launch) the Arnold Render View.

        Sets self.QRenderView / self.QRenderViewControl as a side effect and
        returns the render view main window (or None).
        """
        def _set():
            for obj in self.allWidgets():
                if type(obj) is QtWidgets.QMainWindow:
                    if obj.windowTitle() == 'Arnold Render View':
                        self.QRenderView = obj
                        break
            for obj in self.allWidgets():
                if type(obj) is QtWidgets.QWidget:
                    if obj.windowTitle() == 'Arnold RenderView':
                        self.QRenderViewControl = obj
                        break
        _set()
        if self.QRenderView is None and query is False:
            # Not open yet: ask MtoA to launch it, then rescan.
            arnoldmenu.arnoldMtoARenderView()
            _set()
        if printInfo:
            self._printInfo(self.QRenderView)
        return self.QRenderView

    def getByWindowTitle(self, string):
        """Return the first QWidget whose windowTitle matches, else None."""
        for obj in self.allWidgets():
            if type(obj) is QtWidgets.QWidget:
                if obj.windowTitle() == string:
                    self.widget = obj
                    return self.widget

    def getByObjectName(self, string):
        """Return the first QWidget whose objectName matches, else None."""
        # Fix: the original iterated `self.allWidgets` (the bound method
        # object itself) instead of calling it, raising TypeError at runtime.
        for obj in self.allWidgets():
            if type(obj) is QtWidgets.QWidget:
                if obj.objectName() == string:
                    self.widget = obj
                    return self.widget
class AssetsLayoutWindow(MayaQWidgetDockableMixin, QtWidgets.QWidget):
    """Dockable container window holding a tab widget (Arnold IPR / UV Editor)
    and a horizontal layout for side panels; see assetWindow() for assembly."""

    # Base name used for the widget and its Maya workspace control.
    toolName = 'assetsLayoutWidget'

    def __init__(self, parent=None):
        # Tear down any previous instance/workspace control first so Maya
        # doesn't end up with duplicates.
        self.deleteInstances()
        super(AssetsLayoutWindow, self).__init__(parent=parent)
        ptr = OpenMayaUI.MQtUtil.mainWindow()
        self.mayaMainWindow = shiboken2.wrapInstance(long(ptr), QtWidgets.QMainWindow)
        self.setWindowFlags(QtCore.Qt.Window)
        self.setWindowTitle('Assets Layout')
        self.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        self.setObjectName(self.__class__.toolName)
        self.setContentsMargins(0,0,0,0)
        # Set window Layout
        QHBoxLayout = QtWidgets.QHBoxLayout()
        QHBoxLayout.setObjectName('%s%s'%(self.__class__.toolName,'QHBoxLayout'))
        QHBoxLayout.setContentsMargins(0,0,0,0)
        self.setLayout(QHBoxLayout)
        self.QHBoxLayout = QHBoxLayout
        # Central tab widget; tabs are inserted later by assetWindow().
        QTabWidget = QtWidgets.QTabWidget()
        QTabWidget.setObjectName('%s%s'%(self.__class__.toolName,'QTabWidget'))
        QTabWidget.setContentsMargins(0,0,0,0)
        self.QHBoxLayout.addWidget(QTabWidget)
        self.QTabWidget = QTabWidget

    def hideEvent(self, event):
        """
        On a hideEvent unparent the render view.
        This is needed to avoid a maya crash.
        """
        print 'Assets Layout was closed.'
        o = QGet()
        # query=True: locate only, never launch the render view here.
        o.getQRenderView(query=True)
        if o.QRenderViewControl is not None:
            o.QRenderViewControl.setParent(o.mayaMainWindow, QtCore.Qt.Window)
            o.QRenderViewControl.hide()
        else:
            if o.QRenderView is not None:
                o.QRenderView.setParent(o.mayaMainWindow, QtCore.Qt.Window)
                o.QRenderView.hide()
        # Re-home the UV Editor as well, for the same crash-avoidance reason.
        uvEditor = o.getByWindowTitle('UV Editor')
        if uvEditor is not None:
            uvEditor.setParent(o.mayaMainWindow, QtCore.Qt.Window)
            uvEditor.hide()

    def paintEvent(self, event):
        # Intentionally empty: suppress default painting of the container.
        pass

    def deleteInstances(self):
        """Close/delete any existing workspace control and widget instance."""
        o = QGet()
        # Delete the workspaceControl
        control = self.__class__.toolName + 'WorkspaceControl'
        if cmds.workspaceControl(control, q=True, exists=True):
            cmds.workspaceControl(control, e=True, close=True)
            print 'Deleting control {0}'.format(control)
            cmds.deleteUI(control, control=True)
        # Delete the instance
        for obj in o.allWidgets():
            if obj.objectName() == self.__class__.toolName:
                # NOTE(review): this exists-query result is discarded --
                # presumably left over from debugging; confirm.
                cmds.workspaceControl(self.__class__.toolName + 'WorkspaceControl', query=True, exists=True)
                print 'Deleting instance {0}'.format(obj)
                # Delete it for good
                obj.setParent(None)
                obj.deleteLater()
class EventFilter(QtCore.QObject):
    """
    Event filter which emits a parent_closed signal whenever
    the monitored widget closes.
    via:
    https://github.com/shotgunsoftware/tk-maya/blob/master/python/tk_maya/panel_util.py
    """

    def set_associated_widget(self, widget_id):
        """
        Set the widget to effect
        """
        self._widget_id = widget_id

    def eventFilter(self, obj, event):
        # NOTE(review): stray debug print fires for *every* filtered event;
        # the docstring below is also unreachable as a docstring because this
        # statement precedes it -- presumably both are leftovers; confirm.
        print event.type()
        """
        QT Event filter callback
        :param obj: The object where the event originated from
        :param event: The actual event object
        :returns: True if event was consumed, False if not
        """
        if event.type() == QtCore.QEvent.Type.Close:
            print 'CloseEvent'
        # Never consume the event; let Qt continue normal delivery.
        return False
def assetWindow(*args):
    """Assemble the dockable 'Assets Layout' window.

    Re-parents the Arnold Render View and the UV Editor into the window's tab
    widget, docks the Render Setup Utility panel beside them, and sends the
    window to a second monitor when one exists.  *args is ignored (Maya shelf
    callbacks pass extra arguments).
    """
    windowID = 'assetsWindow'
    window = None
    # NOTE(review): windowMargin/windowFrameWidth/windowTitleBarHeight are
    # never read -- presumably leftovers from earlier geometry math; confirm.
    windowMargin = (0, 0)
    windowFrameWidth = 8
    windowTitleBarHeight = 30
    ptr = OpenMayaUI.MQtUtil.mainWindow()
    mayaMainWindow = shiboken2.wrapInstance(long(ptr), QtWidgets.QMainWindow)
    window = AssetsLayoutWindow()
    window.show(dockable=True)
    o = QGet()
    # Locates the render view, launching it if necessary.
    o.getQRenderView()
    if o.QRenderViewControl is not None:
        o.QRenderViewControl.hide()
        window.QTabWidget.insertTab(0, o.QRenderViewControl, '&Arnold IPR')
        o.QRenderViewControl.show()
    # # UV Editor
    uvEditorQt = o.getByWindowTitle('UV Editor')
    if uvEditorQt is None:
        # Not open yet: open it, then re-query.
        cmds.TextureViewWindow()
        uvEditorQt = o.getByWindowTitle('UV Editor')
    if uvEditorQt is not None:
        window.QTabWidget.insertTab(0, uvEditorQt, '&UV Editor')
    # Render Setup Utility
    import RenderSetupUtility.main.ui as ui; ui.createUI()
    rsUtilQt = o.getByWindowTitle('Render Setup Utility')
    rsUtilQt.setFixedWidth(375)
    rsUtilQt.setObjectName('rsUtilityWidget')
    window.QHBoxLayout.insertWidget(0, rsUtilQt)
    win = o.getByWindowTitle('Assets Layout')
    if QtWidgets.QDesktopWidget().screenCount() == 1:
        win.show();
    if QtWidgets.QDesktopWidget().screenCount() >= 2:
        # Full-screen on the secondary monitor.
        win.showFullScreen()
        _setScreenSizeWidget(win)
def _setScreenSizeWidget(obj):
    """Move `obj` onto the second monitor's full geometry, when one exists."""
    desktop = QtWidgets.QDesktopWidget()
    if desktop.screenCount() < 2:
        return
    obj.setGeometry(desktop.screenGeometry(1))
assetWindow()
| {
"content_hash": "d8f10337d5d50e16dc73d9426abe439b",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 108,
"avg_line_length": 33.39111111111111,
"alnum_prop": 0.6261147344602689,
"repo_name": "wgergely/After-Effects",
"id": "e528050cf3e7f5fe145d3aebae1b89c3b9d30a23",
"size": "7513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Maya/gwCustomShelf/gwCustomShelf_assetsLayout.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "505980"
}
],
"symlink_target": ""
} |
try :
# standard lib
import os
import importlib
import socket
import signal
from datetime import datetime
from io import StringIO
import tornado.gen
import sys
import time
import json
import pprint
from bson.son import SON
from time import sleep
import requests
import configparser
# tornado
import tornado
import tornado.web
import tornado.websocket
import tornado.httpserver
from tornado.queues import Queue
from tornado.ioloop import PeriodicCallback
from tornado.locks import Semaphore
from tornado.concurrent import run_on_executor
from concurrent.futures import ThreadPoolExecutor
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado import gen
# chilkat
import chilkat
# faux interrupts; thriftypy
import thriftpy
from thriftpy.rpc import make_client
except Exception as e :
with open( '/var/log/chromatic/module_error_trinity_async' , 'w' ) as f :
sleep( 2 )
f.write( str( e ) )
f.write( str( '\n' ) )
#motor
import motor.motor_tornado
cci_mongo = None
db = None
# cci
from cci_mta_trinity.application import app as trinity_app
from cci_mta_trinity.application_vulture import app ,\
mongo_no_resource_exception , \
_logger
from cci_mta_trinity.streams import tr_kafka_intf , \
tr_mongo_fetch , \
tr_mongo_search
# file scope
callback_class_dispatch = { 'document' : 'tr_payload_stalker' ,
'stream' : 'tr_stream-stalker' }
# exclusive
policy_semaphore = Semaphore( 1 )
g_periodic_callbacks = dict()
probe_thred = None
http_tunnel_pid = None
default_stream_proxy_port = 7082
default_document_proxy_port = 7083
const_tunnel_process = 'cci-cci_mta_trinity-tunnel'
stream_bootstrap = None
default_policy_j = None
db_bootstrap = None
kp = None
TEMPLATE_PATH = os.path.join(os.path.join(os.path.dirname(__file__) , 'templates') )
max_wait_seconds_before_shutdown = 3
socket_msg = ['app_services' , 'async_services' , 'tunnel_services']
trinity_push_q = Queue()
max_wait_seconds_before_shutdown = 3
http_server = None
trinity_push_clients = []
mta_thrift_interrupt = None
mta_client = None
interrupt_retry = 3
kernel_host_endpoint = None
kernel_host_port = None
# ---------------------------------------------------------------------------------------------
def trinity_push_msg( message ) :
    """Broadcast `message` to every connected trinity websocket client."""
    for listener in trinity_push_clients :
        listener.write_message( message )
# ---------------------------------------------------------------------------------------------
class trinity_push_handler( tornado.websocket.WebSocketHandler ) :
    """Websocket endpoint (port 7082) that pushes service status messages.

    Connected handlers register themselves in the module-level list
    ``trinity_push_clients`` so trinity_push_msg() can broadcast to them.
    """

    def open( self ) :
        """Greet the new client on each service channel, then register it."""
        self.write_message( socket_msg[1] + " :...this is the cci mta async server speaking from a web socket on port 7082..." )
        self.write_message( socket_msg[0] + " :...this is the cci mta app server speaking from a web socket on port 7082..." )
        self.write_message( socket_msg[2] + " :...this is cci tunnel services speaking from a web socket on port 7082..." )
        trinity_push_clients.append( self )

    def on_message( self , message ) :
        """Echo an ack so the peer can confirm the link is alive."""
        self.write_message( 'async_services:...ack...%s...' % str( message ) )

    def on_close(self):
        """Deregister; guarded in case open() never completed."""
        if self in trinity_push_clients :
            trinity_push_clients.remove( self )

    @classmethod
    def broadcast_to_clients( cls ) :
        """Push the current local ip to every registered client."""
        for client in trinity_push_clients:
            client.write_message( '...broadcast....' + retr_local_ip_info() )
# ---------------------------------------------------------------------------------------------
def retr_local_ip_info() :
    """Best-effort discovery of this host's primary (non-loopback) IPv4 address.

    Tries a hostname lookup first, then falls back to the UDP-connect trick
    (connecting a datagram socket selects the outbound interface without
    sending any packets).  Returns '0.0.0.0' when neither works.
    """
    local_ip = '0.0.0.0'
    try :
        # First choice: a non-loopback address from the hostname lookup.
        candidates = [ ip for ip in socket.gethostbyname_ex( socket.gethostname() )[2]
                       if not ip.startswith( "127." ) ]
        if candidates :
            return candidates[0]
        # Fallback: UDP connect to a public resolver; no traffic is sent.
        probe = socket.socket( socket.AF_INET , socket.SOCK_DGRAM )
        try :
            probe.connect( ('8.8.8.8', 53) )
            return probe.getsockname()[0]
        finally :
            probe.close()
    except Exception :
        # Deliberately best-effort: any failure falls through to the default.
        # (Narrowed from a bare `except:`; the unused outer socket the
        # original opened and only ever closed is gone as well.)
        pass
    return local_ip
# --------------------------------------------------------------------------------------
class vulture_index( tornado.web.RequestHandler ) :
    """GET index handler: redirect to the mongo connection-info page on 7080."""

    @tornado.gen.coroutine
    def get( self ) :
        """Redirect to http://<local-ip>:7080/mongo/imap2017_db_connection_info."""
        try :
            s = 'http://%s:7080/mongo/imap2017_db_connection_info' % retr_local_ip_info()
            self.redirect( s )
        # Narrowed from a bare `except:`, which would also have swallowed
        # SystemExit / KeyboardInterrupt / GeneratorExit.
        except Exception :
            self.write( '..redirect failed..' )
# --------------------------------------------------------------------------------------
class queue_client() :
    """Consumes policy-start requests and spins up one PeriodicCallback each."""

    def __init__(self):
        self.queued_items = Queue()

    @tornado.gen.coroutine
    def watch_queue(self):
        """Run forever, registering a PeriodicCallback per queued policy item.

        Each item is a dict with 'moniker', 'provider_type', 'db_bootstrap'
        and 'interval' (seconds) keys; callbacks are registered in the
        module-level g_periodic_callbacks under '<moniker>-<provider_type>'.
        """
        try :
            while True:
                items = yield self.queued_items.get()
                if items['moniker'] in g_periodic_callbacks :
                    _logger.info( '..%s policy %s already in effect' % ( items['provider_type'] ,
                                                                        items['moniker'] ) )
                    continue
                # Fix: bind the current item via a default argument.  A plain
                # closure over `items` late-binds, so every callback would
                # have fired with the most recently dequeued item.
                pc = PeriodicCallback( lambda items=items: policy_callback( items['provider_type'] ,
                                                                            items['moniker'] ,
                                                                            items['db_bootstrap']) ,
                                       int( items['interval'] ) * 1000 )
                _logger.info( '..started periodic callback with params%s' % json.dumps( items ) )
                pc.start()
                with ( yield policy_semaphore.acquire() ) :
                    key = '%s-%s' % ( items['moniker'] , items['provider_type'] )
                    g_periodic_callbacks[key] = pc
        except Exception as e :
            _logger.error( 'watch_queue: %s' % str( e ) )
        # Fix: no trailing policy_semaphore.release() here.  The context
        # manager returned by acquire() already releases on exit, so the old
        # `finally: release()` inflated the semaphore past its initial count.
# --------------------------------------------------------------------------------------
class queue_search_client() :
    """Consumes search-gadget requests; currently just dumps them to stdout."""

    def __init__(self) :
        self.queued_items = Queue()
        # The chilkat MIME component must be unlocked with the license key
        # before any MIME parsing can happen.
        self.mime = chilkat.CkMime()
        success = self.mime.UnlockComponent( "SFCNICOMSMIME_sK7NCgi98C5p" )
        if success != True:
            print( self.mime.lastErrorText() )
            print( "unlock failed." )

    @tornado.gen.coroutine
    def watch_search_queue(self) :
        """Drain the queue forever, printing each search request.

        NOTE(review): items are only serialized and printed today --
        presumably the real search dispatch is still to be implemented.
        """
        r = None
        try :
            while True:
                items = yield self.queued_items.get()
                data = json.dumps( items )
                print( data )
        except Exception as e :
            _logger.error( 'session_search_queue: %s' % e )
# --------------------------------------------------------------------------------------
class queue_stream_client() :
    """Consumes raw stream payload items posted by stream_queue_handler_post_msg."""

    def __init__(self):
        self.queued_items = Queue()
        # Chilkat MIME unlock is currently disabled; kept below for reference.
        '''
        self.mime = chilkat.CkMime()
        success = self.mime.UnlockComponent( "SFCNICOMSMIME_sK7NCgi98C5p" )
        if success != True:
            print( self.mime.lastErrorText() )
            print( "unlock failed." )
            sys.exit()
        '''
        pass

    @tornado.gen.coroutine
    def watch_stream_queue(self) :
        """Drain the stream queue forever; MIME peeking is currently stubbed out."""
        try :
            while True:
                items = yield self.queued_items.get()
                print( 'glad!' )
                # memory_f = StringIO ( items['resource'] )
                # b_ret = self.mime.LoadMimeBytes( memory_f.getValue() )
                # if b_ret is True :
                #     _logger.info( '..peeking payload....' )
        except Exception as e :
            _logger.error( 'watch_stream_queue: %s' % e )
# --------------------------------------------------------------------------------------
class stream_queue_handler_post_msg( tornado.web.RequestHandler ) :
    """POST /cci_mta_trinity/post_stream_msg -- enqueue a raw stream payload."""

    @tornado.gen.coroutine
    def post( self ) :
        """Parse the JSON body and hand it to the stream consumer queue."""
        try :
            json_data = json.loads( self.request.body )
            _logger.info( 'stream_queue_handler_post_msg: posting to quue....' )
            yield stream_client.queued_items.put( json_data )
        except Exception as e :
            _logger.error( 'stream_queue_handler_post_msg: %s' % e )
# --------------------------------------------------------------------------------------
class search_gadget_queue_handler( tornado.web.RequestHandler ) :
    """POST /cci_mta_trinity/search_gadget -- enqueue a search request."""

    @tornado.gen.coroutine
    def post( self ) :
        """Parse the JSON body, enqueue it for queue_search_client, echo it back."""
        try :
            json_data = json.loads( self.request.body )
            yield search_client.queued_items.put( json_data )
            _logger.info( 'session queued a new item: %s' % self.request.body )
            self.write( 'queued a new item: %s' % self.request.body )
        except Exception as e :
            _logger.error( 'session_queue_handler_update_session: %s' % str( e ) )
# --------------------------------------------------------------------------------------
class queue_handler_start_policy( tornado.web.RequestHandler ) :
    """POST /cci_mta_trinity/start -- enqueue a policy item for queue_client."""

    @tornado.gen.coroutine
    def post( self ) :
        """Parse the JSON body, enqueue it for the policy watcher, echo it back."""
        try :
            json_data = json.loads( self.request.body )
            print ( json_data )
            yield client.queued_items.put( json_data )
            _logger.info( 'queued a new item: %s' % self.request.body )
            self.write( 'queued a new item: %s' % self.request.body )
        except Exception as e :
            _logger.error( 'queue_handler_start_policy: %s' % str( e ) )
# --------------------------------------------------------------------------------------
class queue_handler_stop_policy( tornado.web.RequestHandler ) :
    """POST /cci_mta_trinity/stop -- stop a running policy PeriodicCallback."""

    @tornado.gen.coroutine
    def post( self ) :
        """Stop the callback registered under '<moniker>-<provider_type>'.

        NOTE(review): unlike the sibling handlers there is no try/except
        here, so a malformed JSON body propagates -- confirm intent.
        """
        json_data = json.loads( self.request.body )
        moniker = json_data['moniker']
        # Composite registration key matching queue_client.watch_queue.
        id = '%s-%s' % ( json_data['moniker'] , json_data['provider_type'] )
        if id in g_periodic_callbacks :
            g_periodic_callbacks[id].stop()
            _logger.info( '...stopped...: %s' % id )
            self.write( '...stopped...: %s' % id )
        else :
            _logger.warning( '...could not stop...policy not started: %s' % id )
            self.write( '...could not stop...policy not started: %s' % moniker )
# --------------------------------------------------------------------------------------
def create_ssh_tunnel_callback() :
    """Placeholder: ssh tunnel creation is not implemented yet; no-op."""
    return None
# --------------------------------------------------------------------------------------
@tornado.gen.coroutine
def post_faux_interrupt( moniker , **kargs ) :
    """Fire a one-shot thrift RPC ('faux interrupt') at the local mta
    syscall service on 127.0.0.1:7092.

    :param moniker: 'ping' to touch the service and stat its version, or
                    'unload_plugin' to ask it to stop its plugin.
    :param kargs: currently unused.
    """
    mta_client = None
    try :
        _logger.info( '...faux interrupt....' )
        # The thrift IDL is loaded fresh per call; the client is torn down
        # in the finally block below.
        mta_thrift_interrupt = thriftpy.load( 'cci_stream_mta_syscall.thrift' ,
                                              module_name = 'mta_thrift')
        mta_client = make_client( mta_thrift_interrupt.cci_stream_mta_syscall ,
                                  '127.0.0.1' ,
                                  7092 )
        _logger.info( '...post_faux_interrupt: %s', moniker )
        if moniker == 'ping' :
            if mta_client.ping() == 0 :
                _logger.info( '...touched mta syscall....checking version..' )
                params = dict()
                version = '0.8.4'
                params['version_info'] = version
                if mta_client.stat_version( params ) is True :
                    _logger.info( '...stat version %s ok...' % version )
                else :
                    _logger.info( '...stat version %s failed..' % version )
        elif moniker == 'unload_plugin':
            mta_client.unload_plugin( 'stop' )
    except Exception as e :
        _logger.error( str( e ) )
    finally :
        # Always close the transport, even on RPC failure.
        if not mta_client is None :
            mta_client.close()
# --------------------------------------------------------------------------------------
def init_update_callback() :
    """One-shot callback announcing all three services over the push socket."""
    try :
        trinity_push_msg( socket_msg[1] + " :...async server ok..." )
        trinity_push_msg( socket_msg[0] + " :...app server ok..." )
        trinity_push_msg( socket_msg[2] + " :...tunnel services ok..." )
    except Exception as e :
        _logger.error( e )
# --------------------------------------------------------------------------------------
def start_policy_local() :
    """Start the default document policy (module global default_policy_j)
    as a PeriodicCallback, unless one is already registered.
    """
    try :
        # Fix: register and check under the same '<moniker>-<provider_type>'
        # composite key used by queue_client.watch_queue.  The original
        # checked the bare moniker but stored under the *builtin* `id`
        # function object, so duplicates were never detected, and its log
        # line referenced an undefined name `items` (NameError when hit).
        key = '%s-%s' % ( default_policy_j['moniker'] , default_policy_j['provider_type'] )
        if key in g_periodic_callbacks :
            _logger.info( '..%s policy %s already in effect' % ( default_policy_j['provider_type'] ,
                                                                default_policy_j['moniker'] ) )
            return
        pc = PeriodicCallback( lambda: policy_callback( default_policy_j['provider_type'] ,
                                                        default_policy_j['moniker'] ,
                                                        db_bootstrap ) ,
                               int( default_policy_j['run_interval'] ) * 1000 )
        _logger.info( '..started periodic callback local cal with params%s' % json.dumps( default_policy_j ) )
        pc.start()
        g_periodic_callbacks[key] = pc
    except Exception as e :
        _logger.error( 'start policy local: %s' % str( e ) )
# --------------------------------------------------------------------------------------
def start_search_gadget_callback() :
    """Register and start the search-gadget PeriodicCallback (60s interval)."""
    # gadget
    try :
        # start gadget
        pc = PeriodicCallback( lambda: search_gadget_callback( True ) ,
                               60 * 1000 )
        _logger.info( '..started search gadget callback with interval of %d...' % 60 )
        g_periodic_callbacks['search_gadget'] = pc
        pc.start()
    except Exception as e :
        _logger.error( '...search gadget %s....' % str( e ) )
# --------------------------------------------------------------------------------------
class cci_sibling_probe( tornado.web.RequestHandler ) :
    """Handler stub intended to report a sibling device id (unimplemented)."""
# --------------------------------------------------------------------------------------
def probe_siblings_thred() :
    """Thread target stub: sibling discovery is not implemented; idles 3s."""
    # TODO: resolve the local ip (tr_utils.retr_local_ip_info) once implemented.
    sleep( 3 )
# --------------------------------------------------------------------------------------
@tornado.gen.coroutine
def sig_handler( sig , frame ) :
    """SIGTERM/SIGINT handler: schedule an orderly shutdown on the io loop.

    :param sig: signal number received.
    :param frame: interrupted stack frame (unused).
    """
    _logger.info('...caught signal: %s', sig )
    #sleep( 2 )
    #sys.exit( 0 )
    # Don't tear down inside the signal handler itself; hop onto the loop.
    tornado.ioloop.IOLoop.instance().add_callback( shutdown )
# --------------------------------------------------------------------------------------
@tornado.gen.coroutine
def shutdown() :
    """Stop all periodic callbacks, drain the io loop within the grace
    period, then notify the mta syscall service that we are unloading."""
    _logger.info( '....will shutdown in'
                  ' %s seconds ...' , max_wait_seconds_before_shutdown )
    io_loop = IOLoop.instance()
    deadline = time.time() + max_wait_seconds_before_shutdown
    for key , value in g_periodic_callbacks.items() :
        if value.is_running() :
            _logger.info( '...shutting down callback %s ' % key )
            value.stop()
    def stop_loop():
        # Poll every 0.5s until pending callbacks/timeouts drain or the
        # deadline passes, then stop the loop for real.
        # NOTE(review): _callbacks/_timeouts are tornado-private attributes
        # (removed in newer tornado); confirm the pinned tornado version.
        now = time.time()
        if now < deadline and ( io_loop._callbacks or io_loop._timeouts ) :
            io_loop.add_timeout( now + 0.5 , stop_loop )
            io_loop.add_timeout( now + 0.5 , stop_loop )
        else:
            io_loop.stop()
    stop_loop()
    _logger.info( '...shutdown....' )
    # rpc shtudown notification
    post_faux_interrupt( "unload_plugin" )
# -----------------------------------------------------------------------------------------
def update_status_callback( http_id ) :
    """Mark the http-server record as active via the flask app on port 7080.

    :param http_id: object id of the http server record to update.
    """
    try :
        r = requests.post( 'http://localhost:7080/mongo/update_http_server_status' ,
                           data = json.dumps( { "_id" : http_id ,
                                                "active" : "true" ,
                                                "last_known_ip" : retr_local_ip_info() ,
                                                "last_known_real_ip" : "0.0.0.0"
                                              }
                                            )
                         )
    except Exception as e :
        _logger.error( e )
# --------------------------------------------------------------------------------------
def post_search_gadget_text_callback( result , error ) :
    """Motor insert_one completion callback; result/error intentionally ignored."""
    return None
# --------------------------------------------------------------------------------------
@tornado.gen.coroutine
def search_gadget_text_record_manip( truncate ) :
    """Materialize searchable body/header text for every message not yet
    processed (search_gadget == False), then flag each message as done.

    :param truncate: currently unused -- TODO confirm intended semantics.
    """
    _logger.info( '...search_gadget_text_record_manip....' )
    # Chilkat MIME component must be unlocked before parsing payloads.
    mime = chilkat.CkMime()
    success = mime.UnlockComponent( "SFCNICOMSMIME_sK7NCgi98C5p" )
    if success is not True:
        print( mime.lastErrorText() )
        print( "unlock failed." )
    # collection cannot be sharded
    cursor = db.domain_existential_message.find( { 'search_gadget' : False } ,
                                                 modifiers={ '$snapshot' : True } )
    while ( yield cursor.fetch_next ) :
        document = cursor.next_object()
        print( str( document['_id'] ) )
        mime.LoadMime( document['payload'] )
        # Extracted body/header become the searchable materialized record.
        spec = { "message_id" : document['_id'] ,
                 "body" : mime.getBodyEncoded() ,
                 "header" : mime.getEntireHead() }
        db.domain_materialized_search_gadget.insert_one( spec ,
                                                         callback=post_search_gadget_text_callback )
        # Mark the source message processed so the next pass skips it.
        document['search_gadget'] = True
        db.domain_existential_message.save( document )
# -----------------------------------------------------------------------------------------
@tornado.gen.coroutine
def search_gadget_callback( truncate_existential) :
    """Periodic entry point: run one search-gadget materialization pass."""
    yield search_gadget_text_record_manip( truncate_existential )
# -----------------------------------------------------------------------------------------
def policy_callback( provider_type , moniker , db ) :
    """Periodic-callback hook for a document policy.

    Intended to instantiate a payload stalker for `provider_type`/`moniker`
    against the mongo connection string `db`; the stalker dispatch was
    commented out, so this is currently a deliberate no-op.

    :param provider_type: policy provider type string.
    :param moniker: policy moniker string.
    :param db: mongo connection string for the stalker.
    """
    # The disabled stalker-instantiation code (getattr on stream_mod via
    # callback_class_dispatch, with an explicit `del` because these callbacks
    # are reentrant) was removed as dead code; reinstate from VCS history
    # when the stream module dispatch returns.
    pass
# --------------------------------------------------------------------------------------
def interrupt_int() :
    """Build a thrift client for the kernel mta syscall endpoint and ping it.

    NOTE(review): mta_thrift_interrupt/mta_client are assigned as *locals*
    here, shadowing the module globals of the same names -- the client built
    against kernel_host_endpoint is discarded unused, and
    post_faux_interrupt() creates its own 127.0.0.1 client anyway; confirm
    whether a `global` declaration was intended.
    """
    _logger.info( '...instantiating interrupt interface....' )
    mta_thrift_interrupt = thriftpy.load( 'cci_stream_mta_syscall.thrift' ,
                                          module_name = 'mta_thrift')
    mta_client = make_client( mta_thrift_interrupt.cci_stream_mta_syscall ,
                              kernel_host_endpoint ,
                              int( kernel_host_port ) )
    post_faux_interrupt( "ping" )
# --------------------------------------------------------------------------------------
if __name__ == "__main__":
    # Wire SIGTERM/SIGINT to the orderly shutdown path (sig_handler/shutdown).
    _logger.info( '...setting system signal handlers....' )
    signal.signal( signal.SIGTERM , sig_handler )
    signal.signal( signal.SIGINT , sig_handler )
    # queue vulture
    client = queue_client()
    search_client = queue_search_client()
    stream_client = queue_stream_client()
    # NOTE(review): is_running is hard-wired False, so the else branch at the
    # bottom (which references an undefined `pid`) is currently unreachable.
    is_running = False
    if not is_running :
        # config
        cfg = configparser.ConfigParser()
        cfg.read( '/etc/chromatic-universe/cci_trinity_async.ini' )
        cci_mongo = motor.motor_tornado.MotorClient( cfg['stream-paths']['mongo_db_replica_uri'] )
        db = cci_mongo.imap2017
        kernel_host_endpoint = cfg['mta-client']['kernel_host_endpoint']
        kernel_host_port = cfg['mta-client']['kernel_host_port']
        # Watch the queue for when new items show up
        _logger.info( '...initializing queue vulture....' )
        # policies
        tornado.ioloop.IOLoop.instance().add_callback( client.watch_queue )
        # session status
        tornado.ioloop.IOLoop.instance().add_callback( search_client.watch_search_queue )
        # stream data
        tornado.ioloop.IOLoop.instance().add_callback( stream_client.watch_stream_queue )
        settings = {
            "static_path": os.path.join(os.path.dirname(__file__), "static"),
        }
        # create the web server with async coroutines
        _logger.info( '...initializing http services....' )
        utility_application = tornado.web.Application([ ( r'/cci_mta_trinity/start', queue_handler_start_policy ) ,
                                                        ( r'/cci_mta_trinity/stop' , queue_handler_stop_policy ) ,
                                                        ( r'/cci_mta_trinity' , vulture_index ) ,
                                                        ( r'/cci_mta_trinity/post_stream_msg' , stream_queue_handler_post_msg ) ,
                                                        ( r'/cci_mta_trinity/search_gadget' , search_gadget_queue_handler ) ,
                                                        ( r'/cci_mta_trinity/sibling' , cci_sibling_probe ) ,
                                                      ] , Debug=True , **settings )
        # NOTE(review): several log lines below are offset from the ports
        # actually bound (e.g. "7081" logged before listen(7089)) -- confirm.
        _logger.info( '...starting imap fetch listener on port 7081....' )
        utility_application.listen( 7089 )
        _logger.info( '...starting async utility server on port 7089....' )
        tr_mongo_fetch.imap_fetch_application.listen( 7081 )
        _logger.info( '...starting imap search listener on port 7091....' )
        tr_mongo_search.imap_search_application.listen( 7091 )
        # interrupts
        try :
            _logger.info( '...instantiating interrupt interface....' )
            post_faux_interrupt( "ping" )
        except Exception as e :
            # One retry after a short pause before declaring the syscall
            # interface out of service.
            try :
                _logger.error( str( e ) )
                sleep( 3 )
                post_faux_interrupt( "ping" )
            except Exception as e :
                _logger.error( '...interrupt syscall interface out of service....' )
        # start heartbeat in 30 seconds
        #_logger.info( '...scheduling search gadget ....' )
        #tornado.ioloop.IOLoop.instance().call_later( 30 , start_search_gadget_callback )
        # start default document policy in 60 seconds
        #_logger.info( '...scheduling default document policy ....' )
        #tornado.ioloop.IOLoop.instance().call_later( 60 , start_policy_local )
        # start web socket push
        psettings = {
            "static_path": os.path.join(os.path.dirname(__file__), "static"),
        }
        push_application = tornado.web.Application([ (r'/cci_mta_trinity-stream', trinity_push_handler ) ] ,
                                                   Debug=True , **psettings )
        # ])
        _logger.info( '...starting web socket push server on port 7082 ....' )
        http_server = tornado.httpserver.HTTPServer( push_application )
        http_server.listen( 7082 )
        # set final callback for 10 seconds to signal init successs
        #_logger.info( '...scheduling fanfare ....' )
        #tornado.ioloop.IOLoop.instance().call_later( 15 , init_update_callback )
        # tornado wsgi server , flask application cci-trinity , system call interface
        _logger.info( '...starting cci_trinity.... ' )
        http_server = HTTPServer( WSGIContainer( trinity_app ) )
        http_server.listen( 7080 , address='127.0.0.1' )
        #kafka
        tornado.ioloop.IOLoop.instance().add_callback( tr_kafka_intf.consume_imap_notifications )
        tornado.ioloop.IOLoop.instance().add_callback( tr_kafka_intf.consume_mta_append_notifications )
        # run main io
        _logger.info( '...starting main io loop ....' )
        tornado.ioloop.IOLoop.instance().start()
    else :
        _logger.info( '...server already running... pid %s....' % pid )
        sys.exit( 1 )
| {
"content_hash": "ee23582cfcce8018fe5d8ca56886793a",
"timestamp": "",
"source": "github",
"line_count": 829,
"max_line_length": 134,
"avg_line_length": 33.635705669481304,
"alnum_prop": 0.43426337684693733,
"repo_name": "chromatic-universe/cci-stream-mta",
"id": "7c17e5ee92eda6766e031009855fdf83ee157967",
"size": "27936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/cci_stream_tornado/cci_mta_trinity/cci_trinity_async.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9733"
},
{
"name": "C++",
"bytes": "153445"
},
{
"name": "CMake",
"bytes": "13111"
},
{
"name": "CSS",
"bytes": "1988"
},
{
"name": "HTML",
"bytes": "19057"
},
{
"name": "Python",
"bytes": "274167"
},
{
"name": "Shell",
"bytes": "4488"
},
{
"name": "Thrift",
"bytes": "4716"
}
],
"symlink_target": ""
} |
"""
gettext for openstack-common modules.
Usual usage in an openstack.common module:
from payloadvoice.openstack.common.gettextutils import _
"""
import copy
import gettext
import logging
import os
import re
try:
import UserString as _userString
except ImportError:
import collections as _userString
from babel import localedata
import six
_localedir = os.environ.get('payloadvoice'.upper() + '_LOCALEDIR')
_t = gettext.translation('payloadvoice', localedir=_localedir, fallback=True)
_AVAILABLE_LANGUAGES = {}
USE_LAZY = False
def enable_lazy():
    """Turn on lazy translation for the module-level _() function.

    Invoke this once, early in program startup, so that later calls to
    gettextutils._ build deferred Message objects instead of eagerly
    translated strings.  Useful for projects that import _ directly
    rather than relying on gettextutils.install().
    """
    global USE_LAZY
    USE_LAZY = True
def _(msg):
    """Translate *msg* eagerly, or wrap it in a lazy Message when enabled."""
    if not USE_LAZY:
        # Eager path: resolve through the module-level translation now.
        if six.PY3:
            return _t.gettext(msg)
        return _t.ugettext(msg)
    return Message(msg, 'payloadvoice')
def install(domain, lazy=False):
    """Install a _() builtin for the given translation domain.

    Like gettext.install(), except that the default locale directory
    (e.g. /usr/share/locale) may be overridden through a per-domain
    environment variable such as NOVA_LOCALEDIR ("<DOMAIN>_LOCALEDIR").

    :param domain: the translation domain
    :param lazy: when True, install a _() that builds Message objects
                 for deferred translation instead of plain strings
    """
    if not lazy:
        # Eager installation; honour the per-domain localedir override.
        localedir = os.environ.get('%s_LOCALEDIR' % domain.upper())
        if six.PY3:
            gettext.install(domain, localedir=localedir)
        else:
            gettext.install(domain, localedir=localedir, unicode=True)
        return

    # NOTE(mrodden): Lazy gettext functionality.
    #
    # Instead of translating at call time, _() produces Message objects
    # that capture the msgid and can be rendered in any available
    # locale later, once more information (e.g. the request locale) is
    # known.  An example LocaleHandler elsewhere in this module shows
    # translating such Messages per log handler.
    def _lazy_gettext(msg):
        """Create and return a Message object for deferred translation."""
        return Message(msg, domain)

    from six import moves
    moves.builtins.__dict__['_'] = _lazy_gettext
class Message(_userString.UserString, object):
"""Class used to encapsulate translatable messages."""
def __init__(self, msg, domain):
# _msg is the gettext msgid and should never change
self._msg = msg
self._left_extra_msg = ''
self._right_extra_msg = ''
self._locale = None
self.params = None
self.domain = domain
@property
def data(self):
# NOTE(mrodden): this should always resolve to a unicode string
# that best represents the state of the message currently
localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
if self.locale:
lang = gettext.translation(self.domain,
localedir=localedir,
languages=[self.locale],
fallback=True)
else:
# use system locale for translations
lang = gettext.translation(self.domain,
localedir=localedir,
fallback=True)
if six.PY3:
ugettext = lang.gettext
else:
ugettext = lang.ugettext
full_msg = (self._left_extra_msg +
ugettext(self._msg) +
self._right_extra_msg)
if self.params is not None:
full_msg = full_msg % self.params
return six.text_type(full_msg)
@property
def locale(self):
return self._locale
@locale.setter
def locale(self, value):
self._locale = value
if not self.params:
return
# This Message object may have been constructed with one or more
# Message objects as substitution parameters, given as a single
# Message, or a tuple or Map containing some, so when setting the
# locale for this Message we need to set it for those Messages too.
if isinstance(self.params, Message):
self.params.locale = value
return
if isinstance(self.params, tuple):
for param in self.params:
if isinstance(param, Message):
param.locale = value
return
if isinstance(self.params, dict):
for param in self.params.values():
if isinstance(param, Message):
param.locale = value
def _save_dictionary_parameter(self, dict_param):
full_msg = self.data
# look for %(blah) fields in string;
# ignore %% and deal with the
# case where % is first character on the line
keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg)
# if we don't find any %(blah) blocks but have a %s
if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
# apparently the full dictionary is the parameter
params = copy.deepcopy(dict_param)
else:
params = {}
for key in keys:
try:
params[key] = copy.deepcopy(dict_param[key])
except TypeError:
# cast uncopyable thing to unicode string
params[key] = six.text_type(dict_param[key])
return params
def _save_parameters(self, other):
# we check for None later to see if
# we actually have parameters to inject,
# so encapsulate if our parameter is actually None
if other is None:
self.params = (other, )
elif isinstance(other, dict):
self.params = self._save_dictionary_parameter(other)
else:
# fallback to casting to unicode,
# this will handle the problematic python code-like
# objects that cannot be deep-copied
try:
self.params = copy.deepcopy(other)
except TypeError:
self.params = six.text_type(other)
return self
# overrides to be more string-like
def __unicode__(self):
return self.data
def __str__(self):
if six.PY3:
return self.__unicode__()
return self.data.encode('utf-8')
def __getstate__(self):
to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
'domain', 'params', '_locale']
new_dict = self.__dict__.fromkeys(to_copy)
for attr in to_copy:
new_dict[attr] = copy.deepcopy(self.__dict__[attr])
return new_dict
def __setstate__(self, state):
for (k, v) in state.items():
setattr(self, k, v)
# operator overloads
def __add__(self, other):
copied = copy.deepcopy(self)
copied._right_extra_msg += other.__str__()
return copied
def __radd__(self, other):
copied = copy.deepcopy(self)
copied._left_extra_msg += other.__str__()
return copied
def __mod__(self, other):
# do a format string to catch and raise
# any possible KeyErrors from missing parameters
self.data % other
copied = copy.deepcopy(self)
return copied._save_parameters(other)
def __mul__(self, other):
return self.data * other
def __rmul__(self, other):
return other * self.data
def __getitem__(self, key):
return self.data[key]
def __getslice__(self, start, end):
return self.data.__getslice__(start, end)
def __getattribute__(self, name):
# NOTE(mrodden): handle lossy operations that we can't deal with yet
# These override the UserString implementation, since UserString
# uses our __class__ attribute to try and build a new message
# after running the inner data string through the operation.
# At that point, we have lost the gettext message id and can just
# safely resolve to a string instead.
ops = ['capitalize', 'center', 'decode', 'encode',
'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
if name in ops:
return getattr(self.data, name)
else:
return _userString.UserString.__getattribute__(self, name)
def get_available_languages(domain):
    """Return the list of languages with translations for *domain*.

    :param domain: the translation domain to inspect
    """
    if domain in _AVAILABLE_LANGUAGES:
        return copy.copy(_AVAILABLE_LANGUAGES[domain])

    localedir_var = '%s_LOCALEDIR' % domain.upper()

    def has_translation(lang):
        return gettext.find(domain,
                            localedir=os.environ.get(localedir_var),
                            languages=[lang]) is not None

    # NOTE(mrodden): en_US should always be available (and first in case
    # order matters) since our in-line message strings are en_US
    language_list = ['en_US']
    # NOTE(luisg): Babel <1.0 used a function called list(), which was
    # renamed to locale_identifiers() in >=1.0; the requirements allow
    # either version, so probe for both defensively.  This check can go
    # once the master requirements list moves to Babel >=1.0.
    list_identifiers = (getattr(localedata, 'list', None) or
                        getattr(localedata, 'locale_identifiers'))
    language_list.extend(ident for ident in list_identifiers()
                         if has_translation(ident))
    _AVAILABLE_LANGUAGES[domain] = language_list
    return copy.copy(language_list)
def get_localized_message(message, user_locale):
    """Return *message* rendered in *user_locale* when it is a Message.

    Plain (non-Message) objects are returned unchanged.
    """
    if not isinstance(message, Message):
        return message
    if user_locale:
        message.locale = user_locale
    return six.text_type(message)
class LocaleHandler(logging.Handler):
    """Logging handler that renders Message records in a fixed locale.

    Wraps another logging.Handler: any LogRecord whose msg is a Message
    gets its locale set before being forwarded to the wrapped target,
    effectively allowing many logs, each in its own locale.
    """

    def __init__(self, locale, target):
        """Initialize a LocaleHandler.

        :param locale: locale used to translate Message records
        :param target: logging.Handler that receives the (translated)
                       LogRecord objects
        """
        logging.Handler.__init__(self)
        self.locale = locale
        self.target = target

    def emit(self, record):
        # Setting the locale makes the Message render itself in that
        # locale when the target handler stringifies it.
        if isinstance(record.msg, Message):
            record.msg.locale = self.locale
        self.target.emit(record)
| {
"content_hash": "d38c7750f5f3e587601aab94b756f74c",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 79,
"avg_line_length": 34.97982708933718,
"alnum_prop": 0.5958971824023727,
"repo_name": "kickstandproject/payload-voice",
"id": "5fc5b74544c45b660e3a7dec4fcf075935a2bf66",
"size": "12839",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "payloadvoice/openstack/common/gettextutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "106434"
}
],
"symlink_target": ""
} |
"""
util.py holds functions used in more than one module.
NOTE: this code is uploaded to emr with mrjobs
"""
import cPickle
import functools
import boto.s3
from boto.exception import S3ResponseError
from sherlock.common.aws import get_boto_creds
class memoized(object):
    """Decorator caching a function's results keyed by its arguments.

    Repeated calls with the same arguments return the cached value and
    skip re-evaluating the function.

    Based upon from http://wiki.python.org/moin/PythonDecoratorLibrary#Memoize

    Nota bene: this decorator memoizes /all/ calls to the function. For a
    memoization decorator with limited cache size, consider:
    http://code.activestate.com/recipes/496879-memoize-decorator-function-with-cache-size-limit/
    """

    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kwargs):
        # Serialize the arguments to obtain a hashable cache key; this
        # fails fast when the args cannot be pickled.
        key = cPickle.dumps((args, kwargs))
        if key not in self.cache:
            self.cache[key] = self.func(*args, **kwargs)
        return self.cache[key]

    def __repr__(self):
        """Return the function's docstring."""
        return self.func.__doc__

    def __get__(self, obj, objtype):
        """Support instance methods."""
        return functools.partial(self.__call__, obj)
def get_deep(x, path, default=None):
    """Access a value of a multi-level dict in one go.

    :param x: a multi-level dict
    :param path: a path to the desired key, either a dot-separated
        string or a list/tuple of keys
    :param default: a default value to return if no value at path
    :returns: the value at *path*, or *default* when any step is missing

    Examples:
        x = {'a': {'b': 5}}
        get_deep(x, 'a.b')       returns 5
        get_deep(x, ['a', 'b'])  returns 5
        get_deep(x, 'c', 5)      returns 5
    """
    if path is None or path == '':
        path_keys = []
    elif isinstance(path, (list, tuple)):
        # Idiom fix: isinstance instead of type(...) in (...), so
        # subclasses of list/tuple work too.
        path_keys = path
    else:
        path_keys = path.split('.')

    v = x or {}
    for k in path_keys:
        try:
            v = v.get(k)
        except TypeError:
            # e.g. unhashable key; treat the step as missing.
            v = None
        # Flattened from the original try/finally: a missing step at
        # any level yields the default.
        if v is None:
            return default
    return v
def is_s3_path(file_path):
    """Return True when *file_path* uses the s3:// scheme, else False."""
    return file_path.partition('://')[0] == 's3'
def parse_s3_path(file_path):
    """Split an s3:// uri into its bucket and key prefix.

    :param file_path: uri of the form s3://bucket/prefix
    :returns: tuple of (bucket, prefix)
    :raises ValueError: if *file_path* is not a well-formed s3 path
    """
    if not is_s3_path(file_path):
        raise ValueError('{0} is not a valid s3 path'.format(file_path))
    parse_array = file_path.split("/", 3)
    if len(parse_array) < 4:
        # BUG FIX: "s3://bucket" (no key part) used to raise a bare
        # IndexError; raise the documented ValueError instead.
        raise ValueError('{0} is not a valid s3 path'.format(file_path))
    bucket = parse_array[2]
    prefix = parse_array[3]
    return bucket, prefix
def _load_from_s3_region(conn, bucket_name, key_name):
bucket = conn.get_bucket(bucket_name)
key = bucket.get_key(key_name)
if key is None:
raise ValueError('s3://{0}/{1}: no such file'.format(
bucket_name, key_name
))
return key.get_contents_as_string()
def load_from_s3_file(s3_uri):
    """Load data from S3.

    Useful for loading small config or schema files.

    :param s3_uri: path to S3 uri
    :returns: file contents
    """
    _, _, path = s3_uri.partition('://')
    bucket_name, _, key_name = path.partition('/')

    def preferred_region(region):
        # Regions whose name appears in the bucket name sort first,
        # since the bucket most likely lives there.
        return region.name not in bucket_name

    boto_creds = get_boto_creds()
    for region in sorted(boto.s3.regions(), key=preferred_region):
        try:
            conn = boto.s3.connect_to_region(region.name, **boto_creds)
            return _load_from_s3_region(conn, bucket_name, key_name)
        except S3ResponseError as e:
            # skip to next region if access is not allowed from this one
            if e.status not in (403, 301):
                raise
    raise ValueError("{0}: No valid region found".format(s3_uri))
def load_from_file(path):
    """Load data from local disk or S3, transparently for the caller.

    Useful for loading small config or schema files.

    :param path: path to file on disk or S3 uri
    :returns: file contents
    """
    if not is_s3_path(path):
        with open(path, 'r') as handle:
            return handle.read()
    return load_from_s3_file(path)
| {
"content_hash": "1a115018971389d2ddbc7a7ec650427c",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 96,
"avg_line_length": 30.085714285714285,
"alnum_prop": 0.6103988603988604,
"repo_name": "Yelp/mycroft",
"id": "b945c9c15b79ca0634d28c5d90501af63dd4bb46",
"size": "4236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mycroft/sherlock/common/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "389"
},
{
"name": "Dockerfile",
"bytes": "1402"
},
{
"name": "HTML",
"bytes": "32973"
},
{
"name": "JavaScript",
"bytes": "41476"
},
{
"name": "Makefile",
"bytes": "2094"
},
{
"name": "Python",
"bytes": "590882"
},
{
"name": "Shell",
"bytes": "1011"
}
],
"symlink_target": ""
} |
from PySide import QtCore, QtGui
class TabDialog(QtGui.QDialog):
    """Dialog presenting file information across three tabs."""

    def __init__(self, fileName, parent=None):
        super(TabDialog, self).__init__(parent)

        fileInfo = QtCore.QFileInfo(fileName)

        tabWidget = QtGui.QTabWidget()
        for page, title in ((GeneralTab(fileInfo), "General"),
                            (PermissionsTab(fileInfo), "Permissions"),
                            (ApplicationsTab(fileInfo), "Applications")):
            tabWidget.addTab(page, title)

        buttonBox = QtGui.QDialogButtonBox(
            QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
        buttonBox.accepted.connect(self.accept)
        buttonBox.rejected.connect(self.reject)

        layout = QtGui.QVBoxLayout()
        layout.addWidget(tabWidget)
        layout.addWidget(buttonBox)
        self.setLayout(layout)

        self.setWindowTitle("Tab Dialog")
class GeneralTab(QtGui.QWidget):
    """Tab showing name, path, size and timestamps of a file."""

    def __init__(self, fileInfo, parent=None):
        super(GeneralTab, self).__init__(parent)

        def framed_label(text):
            # Sunken-panel label used for all read-only values.
            label = QtGui.QLabel(text)
            label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
            return label

        layout = QtGui.QVBoxLayout()
        layout.addWidget(QtGui.QLabel("File Name:"))
        layout.addWidget(QtGui.QLineEdit(fileInfo.fileName()))
        layout.addWidget(QtGui.QLabel("Path:"))
        layout.addWidget(framed_label(fileInfo.absoluteFilePath()))
        layout.addWidget(QtGui.QLabel("Size:"))
        layout.addWidget(framed_label("%d K" % (fileInfo.size() // 1024)))
        layout.addWidget(QtGui.QLabel("Last Read:"))
        layout.addWidget(framed_label(fileInfo.lastRead().toString()))
        layout.addWidget(QtGui.QLabel("Last Modified:"))
        layout.addWidget(framed_label(fileInfo.lastModified().toString()))
        layout.addStretch(1)
        self.setLayout(layout)
class PermissionsTab(QtGui.QWidget):
    """Tab showing permission flags and ownership of a file."""

    def __init__(self, fileInfo, parent=None):
        super(PermissionsTab, self).__init__(parent)

        permissionsGroup = QtGui.QGroupBox("Permissions")
        permissionsLayout = QtGui.QVBoxLayout()
        for caption, is_set in (("Readable", fileInfo.isReadable()),
                                ("Writable", fileInfo.isWritable()),
                                ("Executable", fileInfo.isExecutable())):
            box = QtGui.QCheckBox(caption)
            if is_set:
                box.setChecked(True)
            permissionsLayout.addWidget(box)
        permissionsGroup.setLayout(permissionsLayout)

        def framed_label(text):
            # Sunken-panel label used for read-only values.
            label = QtGui.QLabel(text)
            label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
            return label

        ownerGroup = QtGui.QGroupBox("Ownership")
        ownerLayout = QtGui.QVBoxLayout()
        ownerLayout.addWidget(QtGui.QLabel("Owner"))
        ownerLayout.addWidget(framed_label(fileInfo.owner()))
        ownerLayout.addWidget(QtGui.QLabel("Group"))
        ownerLayout.addWidget(framed_label(fileInfo.group()))
        ownerGroup.setLayout(ownerLayout)

        mainLayout = QtGui.QVBoxLayout()
        mainLayout.addWidget(permissionsGroup)
        mainLayout.addWidget(ownerGroup)
        mainLayout.addStretch(1)
        self.setLayout(mainLayout)
class ApplicationsTab(QtGui.QWidget):
    """Tab listing applications that can open the file."""

    def __init__(self, fileInfo, parent=None):
        super(ApplicationsTab, self).__init__(parent)

        topLabel = QtGui.QLabel("Open with:")

        applicationsListBox = QtGui.QListWidget()
        # Placeholder entries; a real implementation would query the
        # system for registered applications.
        applications = ["Application %d" % i for i in range(1, 31)]
        applicationsListBox.insertItems(0, applications)

        # BUG FIX: the original created a throwaway QtGui.QCheckBox()
        # here that was immediately replaced in both branches below.
        if fileInfo.suffix():
            alwaysCheckBox = QtGui.QCheckBox(
                "Always use this application to "
                "open files with the extension '%s'" % fileInfo.suffix())
        else:
            alwaysCheckBox = QtGui.QCheckBox(
                "Always use this application to "
                "open this type of file")

        layout = QtGui.QVBoxLayout()
        layout.addWidget(topLabel)
        layout.addWidget(applicationsListBox)
        layout.addWidget(alwaysCheckBox)
        self.setLayout(layout)
if __name__ == '__main__':
    import sys
    # Show a properties dialog for the file given on the command line,
    # defaulting to the current directory when no argument is passed.
    app = QtGui.QApplication(sys.argv)
    if len(sys.argv) >= 2:
        fileName = sys.argv[1]
    else:
        fileName = "."
    tabdialog = TabDialog(fileName)
sys.exit(tabdialog.exec_()) | {
"content_hash": "7e16c3af756a823bc4228835393c7e27",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 101,
"avg_line_length": 34.140127388535035,
"alnum_prop": 0.6675373134328358,
"repo_name": "pauloacmelo/papelex_winthor",
"id": "635b738c7dbfa39acaa10abd540aa5e4b09ffb5f",
"size": "6441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tab_example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2116"
},
{
"name": "Python",
"bytes": "711776"
}
],
"symlink_target": ""
} |
import urllib.request
import urllib.parse
import json as j
import sys
__version__ = 0.242
def query(query, useragent='python-duckduckgo '+str(__version__), safesearch=True, html=False, meanings=True, **kwargs):
    """
    Query DuckDuckGo, returning a Results object.

    Here's a query that's unlikely to change:

    >>> result = query('1 + 1')
    >>> result.type
    'nothing'
    >>> result.answer.text
    '1 + 1 = 2'
    >>> result.answer.type
    'calc'

    Keyword arguments:
    useragent: UserAgent to use while querying.
               Default: "python-duckduckgo <version>" (str)
    safesearch: True for on, False for off. Default: True (bool)
    html: True to allow HTML in output. Default: False (bool)
    meanings: True to include disambiguations in results (bool)
    Any other keyword arguments are passed directly to DuckDuckGo as URL params.
    """
    # BUG FIX: the original ended the docstring with `% __version__`,
    # turning it into an expression statement and discarding __doc__.
    safesearch = '1' if safesearch else '-1'
    html = '0' if html else '1'
    meanings = '0' if meanings else '1'
    params = {
        'q': query,
        'o': 'json',
        'kp': safesearch,
        'no_redirect': '1',
        'no_html': html,
        'd': meanings,
    }
    params.update(kwargs)
    encparams = urllib.parse.urlencode(params)
    url = 'http://api.duckduckgo.com/?' + encparams

    request = urllib.request.Request(url, headers={'User-Agent': useragent})
    response = urllib.request.urlopen(request)
    json = j.loads(response.read().decode('utf-8'))
    response.close()

    return Results(json)
class Results(object):
    """Parsed top-level response from the DuckDuckGo API."""

    # One-letter API 'Type' codes mapped to readable names.
    _TYPES = {'A': 'answer', 'D': 'disambiguation',
              'C': 'category', 'N': 'name',
              'E': 'exclusive', '': 'nothing'}

    def __init__(self, json):
        self.type = self._TYPES.get(json.get('Type', ''), '')
        self.json = json
        self.api_version = None  # compat
        self.heading = json.get('Heading', '')
        self.results = [Result(elem) for elem in json.get('Results', [])]
        self.related = [Result(elem)
                        for elem in json.get('RelatedTopics', [])]
        self.abstract = Abstract(json)
        self.redirect = Redirect(json)
        self.definition = Definition(json)
        self.answer = Answer(json)
        self.image = Image({'Result': json.get('Image', '')})
class Abstract(object):
    """Abstract (topic summary) section of an API response."""

    def __init__(self, json):
        get = json.get
        self.html = get('Abstract', '')
        self.text = get('AbstractText', '')
        self.url = get('AbstractURL', '')
        self.source = get('AbstractSource')
class Redirect(object):
    """Redirect (bang) section of an API response."""

    def __init__(self, json):
        self.url = json.get('Redirect', '')
class Result(object):
    """A single result (or nested topic group) from the API."""

    def __init__(self, json):
        self.topics = json.get('Topics', [])
        if self.topics:
            # Nested disambiguation group: recurse and stop here.
            self.topics = [Result(t) for t in self.topics]
            return
        self.html = json.get('Result')
        self.text = json.get('Text')
        self.url = json.get('FirstURL')

        icon_json = json.get('Icon')
        self.icon = Image(icon_json) if icon_json is not None else None
class Image(object):
    """An image reference, optionally carrying its dimensions."""

    def __init__(self, json):
        self.url = json.get('Result')
        self.height = json.get('Height')
        self.width = json.get('Width')
class Answer(object):
    """Instant-answer section of an API response."""

    def __init__(self, json):
        self.text = json.get('Answer')
        self.type = json.get('AnswerType', '')
class Definition(object):
    """Dictionary-definition section of an API response."""

    def __init__(self, json):
        self.text = json.get('Definition', '')
        self.url = json.get('DefinitionURL')
        self.source = json.get('DefinitionSource')
def get_zci(q, web_fallback=True, priority=None, urls=True, **kwargs):
    """Get a single (and hopefully the best) ZCI result for *q*.

    :param q: the query string
    :param web_fallback: fall back to the first web (redirect) result
        when no ZCI field yields an answer
    :param priority: order in which result fields are checked; defaults
        to ['answer', 'abstract', 'related.0', 'definition']
    :param urls: append the result URL, when available, to the text
    Remaining keyword arguments are passed to query().  Falls back to
    'Sorry, no results.' if nothing can be found.
    """
    # BUG FIX: priority used to be a mutable default argument shared
    # across calls; build the default list per call instead.
    if priority is None:
        priority = ['answer', 'abstract', 'related.0', 'definition']

    ddg = query('\\' + q, **kwargs)
    response = ''

    for p in priority:
        ps = p.split('.')
        # Renamed from `type`, which shadowed the builtin.
        field = ps[0]
        index = int(ps[1]) if len(ps) > 1 else None

        result = getattr(ddg, field)
        if index is not None:
            if not hasattr(result, '__getitem__'):
                raise TypeError('%s field is not indexable' % field)
            result = result[index] if len(result) > index else None
        if not result:
            continue

        if result.text:
            response = result.text
            if hasattr(result, 'url') and urls and result.url:
                response += ' (%s)' % result.url
        if response:
            break

    # if there still isn't anything, try to get the first web result
    if not response and web_fallback and ddg.redirect.url:
        response = ddg.redirect.url

    # final fallback
    return response or 'Sorry, no results.'
def main():
    """CLI entry point: query DuckDuckGo and dump the raw JSON fields."""
    if len(sys.argv) > 1:
        q = query(' '.join(sys.argv[1:]))
        # BUG FIX: dict.keys() is a view without .sort() and `unicode`
        # does not exist on Python 3 (this file targets Python 3 via
        # urllib.request); use sorted() and isinstance with (str, int).
        for key in sorted(q.json):
            sys.stdout.write(key)
            value = q.json[key]
            if isinstance(value, (str, int)):
                print(':', value)
            else:
                sys.stdout.write('\n')
                for item in value:
                    print('\t', item)
    else:
        print('Usage: %s [query]' % sys.argv[0])
| {
"content_hash": "09e34686f383fda1d26429119074de63",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 120,
"avg_line_length": 30.869318181818183,
"alnum_prop": 0.5766611448555126,
"repo_name": "haitaka/DroiTaka",
"id": "8769347b4880035c0ef624a8eab36276a07cd1fa",
"size": "5680",
"binary": false,
"copies": "1",
"ref": "refs/heads/rebuild",
"path": "cogs/utils/api/duckduckgo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "81107"
}
],
"symlink_target": ""
} |
import os
import asyncio
from rainfall.web import Application, HTTPHandler, WSHandler
from rainfall.http import HTTPError
class HelloHandler(HTTPHandler):
    """Returns a constant plain-text greeting."""

    def handle(self, request):
        body = 'Hello!'
        return body
class TemplateHandler(HTTPHandler):
    """Renders the base.html template with a fixed context."""

    def handle(self, request):
        context = {'text': 'Rendered'}
        return self.render('base.html', **context)
class HTTPErrorHandler(HTTPHandler):
    """Responds with an explicit 403 Forbidden error."""

    def handle(self, request):
        forbidden = HTTPError(403)
        return forbidden
class ExceptionHandler(HTTPHandler):
    """Raises an unhandled exception to exercise 500-error handling."""
    def handle(self, request):
        raise Exception('Fail')
class SleepHandler(HTTPHandler):
    """Responds after a short asynchronous delay."""
    @asyncio.coroutine
    def handle(self, request):
        # Yield control to the event loop for 100 ms, then respond.
        yield from asyncio.sleep(0.1)
        return 'Done'
class ParamHandler(HTTPHandler):
    """Echoes the numeric path parameter captured by the route regex."""
    def handle(self, request, number):
        # `number` is the named group from the URL pattern.
        return number
class GetFormHandler(HTTPHandler):
    """Renders the form template with any GET query parameters."""

    def handle(self, request):
        data = request.GET or {}
        return self.render('form.html', method='GET', data=data)
class PostFormHandler(HTTPHandler):
    """Renders the form template with any POST body parameters."""

    def handle(self, request):
        data = request.POST or {}
        return self.render('form.html', method='POST', data=data)
class EtagHandler(HTTPHandler):
    """Serves a fixed payload with ETag support enabled."""
    # Enable conditional-request (ETag) handling in the framework.
    use_etag = True
    payload = "PowerOfYourHeart"
    def handle(self, request):
        return self.payload
class EchoWSHandler(WSHandler):
    """WebSocket handler that echoes every message back to the client."""
    @asyncio.coroutine
    def on_message(self, message):
        # Send the incoming frame back over the same connection.
        yield from self.send_message(message)
# Application settings: template directory and bind host.
settings = {
    'template_path': os.path.join(os.path.dirname(__file__), "templates"),
    'host': '127.0.0.1',
}
# URL routing table: regex pattern -> handler class.
app = Application(
    {
        r'^/$': HelloHandler,
        r'^/template$': TemplateHandler,
        r'^/http_error$': HTTPErrorHandler,
        r'^/exc_error$': ExceptionHandler,
        r'^/sleep$': SleepHandler,
        r'^/param/(?P<number>\d+)$': ParamHandler,
        r'^/forms/get$': GetFormHandler,
        r'^/forms/post$': PostFormHandler,
        r'^/etag$': EtagHandler,
        r'^/ws$': EchoWSHandler,
    },
    settings=settings,
)
# Run the test application directly when executed as a script.
if __name__ == '__main__':
    app.run()
| {
"content_hash": "a169fea01cb09c24da1cb868605a534a",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 74,
"avg_line_length": 19.574074074074073,
"alnum_prop": 0.6234626300851467,
"repo_name": "mind1master/rainfall",
"id": "8422e514cdb78367d766c7652580eb3f41aaabca",
"size": "2114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rainfall/tests/app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "6770"
},
{
"name": "Python",
"bytes": "36618"
}
],
"symlink_target": ""
} |
import itertools
import os
import shutil
import struct
import tempfile
import uuid
from oslo_config import cfg
from oslo_log import log as oslo_logging
from cloudbaseinit import exception
from cloudbaseinit.metadata.services.osconfigdrive import base
from cloudbaseinit.osutils import factory as osutils_factory
from cloudbaseinit.utils.windows import disk
from cloudbaseinit.utils.windows import vfat
# Configuration options registered with oslo.config below.
opts = [
    cfg.StrOpt('bsdtar_path', default='bsdtar.exe',
               help='Path to "bsdtar", used to extract ISO ConfigDrive '
                    'files'),
]
CONF = cfg.CONF
CONF.register_opts(opts)
LOG = oslo_logging.getLogger(__name__)
# Volume label that identifies an OpenStack config drive.
CONFIG_DRIVE_LABEL = 'config-2'
# Upper bound on a device sector size; also the ISO-copy chunk size.
MAX_SECTOR_SIZE = 4096
# Absolute offset values and the ISO magic string.
OFFSET_BOOT_RECORD = 0x8000
OFFSET_ISO_ID = OFFSET_BOOT_RECORD + 1
ISO_ID = b'CD001'
# Little-endian unsigned short size values.
OFFSET_VOLUME_SIZE = OFFSET_BOOT_RECORD + 80
OFFSET_BLOCK_SIZE = OFFSET_BOOT_RECORD + 128
PEEK_SIZE = 2
class WindowsConfigDriveManager(base.BaseConfigDriveManager):
    def __init__(self):
        """Initialize the manager and grab the platform OS utils."""
        super(WindowsConfigDriveManager, self).__init__()
        self._osutils = osutils_factory.get_os_utils()
def _check_for_config_drive(self, drive):
label = self._osutils.get_volume_label(drive)
if label and label.lower() == CONFIG_DRIVE_LABEL and \
os.path.exists(os.path.join(drive,
'openstack\\latest\\'
'meta_data.json')):
LOG.info('Config Drive found on %s', drive)
return True
return False
def _get_iso_file_size(self, device):
if not device.fixed:
return None
if not device.size > (OFFSET_BLOCK_SIZE + PEEK_SIZE):
return None
off = device.seek(OFFSET_ISO_ID)
magic = device.read(len(ISO_ID), skip=OFFSET_ISO_ID - off)
if ISO_ID != magic:
return None
off = device.seek(OFFSET_VOLUME_SIZE)
volume_size_bytes = device.read(PEEK_SIZE,
skip=OFFSET_VOLUME_SIZE - off)
off = device.seek(OFFSET_BLOCK_SIZE)
block_size_bytes = device.read(PEEK_SIZE,
skip=OFFSET_BLOCK_SIZE - off)
volume_size = struct.unpack("<H", volume_size_bytes)[0]
block_size = struct.unpack("<H", block_size_bytes)[0]
return volume_size * block_size
def _write_iso_file(self, device, iso_file_path, iso_file_size):
with open(iso_file_path, 'wb') as stream:
offset = 0
# Read multiples of the sector size bytes
# until the entire ISO content is written.
while offset < iso_file_size:
real_offset = device.seek(offset)
bytes_to_read = min(MAX_SECTOR_SIZE, iso_file_size - offset)
data = device.read(bytes_to_read, skip=offset - real_offset)
stream.write(data)
offset += bytes_to_read
def _extract_files_from_iso(self, iso_file_path):
args = [CONF.bsdtar_path, '-xf', iso_file_path,
'-C', self.target_path]
(out, err, exit_code) = self._osutils.execute_process(args, False)
if exit_code:
raise exception.CloudbaseInitException(
'Failed to execute "bsdtar" from path "%(bsdtar_path)s" with '
'exit code: %(exit_code)s\n%(out)s\n%(err)s' % {
'bsdtar_path': CONF.bsdtar_path,
'exit_code': exit_code,
'out': out, 'err': err})
def _extract_iso_from_devices(self, devices):
"""Search across multiple devices for a raw ISO."""
extracted = False
iso_file_path = os.path.join(tempfile.gettempdir(),
str(uuid.uuid4()) + '.iso')
for device in devices:
try:
with device:
iso_file_size = self._get_iso_file_size(device)
if iso_file_size:
LOG.info('ISO9660 disk found on %s', device)
self._write_iso_file(device, iso_file_path,
iso_file_size)
self._extract_files_from_iso(iso_file_path)
extracted = True
break
except Exception as exc:
LOG.warning('ISO extraction failed on %(device)s with '
'%(error)r', {"device": device, "error": exc})
if os.path.isfile(iso_file_path):
os.remove(iso_file_path)
return extracted
def _get_config_drive_from_cdrom_drive(self):
    """Copy config drive files from the first CD-ROM drive holding one."""
    for drive_letter in self._osutils.get_cdrom_drives():
        if not self._check_for_config_drive(drive_letter):
            continue
        # copytree requires the destination to not exist yet.
        os.rmdir(self.target_path)
        shutil.copytree(drive_letter, self.target_path)
        return True
    return False
def _get_config_drive_from_raw_hdd(self):
    """Look for a raw ISO config drive directly on the physical disks."""
    devices = (disk.Disk(path)
               for path in self._osutils.get_physical_disks())
    return self._extract_iso_from_devices(devices)
def _get_config_drive_from_vfat(self):
    """Copy config drive files from the first VFAT-formatted disk."""
    for drive_path in self._osutils.get_physical_disks():
        if not vfat.is_vfat_drive(self._osutils, drive_path):
            continue
        LOG.info('Config Drive found on disk %r', drive_path)
        vfat.copy_from_vfat_drive(self._osutils, drive_path,
                                  self.target_path)
        return True
    return False
def _get_config_drive_from_partition(self):
    """Search every partition of every physical disk for a raw ISO."""
    for disk_path in self._osutils.get_physical_disks():
        physical_drive = disk.Disk(disk_path)
        with physical_drive:
            # Scan this drive's partitions before moving to the next disk.
            if self._extract_iso_from_devices(physical_drive.partitions()):
                return True
    return False
def _get_config_drive_from_volume(self):
    """Look through all the volumes for config drive."""
    for volume in self._osutils.get_volumes():
        if not self._check_for_config_drive(volume):
            continue
        # copytree requires the destination to not exist yet.
        os.rmdir(self.target_path)
        shutil.copytree(volume, self.target_path)
        return True
    return False
def _get_config_drive_files(self, cd_type, cd_location):
    """Run the retrieval strategy registered for (location, type).

    Returns the strategy's result, or False when no strategy matches.
    """
    label = "{}_{}".format(cd_location, cd_type)
    handler = self.config_drive_type_location.get(label)
    if handler is None:
        LOG.debug("Irrelevant type %(type)s in %(location)s location; "
                  "skip",
                  {"type": cd_type, "location": cd_location})
        return False
    return handler()
def get_config_drive_files(self, searched_types=None,
                           searched_locations=None):
    """Try every (type, location) combination until one yields files.

    Returns True as soon as a combination succeeds, False otherwise.
    """
    types = searched_types or []
    locations = searched_locations or []
    for cd_type, cd_location in itertools.product(types, locations):
        LOG.debug('Looking for Config Drive %(type)s in %(location)s',
                  {"type": cd_type, "location": cd_location})
        if self._get_config_drive_files(cd_type, cd_location):
            return True
    return False
@property
def config_drive_type_location(self):
    """Dispatch table mapping "<location>_<type>" labels to the bound
    method that retrieves config drive files from that source.
    """
    return {
        "cdrom_iso": self._get_config_drive_from_cdrom_drive,
        "hdd_iso": self._get_config_drive_from_raw_hdd,
        "hdd_vfat": self._get_config_drive_from_vfat,
        "partition_iso": self._get_config_drive_from_partition,
        "partition_vfat": self._get_config_drive_from_volume,
    }
| {
"content_hash": "594081cd298c769a931c8e695ba809d1",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 78,
"avg_line_length": 38.38048780487805,
"alnum_prop": 0.5644382308083375,
"repo_name": "cmin764/cloudbase-init",
"id": "80124421cded81eeff7c4346b9d762501e03bce2",
"size": "8485",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudbaseinit/metadata/services/osconfigdrive/windows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "789914"
}
],
"symlink_target": ""
} |
import unittest
from unittest import mock
from unittest.mock import patch
from canvas_sdk.client import RequestContext
class TestRequestContext(unittest.TestCase):
    """Unit tests for canvas_sdk.client.RequestContext.

    ``get_default_headers`` is patched for every test in ``setUp``; tests
    that need the real implementation call ``self.doCleanups()`` first.
    """
    longMessage = True

    def setUp(self):
        patcher = patch.object(RequestContext, 'get_default_headers')
        self.auth_token = 'my-auth-token'
        self.base_api_url = 'http://fake/canvas/instance/api'
        # Start the patch before registering the cleanup: if start() were to
        # fail, stopping an unstarted patcher would itself raise at cleanup.
        self.mock_default_headers = patcher.start()
        self.addCleanup(patcher.stop)

    def test_get_default_headers_contains_accept_text_json(self):
        """
        Test that the result of get_default_headers contains an accept header with text/json.
        """
        self.doCleanups()  # First, stop the patch so we can test the result of the actual call
        result = RequestContext.get_default_headers()
        self.assertTrue('Accept' in result, "Default headers should contain an Accept key")
        self.assertEqual('text/json', result.get('Accept', None),
                         "Default header should indicate that request accepts a text/json header")

    def test_initialize_calls_get_default_headers(self):
        """
        Test that get_default_headers is called as part of initializing a RequestContext
        """
        RequestContext(self.auth_token, self.base_api_url)
        self.mock_default_headers.assert_called_once_with()

    def test_initialize_raises_attribute_exception_when_using_malformed_api_url(self):
        """
        Test that an AttributeError is raised in case the base_api_url being passed is a
        malformed http or https url.
        """
        bogus_urls = (
            "http//canvas.com/api",
            "ftp://canavas.com/api",  # Valid url, but not http or https
            "https//canavas.com",
            "canvas.api.com/api"
        )
        for bogus_url in bogus_urls:
            with self.assertRaises(AttributeError):
                RequestContext(self.auth_token, bogus_url)

    def test_initialize_with_defaults_sets_auth_token_and_base_url(self):
        """
        Test that required parameters auth_token and base_api_url are set on creation
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(self.auth_token, context.auth_token,
                         "RequestContext should have an auth_token instance attribute")
        self.assertEqual(self.base_api_url, context.base_api_url,
                         "RequestContext should have a base_api_url instance attribute")

    def test_initialize_max_retries_defaults_to_zero(self):
        """
        Test that if max_retries is not passed in, the value defaults to zero
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(0, context.max_retries, "max_retries should default to zero on creation")

    def test_initialize_headers_defaults_to_get_default_headers(self):
        """
        Test that if headers is not passed in, the value defaults to result of get_default_headers
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        # Fixed copy-pasted message: this test is about headers, not max_retries.
        self.assertEqual(self.mock_default_headers.return_value,
                         context.headers,
                         "headers should default to the result of get_default_headers on creation")

    def test_initialize_cookies_defaults_to_none(self):
        """
        Test that if cookies is not passed in, the instance attribute defaults to None
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(None, context.cookies, "cookies should default to None on creation")

    def test_initialize_timeout_defaults_to_none(self):
        """
        Test that if timeout is not passed in, the instance attribute defaults to None
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(None, context.timeout, "timeout should default to None on creation")

    def test_initialize_cert_defaults_to_none(self):
        """
        Test that if cert is not passed in, the instance attribute defaults to None
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(None, context.cert, "cert should default to None on creation")

    def test_initialize_proxies_defaults_to_none(self):
        """
        Test that if proxies is not passed in, the instance attribute defaults to None
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(None, context.proxies, "proxies should default to None on creation")

    def test_initialize_verify_defaults_to_true(self):
        """
        Test that if verify is not passed in, the instance attribute defaults to True
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(True, context.verify, "verify should default to True on creation")

    def test_initialize_merges_headers(self):
        """
        Test that if headers are passed in, they are merged into the default headers
        """
        default_headers = {'Accept': 'text/json', 'Custom': 'foo'}
        self.mock_default_headers.return_value = default_headers
        context = RequestContext(
            self.auth_token, self.base_api_url, headers={'Custom': 'bar', 'Content': 'xml'})
        self.assertEqual({'Accept': 'text/json', 'Custom': 'bar', 'Content': 'xml'},
                         context.headers, "headers should be merged with default headers")

    def test_initialize_per_page_defaults_to_none(self):
        """
        Test that if per_page is not passed in, the instance attribute defaults to None
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        self.assertEqual(None, context.per_page, "per_page should default to None on creation")

    def test_initialize_from_dictionary(self):
        """
        Test that RequestContext can be initialized from a dictionary of settings
        """
        dict_settings = {
            'auth_token': self.auth_token,
            'base_api_url': self.base_api_url,
            'max_retries': 5,
            'per_page': 20,
            'timeout': 60,
            'headers': {'foo': 'bar'},
            'cookies': {'oreo': 'cookie'},
            'proxies': {'my': 'proxy'},
            'verify': False,
            'cert': 'my-cert'
        }
        self.mock_default_headers.return_value = {}  # Need to merge into a dictionary
        context = RequestContext(**dict_settings)
        for setting_key in dict_settings:
            self.assertEqual(dict_settings.get(setting_key), getattr(context, setting_key),
                             "Attribute %s should match value passed in through dictionary" % setting_key)

    @patch('canvas_sdk.client.request_context.OAuth2Bearer')
    def test_auth_property_returns_custom_authorization_callable(self, mock_oauth_bearer):
        """
        Test that auth property of RequestContext returns a new instance of OAuth2Bearer
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        result = context.auth
        mock_oauth_bearer.assert_called_once_with(self.auth_token)
        self.assertEqual(mock_oauth_bearer.return_value, result,
                         "Auth property of RequestContext should return an instance of OAuth2Bearer")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_occurs_by_default_when_accessing_property(self, mock_auth, mock_requests_session):
        """
        Test that accessing session property when no session has been set (default) creates a new requests.Session object
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        context.session
        mock_requests_session.assert_called_once_with()

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_sets_auth_to_instance_property(self, mock_auth, mock_requests_session):
        """
        Test that Session object has an "auth" property that is the value of the context object's auth property.
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        result = context.session
        self.assertEqual(result.auth, mock_auth.return_value,
                         "Session attribute should have auth set to context auth")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_updates_headers_with_instance_headers(self, mock_auth, mock_requests_session):
        """
        Test that Session object headers are updated with context headers, when context headers are present (they are
        by default set to the value of get_default_headers).
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        context.session
        mock_requests_session.return_value.headers.update.assert_called_once_with(context.headers)

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_updates_headers_with_empty_dict_when_no_instance_headers(self, mock_auth, mock_requests_session):
        """
        Test that Session object headers are updated with an empty dictionary if context headers are explicitly set to None.
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        context.headers = None
        context.session
        mock_requests_session.return_value.headers.update.assert_called_once_with({})

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_has_stream_set_to_false(self, mock_auth, mock_requests_session):
        """
        Test that Session object is created with streaming disabled.
        """
        context = RequestContext(self.auth_token, self.base_api_url)
        result = context.session
        self.assertEqual(
            False, result.stream, "Streaming should be disabled for newly created sessions")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_sets_verify_to_instance_value(self, mock_auth, mock_requests_session):
        """
        Test that Session object is created with context verify value.
        """
        context = RequestContext(self.auth_token, self.base_api_url, verify=False)
        result = context.session
        self.assertEqual(
            False, result.verify, "Verify attribute on session should be equivalent to attribute passed in to context")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_sets_cert_to_instance_value(self, mock_auth, mock_requests_session):
        """
        Test that Session object is created with context cert value.
        """
        context = RequestContext(self.auth_token, self.base_api_url, cert='my-cert')
        result = context.session
        self.assertEqual(
            'my-cert', result.cert, "Cert attribute on session should be equivalent to attribute passed in to context")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_sets_timeout_to_instance_value(self, mock_auth, mock_requests_session):
        """
        Test that Session object is created with context timeout value.
        """
        context = RequestContext(self.auth_token, self.base_api_url, timeout=60)
        result = context.session
        self.assertEqual(
            60, result.timeout, "Timeout attribute on session should be equivalent to attribute passed in to context")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_does_not_explicitly_set_proxies_if_none(self, mock_auth, mock_requests_session):
        """
        Test that proxies attribute that Session object may be initialized with is not overriden by an empty/None context property
        """
        session_proxy = {'proxy': 'session'}
        mock_requests_session.return_value.proxies = session_proxy
        context = RequestContext(self.auth_token, self.base_api_url)
        context.proxies = None
        result = context.session
        self.assertEqual(session_proxy, result.proxies,
                         "Proxies attribute should not be overriden on session when no proxies set on context")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_sets_proxies_with_context_value(self, mock_auth, mock_requests_session):
        """
        Test that proxies attribute that Session object may be initialized with is overriden by corresponding context property
        """
        context_proxy = {'proxy': 'session'}
        context = RequestContext(self.auth_token, self.base_api_url, proxies=context_proxy)
        result = context.session
        self.assertEqual(context_proxy, result.proxies,
                         "Proxies attribute should be set to context value")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_does_not_explicitly_set_cookies_if_none(self, mock_auth, mock_requests_session):
        """
        Test that cookies attribute that Session object may be initialized with is not overriden by an empty/None context property
        """
        session_cookies = {'cookies': 'session'}
        mock_requests_session.return_value.cookies = session_cookies
        context = RequestContext(self.auth_token, self.base_api_url)
        context.cookies = None
        result = context.session
        self.assertEqual(session_cookies, result.cookies,
                         "Cookies attribute should not be overriden on session when no cookies set on context")

    @patch('canvas_sdk.client.request_context.requests.Session')
    @patch.object(RequestContext, 'auth', new_callable=mock.PropertyMock)
    def test_session_creation_sets_cookies_with_context_value(self, mock_auth, mock_requests_session):
        """
        Test that cookies attribute that Session object may be initialized with is overriden by corresponding context property
        """
        context_cookies = {'cookies': 'context'}
        context = RequestContext(self.auth_token, self.base_api_url, cookies=context_cookies)
        result = context.session
        self.assertEqual(context_cookies, result.cookies,
                         "Cookies attribute should be set to context value")

    def test_session_returns_stored_value_after_initial_creation(self):
        """
        Test that a previously created session is stored/returned when session property is called
        """
        previous_session = mock.Mock('previous-session')
        context = RequestContext(self.auth_token, self.base_api_url)
        context.session = previous_session  # Use setter to establish a stored value
        self.assertEqual(previous_session, context.session,
                         "Prior stored session should be returned by a subsequent call to session property")

    def test_expire_session_clears_stored_session(self):
        """
        Test that a previously created session is cleared out when calling expire_session
        """
        previous_session = mock.Mock('previous-session')
        context = RequestContext(self.auth_token, self.base_api_url)
        context.session = previous_session  # Use setter to establish a stored value
        context.expire_session()
        self.assertNotEqual(previous_session, context.session,
                            "Prior stored session should have been cleared out after call to expire_session")
| {
"content_hash": "abbef18c0d1219870c6b05e2627823a5",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 130,
"avg_line_length": 51.39622641509434,
"alnum_prop": 0.6685022026431718,
"repo_name": "penzance/canvas_python_sdk",
"id": "660e5be240b0d893345b16a58777753eb247bf34",
"size": "16344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/client/test_request_context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1066725"
}
],
"symlink_target": ""
} |
from django.apps import AppConfig
class DemoConfig(AppConfig):
    """Django application configuration for the test app package."""

    name = 'tests.testapp'
| {
"content_hash": "822f9f2620629f42ea96f9da8e126665",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 33,
"avg_line_length": 18.4,
"alnum_prop": 0.75,
"repo_name": "raphaelm/django-hierarkey",
"id": "548e8e31640722a5febad3978659d56216c27bd2",
"size": "92",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/testapp/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "49472"
}
],
"symlink_target": ""
} |
"""
Performs cartographic transformations (converts from
longitude,latitude to native map projection x,y coordinates and
vice versa) using PROJ (https://proj.org).
A Proj class instance is initialized with proj map projection
control parameter key/value pairs. The key/value pairs can
either be passed in a dictionary, or as keyword arguments,
or as a PROJ string (compatible with the proj command). See
:ref:`projections` for examples of
key/value pairs defining different map projections.
Calling a Proj class instance with the arguments lon, lat will
convert lon/lat (in degrees) to x/y native map projection
coordinates (in meters).
"""
import re
import warnings
from typing import Any, Optional, Tuple, Type
from pyproj._compat import cstrencode
from pyproj._transformer import Factors
from pyproj.crs import CRS
from pyproj.enums import TransformDirection
from pyproj.list import get_proj_operations_map
from pyproj.transformer import Transformer, TransformerFromPipeline
from pyproj.utils import _convertback, _copytobuffer
pj_list = get_proj_operations_map()
class Proj(Transformer):
    """
    Performs cartographic transformations. Converts from
    longitude, latitude to native map projection x,y coordinates and
    vice versa using PROJ (https://proj.org).

    Attributes
    ----------
    srs: str
        The string form of the user input used to create the Proj.
    crs: pyproj.crs.CRS
        The CRS object associated with the Proj.
    """

    def __init__(
        self, projparams: Any = None, preserve_units: bool = True, **kwargs
    ) -> None:
        """
        A Proj class instance is initialized with proj map projection
        control parameter key/value pairs. The key/value pairs can
        either be passed in a dictionary, or as keyword arguments,
        or as a PROJ string (compatible with the proj command). See
        :ref:`projections` for examples of
        key/value pairs defining different map projections.

        Parameters
        ----------
        projparams: int, str, dict, pyproj.CRS
            A PROJ or WKT string, PROJ dict, EPSG integer, or a pyproj.CRS instance.
        preserve_units: bool
            If false, will ensure +units=m.
        **kwargs:
            PROJ projection parameters.

        Example usage:

        >>> from pyproj import Proj
        >>> p = Proj(proj='utm',zone=10,ellps='WGS84', preserve_units=False)
        >>> x,y = p(-120.108, 34.36116666)
        >>> 'x=%9.3f y=%11.3f' % (x,y)
        'x=765975.641 y=3805993.134'
        >>> 'lon=%8.3f lat=%5.3f' % p(x,y,inverse=True)
        'lon=-120.108 lat=34.361'
        >>> # do 3 cities at a time in a tuple (Fresno, LA, SF)
        >>> lons = (-119.72,-118.40,-122.38)
        >>> lats = (36.77, 33.93, 37.62 )
        >>> x,y = p(lons, lats)
        >>> 'x: %9.3f %9.3f %9.3f' % x
        'x: 792763.863 925321.537 554714.301'
        >>> 'y: %9.3f %9.3f %9.3f' % y
        'y: 4074377.617 3763936.941 4163835.303'
        >>> lons, lats = p(x, y, inverse=True) # inverse transform
        >>> 'lons: %8.3f %8.3f %8.3f' % lons
        'lons: -119.720 -118.400 -122.380'
        >>> 'lats: %8.3f %8.3f %8.3f' % lats
        'lats: 36.770 33.930 37.620'
        >>> p2 = Proj('+proj=utm +zone=10 +ellps=WGS84', preserve_units=False)
        >>> x,y = p2(-120.108, 34.36116666)
        >>> 'x=%9.3f y=%11.3f' % (x,y)
        'x=765975.641 y=3805993.134'
        >>> p = Proj("epsg:32667", preserve_units=False)
        >>> 'x=%12.3f y=%12.3f (meters)' % p(-114.057222, 51.045)
        'x=-1783506.250 y= 6193827.033 (meters)'
        >>> p = Proj("epsg:32667")
        >>> 'x=%12.3f y=%12.3f (feet)' % p(-114.057222, 51.045)
        'x=-5851386.754 y=20320914.191 (feet)'
        >>> # test data with radian inputs
        >>> p1 = Proj("epsg:4214")
        >>> x1, y1 = p1(116.366, 39.867)
        >>> f'{x1:.3f} {y1:.3f}'
        '116.366 39.867'
        >>> x2, y2 = p1(x1, y1, inverse=True)
        >>> f'{x2:.3f} {y2:.3f}'
        '116.366 39.867'
        """
        # Accepts a PROJ/WKT string, PROJ dict, EPSG integer or CRS instance.
        self.crs = CRS.from_user_input(projparams, **kwargs)
        # make sure units are meters if preserve_units is False.
        if not preserve_units and "foot" in self.crs.axis_info[0].unit_name:
            # ignore export to PROJ string deprecation warning
            with warnings.catch_warnings():
                warnings.filterwarnings(
                    "ignore",
                    "You will likely lose important projection information",
                    UserWarning,
                )
                projstring = self.crs.to_proj4(4)
            # Strip the original unit clause and force meters, then rebuild
            # the CRS from the adjusted PROJ string.
            projstring = re.sub(r"\s\+units=[\w-]+", "", projstring)
            projstring += " +units=m"
            self.crs = CRS(projstring)
        # ignore export to PROJ string deprecation warning
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore",
                "You will likely lose important projection information",
                UserWarning,
            )
            projstring = self.crs.to_proj4() or self.crs.srs
        # Drop the "+type=crs" marker; the remainder is the pipeline string.
        self.srs = re.sub(r"\s\+?type=crs", "", projstring).strip()
        super().__init__(TransformerFromPipeline(cstrencode(self.srs)))

    def __call__(
        self,
        longitude: Any,
        latitude: Any,
        inverse: bool = False,
        errcheck: bool = False,
        radians: bool = False,
    ) -> Tuple[Any, Any]:
        """
        Calling a Proj class instance with the arguments lon, lat will
        convert lon/lat (in degrees) to x/y native map projection
        coordinates (in meters).

        Inputs should be doubles (they will be cast to doubles if they
        are not, causing a slight performance hit).

        Works with numpy and regular python array objects, python
        sequences and scalars, but is fastest for array objects.

        Accepted numeric scalar or array:

        - :class:`int`
        - :class:`float`
        - :class:`numpy.floating`
        - :class:`numpy.integer`
        - :class:`list`
        - :class:`tuple`
        - :class:`array.array`
        - :class:`numpy.ndarray`
        - :class:`xarray.DataArray`
        - :class:`pandas.Series`

        Parameters
        ----------
        longitude: scalar or array
            Input longitude coordinate(s).
        latitude: scalar or array
            Input latitude coordinate(s).
        inverse: bool, default=False
            If inverse is True the inverse transformation from x/y to
            lon/lat is performed.
        radians: bool, default=False
            If True, will expect input data to be in radians and will return radians
            if the projection is geographic. Otherwise, it uses degrees.
            This does not work with pyproj 2 and is ignored. It will be enabled again
            in pyproj 3.
        errcheck: bool, default=False
            If True, an exception is raised if the errors are found in the process.
            If False, ``inf`` is returned for errors.

        Returns
        -------
        Tuple[Any, Any]:
            The transformed coordinates.
        """
        # Map the boolean flag onto the Transformer direction enum.
        if inverse:
            direction = TransformDirection.INVERSE
        else:
            direction = TransformDirection.FORWARD
        return self.transform(
            xx=longitude,
            yy=latitude,
            direction=direction,
            errcheck=errcheck,
            radians=radians,
        )

    def get_factors(
        self,
        longitude: Any,
        latitude: Any,
        radians: bool = False,
        errcheck: bool = False,
    ) -> Factors:
        """
        .. versionadded:: 2.6.0

        Calculate various cartographic properties, such as scale factors, angular
        distortion and meridian convergence. Depending on the underlying projection
        values will be calculated either numerically (default) or analytically.

        The function also calculates the partial derivatives of the given
        coordinate.

        Accepted numeric scalar or array:

        - :class:`int`
        - :class:`float`
        - :class:`numpy.floating`
        - :class:`numpy.integer`
        - :class:`list`
        - :class:`tuple`
        - :class:`array.array`
        - :class:`numpy.ndarray`
        - :class:`xarray.DataArray`
        - :class:`pandas.Series`

        Parameters
        ----------
        longitude: scalar or array
            Input longitude coordinate(s).
        latitude: scalar or array
            Input latitude coordinate(s).
        radians: bool, default=False
            If True, will expect input data to be in radians and will return radians
            if the projection is geographic. Otherwise, it uses degrees.
        errcheck: bool, default=False
            If True, an exception is raised if the errors are found in the process.
            If False, ``inf`` is returned on error.

        Returns
        -------
        Factors
        """
        # process inputs, making copies that support buffer API.
        inx, x_data_type = _copytobuffer(longitude)
        iny = _copytobuffer(latitude)[0]
        # calculate the factors
        factors = self._transformer._get_factors(
            inx, iny, radians=radians, errcheck=errcheck
        )
        # if inputs were lists, tuples or floats, convert back.
        return Factors(
            meridional_scale=_convertback(x_data_type, factors.meridional_scale),
            parallel_scale=_convertback(x_data_type, factors.parallel_scale),
            areal_scale=_convertback(x_data_type, factors.areal_scale),
            angular_distortion=_convertback(x_data_type, factors.angular_distortion),
            meridian_parallel_angle=_convertback(
                x_data_type, factors.meridian_parallel_angle
            ),
            meridian_convergence=_convertback(
                x_data_type, factors.meridian_convergence
            ),
            tissot_semimajor=_convertback(x_data_type, factors.tissot_semimajor),
            tissot_semiminor=_convertback(x_data_type, factors.tissot_semiminor),
            dx_dlam=_convertback(x_data_type, factors.dx_dlam),
            dx_dphi=_convertback(x_data_type, factors.dx_dphi),
            dy_dlam=_convertback(x_data_type, factors.dy_dlam),
            dy_dphi=_convertback(x_data_type, factors.dy_dphi),
        )

    def definition_string(self) -> str:
        """Returns formal definition string for projection

        >>> Proj("epsg:4326").definition_string()
        'proj=longlat datum=WGS84 no_defs ellps=WGS84 towgs84=0,0,0'
        """
        return self.definition

    def to_latlong_def(self) -> Optional[str]:
        """return the definition string of the geographic (lat/lon)
        coordinate version of the current projection"""
        return self.crs.geodetic_crs.to_proj4(4) if self.crs.geodetic_crs else None

    def to_latlong(self) -> "Proj":
        """return a new Proj instance which is the geographic (lat/lon)
        coordinate version of the current projection"""
        return Proj(self.crs.geodetic_crs)

    def __reduce__(self) -> Tuple[Type["Proj"], Tuple[str]]:
        """special method that allows pyproj.Proj instance to be pickled"""
        return self.__class__, (self.crs.srs,)
| {
"content_hash": "c4e8e4c09b0056b0f1d470a0752bcd65",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 85,
"avg_line_length": 37.64882943143812,
"alnum_prop": 0.5930532113351692,
"repo_name": "ocefpaf/pyproj",
"id": "c4f1041b7132cf2d540d08e19eef8836d32d3007",
"size": "11257",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pyproj/proj.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "194556"
},
{
"name": "Makefile",
"bytes": "2671"
},
{
"name": "Python",
"bytes": "649387"
},
{
"name": "Shell",
"bytes": "10347"
}
],
"symlink_target": ""
} |
import os
import io
# Import maras libs
try:
from maras import __version__
except ImportError:
from __init__ import __version__
# Import third party libs
import msgpack
class IndexException(Exception):
    """Base class for all index-related errors."""
    pass


class IndexNotFoundException(IndexException):
    """Raised when a requested index does not exist."""
    pass


class ReindexException(IndexException):
    """Raised when an index must be rebuilt."""
    pass


class TryReindexException(ReindexException):
    """Raised when a reindex attempt may be retried."""
    pass


class ElemNotFound(IndexException):
    """Raised when a looked-up element is missing from the index."""
    pass


class DocIdNotFound(ElemNotFound):
    """Raised when a document id is missing from the index."""
    pass


class IndexConflict(IndexException):
    """Raised on conflicting concurrent index operations."""
    pass


class IndexPreconditionsException(IndexException):
    """Raised when an operation's preconditions are not met."""
    pass
class Index(object):
    """Base class for file-backed database indexes.

    Tunable properties are persisted as a msgpack blob inside a reserved
    header span at the start of the ``<name>_buck`` bucket file; value
    storage is delegated to a companion ``storage`` object that concrete
    subclasses create via ``_open_storage``/``_create_storage``.
    """

    __version__ = __version__

    custom_header = ''  # : use it for imports required by your index

    def __init__(self,
                 db_path,
                 name):
        self.name = name
        # Size in bytes of the reserved header span holding the
        # msgpack-encoded index properties.
        self._start_ind = 500
        self.db_path = db_path

    def open_index(self):
        """Open an existing bucket file, load its stored parameters and
        open the companion storage.

        Raises
        ------
        IndexException
            If the bucket file does not exist.
        """
        if not os.path.isfile(os.path.join(self.db_path, self.name + '_buck')):
            raise IndexException("Doesn't exist")
        # Unbuffered binary mode: header rewrites must hit disk directly.
        self.buckets = io.open(
            os.path.join(self.db_path, self.name + "_buck"), 'r+b', buffering=0)
        self._fix_params()
        self._open_storage()

    def _close(self):
        # Close both backing files; `storage` is created by the subclass.
        self.buckets.close()
        self.storage.close()

    def close_index(self):
        """Flush, sync and close the index files."""
        self.flush()
        self.fsync()
        self._close()

    def create_index(self):
        raise NotImplementedError()

    def _get_props(self):
        """Decode the property dict stored at the head of the bucket file.

        The length of the msgpack payload is not recorded, so successively
        longer prefixes are tried until one decodes cleanly. Returns None
        if nothing in the header decodes (corrupted header).
        """
        self.buckets.seek(0)
        raw_ind = self.buckets.read(self._start_ind)
        pivot = 1
        while pivot < self._start_ind:
            try:
                return msgpack.loads(raw_ind[:pivot])
            except Exception:
                pivot += 1
                continue
        # BUG FIX (made explicit): the original fell off the loop and
        # returned None implicitly on a corrupted header.
        return None

    def _fix_params(self):
        """Load the stored properties into instance attributes and seek to
        the end of the bucket file."""
        props = self._get_props()
        # BUG FIX: dict.iteritems() is Python 2 only; items() works on both.
        for k, v in props.items():
            self.__dict__[k] = v
        self.buckets.seek(0, 2)

    def _save_params(self, in_params=None):
        """Merge ``in_params`` into the stored properties, rewrite the
        header and mirror the merged values onto the instance.

        Raises
        ------
        IndexException
            If the encoded properties exceed the reserved header span.
        """
        # BUG FIX: the original used a mutable default argument ({}).
        props = self._get_props()
        props.update(in_params or {})
        self.buckets.seek(0)
        data = msgpack.dumps(props)
        if len(data) > self._start_ind:
            raise IndexException("Too big props")
        self.buckets.write(data)
        self.flush()
        self.buckets.seek(0, 2)
        self.__dict__.update(props)

    def _open_storage(self, *args, **kwargs):
        pass

    def _create_storage(self, *args, **kwargs):
        pass

    def _destroy_storage(self, *args, **kwargs):
        self.storage.destroy()

    def _find_key(self, key):
        raise NotImplementedError()

    def update(self, doc_id, key, start, size):
        raise NotImplementedError()

    def insert(self, doc_id, key, start, size):
        raise NotImplementedError()

    def get(self, key):
        raise NotImplementedError()

    def get_many(self, key, start_from=None, limit=0):
        raise NotImplementedError()

    def all(self, start_pos):
        raise NotImplementedError()

    def delete(self, key, start, size):
        raise NotImplementedError()

    def make_key_value(self, data):
        raise NotImplementedError()

    def make_key(self, data):
        raise NotImplementedError()

    def compact(self, *args, **kwargs):
        raise NotImplementedError()

    def destroy(self, *args, **kwargs):
        """Close the index and remove its bucket and storage files."""
        self._close()
        bucket_file = os.path.join(self.db_path, self.name + '_buck')
        os.unlink(bucket_file)
        self._destroy_storage()
        # NOTE(review): assumes subclasses wrap _find_key in a cache
        # exposing .clear(); on this base class the call would fail --
        # confirm against concrete index implementations.
        self._find_key.clear()

    def flush(self):
        """Best-effort flush of both files; ignored if not yet open."""
        try:
            self.buckets.flush()
            self.storage.flush()
        except Exception:
            pass

    def fsync(self):
        """Best-effort fsync of both files; ignored if not yet open."""
        try:
            os.fsync(self.buckets.fileno())
            self.storage.fsync()
        except Exception:
            pass

    def update_with_storage(self, doc_id, key, value):
        """Store ``value`` (if any) in storage, then update the index
        entry to point at it. An empty value gets a sentinel (1, 0) span."""
        if value:
            start, size = self.storage.insert(value)
        else:
            start = 1
            size = 0
        return self.update(doc_id, key, start, size)

    def insert_with_storage(self, doc_id, key, value):
        """Store ``value`` (if any) in storage, then insert an index
        entry pointing at it. An empty value gets a sentinel (1, 0) span."""
        if value:
            start, size = self.storage.insert(value)
        else:
            start = 1
            size = 0
        return self.insert(doc_id, key, start, size)
| {
"content_hash": "3e69478b87accc07df2aac6bf62fe508",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 80,
"avg_line_length": 23.318681318681318,
"alnum_prop": 0.5721017907634307,
"repo_name": "thatch45/maras_old",
"id": "1e3c6d7ef9673c2f7dc5dea6b6dcbd69bd5822c2",
"size": "4915",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "maras/index.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "446918"
}
],
"symlink_target": ""
} |
class JobQueue:
    """Greedy job scheduler.

    Assigns each job (in input order) to the worker that becomes free
    first, breaking ties in favor of the lower-numbered worker. Workers
    live in a binary min-heap ordered by (next free time, worker index).
    """

    def read_data(self):
        """Read worker count and job durations from stdin."""
        self.num_workers, m = map(int, input().split())
        self.jobs = list(map(int, input().split()))
        assert m == len(self.jobs)

    def write_response(self):
        """Print 'worker start_time' for every job, in input order."""
        for worker, started in zip(self.assigned_workers, self.start_times):
            print(worker, started)

    def Swap(self, i, j):
        # Exchange two slots of the heap's index array.
        self.heap_index[i], self.heap_index[j] = (
            self.heap_index[j], self.heap_index[i])

    def LeftChild(self, i):
        return 2 * i + 1

    def RightChild(self, i):
        return 2 * i + 2

    def SiftDown(self, i):
        """Restore the heap property below position ``i``.

        The ordering key is (next free time, worker index), so equal
        times resolve to the smaller worker id.
        """
        def key(pos):
            worker = self.heap_index[pos]
            return (self.threads[worker], worker)

        smallest = i
        for child in (self.LeftChild(i), self.RightChild(i)):
            if child < self.num_workers and key(child) < key(smallest):
                smallest = child
        if smallest != i:
            self.Swap(i, smallest)
            self.SiftDown(smallest)

    def assign_jobs(self):
        """Assign every job to the earliest-free worker via the heap."""
        self.assigned_workers = [None] * len(self.jobs)
        self.start_times = [None] * len(self.jobs)
        self.threads = [0] * self.num_workers  # next free time per worker
        self.heap_index = list(range(self.num_workers))  # heap of worker ids

        for i, duration in enumerate(self.jobs):
            worker = self.heap_index[0]  # root = earliest-free worker
            self.assigned_workers[i] = worker
            self.start_times[i] = self.threads[worker]
            self.threads[worker] += duration
            self.SiftDown(0)

    def solve(self):
        self.read_data()
        self.assign_jobs()
        self.write_response()
if __name__ == '__main__':
    # Script entry point: read the problem from stdin, print the schedule.
    job_queue = JobQueue()
    job_queue.solve()
| {
"content_hash": "a789593bf595b5dcce03688029b5bb69",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 98,
"avg_line_length": 32.333333333333336,
"alnum_prop": 0.5609965635738832,
"repo_name": "supermikol/coursera",
"id": "9c664f8085e0c247797288ed0df7bab20bcda602",
"size": "2339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Data Structures/Week 2/job_queue/job_queue.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "M",
"bytes": "135"
},
{
"name": "Matlab",
"bytes": "281938"
},
{
"name": "Python",
"bytes": "34783"
}
],
"symlink_target": ""
} |
r"""Contains functionality for making meteorological plots."""
import logging
# Trigger matplotlib wrappers
from . import _mpl # noqa: F401
from ._util import add_metpy_logo, add_timestamp, add_unidata_logo, convert_gempak_color
from .ctables import * # noqa: F403
from .declarative import * # noqa: F403
from .skewt import * # noqa: F403
from .station_plot import * # noqa: F403
from .wx_symbols import * # noqa: F403
logger = logging.getLogger(__name__)

__all__ = ctables.__all__[:]  # pylint: disable=undefined-variable
__all__.extend(declarative.__all__)  # pylint: disable=undefined-variable
__all__.extend(skewt.__all__)  # pylint: disable=undefined-variable
__all__.extend(station_plot.__all__)  # pylint: disable=undefined-variable
__all__.extend(wx_symbols.__all__)  # pylint: disable=undefined-variable
# __all__ must contain *names* (strings); extending with the function
# objects themselves breaks ``from metpy.plots import *`` with a TypeError.
__all__.extend(['add_metpy_logo', 'add_timestamp', 'add_unidata_logo',
                'convert_gempak_color'])

try:
    from .cartopy_utils import USCOUNTIES, USSTATES
    # Same here: export the names, not the objects.
    __all__.extend(['USCOUNTIES', 'USSTATES'])
except ImportError:
    logger.warning('Cannot import USCOUNTIES and USSTATES without Cartopy installed.')
| {
"content_hash": "a3f54b05ca333320317309fbf473ebc6",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 88,
"avg_line_length": 43.407407407407405,
"alnum_prop": 0.7133105802047781,
"repo_name": "ahaberlie/MetPy",
"id": "57694f9eebabeddb0b185c9f80a89520828126b4",
"size": "1325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/metpy/plots/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1508112"
},
{
"name": "Shell",
"bytes": "1600"
}
],
"symlink_target": ""
} |
import click


@click.group("veros")
@click.version_option()
def cli():
    """Veros command-line tools"""
    # Group entry point only; subcommands are registered elsewhere.
| {
"content_hash": "bc28d1c45a9058290920fb9e63b65d0b",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 34,
"avg_line_length": 14.5,
"alnum_prop": 0.646551724137931,
"repo_name": "dionhaefner/veros",
"id": "c75964a98aca21c9e57b404ac76095de440a9bdd",
"size": "116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "veros/cli/veros.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "310"
},
{
"name": "Cuda",
"bytes": "3112"
},
{
"name": "Cython",
"bytes": "4658"
},
{
"name": "Python",
"bytes": "732116"
}
],
"symlink_target": ""
} |
import re
from typing import List, Dict, Tuple, Optional
from pywikibot import Site
from service.ws_re.register._typing import AuthorDict, CrawlerDict
from tools import fetch_text_from_wiki_site
# Translation table that strips wiki-markup characters ([, ] and ') from names.
TRANS_DICT = str.maketrans({"[": "", "]": "", "'": ""})
class AuthorCrawler:
    """Scrapes RE author data from two Wikisource pages.

    Two inputs are parsed: a Lua mapping module (``get_mapping``/
    ``get_author_mapping``) and a wikitable of authors (``get_authors``/
    ``process_author_infos``).
    """

    # Matches a simple Lua entry:  ["key"] = "value"
    _SIMPLE_REGEX_MAPPING = re.compile(r"\[\"([^\]]*)\"\]\s*=\s*\"([^\"]*)\"")
    # Matches a table-valued Lua entry:  ["key"] = { ... }
    _COMPLEX_REGEX_MAPPING = re.compile(r"\[\"([^\]]*)\"\]\s*=\s*\{([^\}]*)\}")
    @classmethod
    def get_mapping(cls, mapping: str) -> CrawlerDict:
        """Convert the raw Lua mapping source into a flat dict."""
        mapping_dict = {}
        for single_mapping in cls._split_mappings(mapping):
            mapping_dict.update(cls._extract_mapping(single_mapping))
        return mapping_dict
    @staticmethod
    def _split_mappings(mapping: str) -> List[str]:
        """Strip the Lua ``return { ... }`` wrapper and split into entries."""
        mapping = re.sub(r"^return \{\n", "", mapping)
        mapping = re.sub(r"\}\s?$", "", mapping)
        splitted_mapping = mapping.split("\n[")
        # Re-attach the "[" consumed by the split and drop trailing commas.
        splitted_mapping = ["[" + mapping.strip().strip(",").lstrip("[")
                            for mapping in splitted_mapping]
        return splitted_mapping
    @classmethod
    def _extract_mapping(cls, single_mapping: str) -> CrawlerDict:
        """Parse one Lua entry into a one-element dict.

        Raises ValueError when the entry matches neither regex.
        """
        if "{" in single_mapping:
            return cls._extract_complex_mapping(single_mapping)
        hit = cls._SIMPLE_REGEX_MAPPING.search(single_mapping)
        if hit:
            return {hit.group(1): hit.group(2)}
        raise ValueError(f"{single_mapping} don't compatible to regex.")
    @classmethod
    def _extract_complex_mapping(cls, single_mapping: str) -> CrawlerDict:
        """Parse a table-valued Lua entry into a nested dict.

        Sub-entries without their own key are stored under "*".
        Raises ValueError when the entry does not match the regex.
        """
        hit = cls._COMPLEX_REGEX_MAPPING.search(single_mapping)
        if hit:
            sub_dict = {}
            for sub_mapping in hit.group(2).split(",\n"):
                sub_hit = cls._SIMPLE_REGEX_MAPPING.search(sub_mapping)
                if sub_hit:
                    sub_dict[sub_hit.group(1)] = sub_hit.group(2)
                else:
                    sub_dict["*"] = sub_mapping.strip().strip("\"")
            return {hit.group(1): sub_dict}
        raise ValueError(f"{single_mapping} not compliant to regex")
    @classmethod
    def get_authors(cls, text: str) -> Dict[str, AuthorDict]:
        """Parse the author wikitable into {"First Last": AuthorDict}."""
        return_dict = {}
        author_list = cls._split_author_table(text)
        for author_sub_table in author_list:
            return_dict.update(cls._get_author(author_sub_table))
        return return_dict
    @staticmethod
    def _split_author_table(raw_table: str) -> List[str]:
        """Extract the wikitable body and split it into per-author rows.

        Raises ValueError when no wikitable is found.
        """
        hit = re.search(r"\{\|class=\"wikitable sortable\"[^\|]*?\|-\s+(.*)\s+\|\}",
                        raw_table, re.DOTALL)
        if hit:
            table = hit.group(1)
            splitted_table = table.split("\n|-\n")
            return splitted_table
        raise ValueError("raw_table not compatible to regex.")
    @staticmethod
    def _split_author(author_sub_table: str) -> List[str]:
        """Split one table row into its cell strings."""
        return author_sub_table.split("\n|")
    @staticmethod
    def _extract_author_infos(author: str) -> Tuple[str, str, str]:
        """Return (first_name, last_name, ws_lemma) from a name cell.

        first_name is "" when the cell contains no comma-separated parts.
        """
        author = author.lstrip("|")
        # replace all templates
        author = re.sub(r"\{\{.*?\}\}", "", author)
        # replace all comments
        author = re.sub(r"<!--.*?-->", "", author)
        author = re.sub(r"<nowiki>.*?</nowiki>", "", author)
        # if it's a link use only the second part
        if re.search(r"\[\[", author):
            splitting = author.split("|")
            author_name = splitting[1]
            author_lemma = splitting[0]
        else:
            author_name = author
            author_lemma = ""
        if author_lemma:
            author_lemma = author_lemma.translate(TRANS_DICT)
        author_name = author_name.translate(TRANS_DICT)
        # drop parenthesised disambiguation from the display name
        author_name = re.sub(r"\(.*?\)", "", author_name)
        names = author_name.split(",")
        # handle funky things with a "="-character
        try:
            if "=" in names[0]:
                names[0] = names[0].split("=")[0].strip()
            if "=" in names[1]:
                names[1] = names[1].split("=")[0].strip()
        except IndexError:
            # only one name part: treat it as the last name
            return "", names[0].strip(), author_lemma
        return names[1].strip(), names[0].strip(), author_lemma
    @staticmethod
    def _extract_years(years: str) -> Tuple[Optional[int], Optional[int]]:
        """Parse a "birth || death" cell; either year may be missing.

        The lookbehind skips quoted years; appears tailored to the table's
        cell markup — TODO confirm against the live page.
        """
        hit = re.search(r"(?<!\")(\d{4}) ?\|\| ?(\d{4})?", years)
        if hit:
            return int(hit.group(1)), int(hit.group(2)) if hit.group(2) else None
        return None, None
    @staticmethod
    def _extract_wp_lemma(wp_column: str) -> Optional[str]:
        """Return the Wikipedia lemma from a [[w:Lemma|...]] link, if any."""
        hit = re.search(r"\[\[w:([^\|]*)\|", wp_column)
        if hit:
            return hit.group(1)
        return None
    @classmethod
    def _get_author(cls, author_lines: str) -> Dict[str, AuthorDict]:
        """Build the AuthorDict for one table row; optional keys are omitted."""
        lines = cls._split_author(re.sub(r"<!--.*?-->", "", author_lines, flags=re.DOTALL))
        author_tuple = cls._extract_author_infos(lines[0])
        years = cls._extract_years(lines[1])
        wp_lemma = cls._extract_wp_lemma(lines[3])
        author = f"{author_tuple[0]} {author_tuple[1]}".strip()
        author_dict: Dict[str, AuthorDict] = {author: {"last_name": author_tuple[1]}}
        if author_tuple[0]:
            author_dict[author]["first_name"] = author_tuple[0]
        if author_tuple[2]:
            author_dict[author]["ws_lemma"] = author_tuple[2]
        birth_year = years[0]
        if birth_year:
            author_dict[author]["birth"] = birth_year
        death_year = years[1]
        if death_year:
            author_dict[author]["death"] = death_year
        if wp_lemma:
            author_dict[author]["wp_lemma"] = wp_lemma
        return author_dict
    @classmethod
    def process_author_infos(cls, wiki: Site) -> Dict[str, AuthorDict]:
        """Fetch the author table page from *wiki* and parse it."""
        text = fetch_text_from_wiki_site(wiki,
                                         "Paulys Realencyclopädie der classischen "
                                         "Altertumswissenschaft/Autoren")
        return cls.get_authors(text)
    @classmethod
    def get_author_mapping(cls, wiki: Site) -> CrawlerDict:
        """Fetch the Lua author-mapping module from *wiki* and parse it."""
        text = fetch_text_from_wiki_site(wiki, "Modul:RE/Autoren")
        return cls.get_mapping(text)
| {
"content_hash": "e14357ffaf7947ab789b52eaf34a008d",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 91,
"avg_line_length": 39.78064516129032,
"alnum_prop": 0.5527084009082063,
"repo_name": "the-it/WS_THEbotIT",
"id": "d3ca362009466c15ef7f6d2014b92b9a2441c9d5",
"size": "6167",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "service/ws_re/register/author_crawler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HCL",
"bytes": "3121"
},
{
"name": "Makefile",
"bytes": "3017"
},
{
"name": "Python",
"bytes": "785189"
},
{
"name": "Shell",
"bytes": "1199"
}
],
"symlink_target": ""
} |
from __future__ import print_function, absolute_import
import subprocess
import unittest
import os
import sys
import time
from .util import ShellProcess
_ROOT = os.path.dirname(os.path.realpath(__file__))
def here(*x):
return os.path.join(_ROOT, *x)
class ShellSpoutTester(unittest.TestCase):
    """IPC tests for a shell spout subprocess.

    NOTE: the numeric test-name prefixes matter — the tests depend on
    running in order against the single shared subprocess (handshake first).
    """

    @classmethod
    def setUpClass(cls):
        """Start dummy_spout.py once for the whole test class."""
        args = ["python", here("dummy_spout.py")]
        cls.proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
        print("Waiting for subprocess to start...")
        time.sleep(1)  # time for the subprocess to start
        # poll() returns a code only when the process already exited.
        if cls.proc.poll() is not None:
            raise Exception("Could not create subprocess.\n{}"
                            .format("".join(cls.proc.stderr.readlines())))
        cls.shell_proc = ShellProcess(cls.proc.stdout, cls.proc.stdin)
    def test_1_initial_handshake(self):
        """Handshake: the spout must report its pid and create a pid file."""
        msg = {
            "conf": {},
            "context": {},
            "pidDir": here()
        }
        ShellSpoutTester.shell_proc.write_message(msg)
        res = ShellSpoutTester.shell_proc.read_message()
        self.assertIsInstance(res, dict)
        self.assertEqual(res.get("pid"), ShellSpoutTester.proc.pid)
        pid = str(res["pid"])
        self.assertTrue(os.path.exists(here(pid)))
        self.assertTrue(os.path.isfile(here(pid)))
    def test_2_next_tuple(self):
        """'next' must emit one tuple and then sync."""
        msg = {"command": "next"}
        res = ShellSpoutTester.shell_proc.query_subprocess(msg)
        self.assertEqual(len(res), 2)
        cmd = res[0]
        self.assertDictEqual({"command": "emit", "tuple": ["test"]}, cmd)
        cmd = res[1]
        self.assertDictEqual({"command": "sync"}, cmd)
    def test_3_ack(self):
        """'ack' must be answered by a lone sync."""
        msg = {"command": "ack", "id": "123456"}
        res = ShellSpoutTester.shell_proc.query_subprocess(msg)
        self.assertEqual(len(res), 1)
        cmd = res[0]
        self.assertDictEqual({"command": "sync"}, cmd)
    def test_4_fail(self):
        """'fail' must be answered by a lone sync."""
        msg = {"command": "fail", "id": "123456"}
        res = ShellSpoutTester.shell_proc.query_subprocess(msg)
        self.assertEqual(len(res), 1)
        cmd = res[0]
        self.assertDictEqual({"command": "sync"}, cmd)
    @classmethod
    def tearDownClass(cls):
        """Remove the pid file created during the handshake and kill the spout."""
        os.remove(here(str(cls.proc.pid)))
        cls.proc.kill()
if __name__ == '__main__':
    # Allow running this module directly: discover and run the tests above.
    unittest.main()
| {
"content_hash": "85377d0f88a44fc629969e4daff10bdf",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 74,
"avg_line_length": 29.98780487804878,
"alnum_prop": 0.5811305408702725,
"repo_name": "thedrow/streamparse",
"id": "876fa711337b7a2116455ac79dd8cc8d6a8cc51b",
"size": "2459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/ipc/test_spout.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import unittest
from moveit_ros_visualization.moveitjoy_module import MoveitJoy
import rospy
import rostest
import os
# Package and node names used to register this test with rostest below.
_PKGNAME = 'moveit_ros_visualization'
_NODENAME = 'test_moveit_joy'
class TestMoveitJoy(unittest.TestCase):
    """Smoke test: MoveitJoy is importable and its constructor is callable."""

    def __init__(self, *args, **kwargs):
        super(TestMoveitJoy, self).__init__(*args, **kwargs)

    def test_constructor(self):
        # With no robot model loaded the constructor is expected to raise;
        # the point of the test is merely that it can be invoked at all.
        with self.assertRaises(RuntimeError):
            MoveitJoy()
if __name__ == '__main__':
    # rostest entry point: initialise the ROS node, then run the suite.
    rospy.init_node(_NODENAME)
    rostest.rosrun(_PKGNAME, _NODENAME, TestMoveitJoy)
    # Don't get trapped by https://github.com/ros/ros_comm/issues/870
    os._exit(0)  # exit without cleanup
| {
"content_hash": "47887c820f068ffa68fd5936b3721ef1",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 84,
"avg_line_length": 33.625,
"alnum_prop": 0.701363073110285,
"repo_name": "davetcoleman/moveit",
"id": "b3974e8e52f32b5a31dd4804e495b5e7a47c3b7e",
"size": "2522",
"binary": false,
"copies": "1",
"ref": "refs/heads/melodic-devel",
"path": "moveit_ros/visualization/test/test_moveit_joy.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4440"
},
{
"name": "C++",
"bytes": "5321283"
},
{
"name": "CMake",
"bytes": "104054"
},
{
"name": "GDB",
"bytes": "375"
},
{
"name": "HTML",
"bytes": "1173"
},
{
"name": "Makefile",
"bytes": "246"
},
{
"name": "Python",
"bytes": "184716"
}
],
"symlink_target": ""
} |
"""
Created on Mar 18, 2012
"""
# Module metadata.
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 18, 2012"
import os
import unittest
import warnings
from pymatgen.apps.borg.hive import VaspToComputedEntryDrone
from pymatgen.apps.borg.queen import BorgQueen
from pymatgen.util.testing import PymatgenTest
class BorgQueenTest(unittest.TestCase):
    """Tests for BorgQueen assimilation with the VASP drone."""

    def setUp(self):
        # Silence warnings emitted while parsing the test VASP outputs.
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.simplefilter("default")

    def test_get_data(self):
        drone = VaspToComputedEntryDrone()
        self.queen = BorgQueen(drone, PymatgenTest.TEST_FILES_DIR, 1)
        entries = self.queen.get_data()
        self.assertEqual(len(entries), 12)

    def test_load_data(self):
        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone)
        json_path = os.path.join(PymatgenTest.TEST_FILES_DIR, "assimilated.json")
        queen.load_data(json_path)
        self.assertEqual(len(queen.get_data()), 1)
if __name__ == "__main__":
    # Allow running this module directly as a test script.
    unittest.main()
| {
"content_hash": "f999048da04b1bbe49227967b00ec5b7",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 86,
"avg_line_length": 25.813953488372093,
"alnum_prop": 0.6693693693693694,
"repo_name": "gmatteo/pymatgen",
"id": "55c9206bfe776093f4b963b2b14a90d5d61ec261",
"size": "1221",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pymatgen/apps/borg/tests/test_queen.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "277"
},
{
"name": "Python",
"bytes": "7840569"
},
{
"name": "Shell",
"bytes": "711"
}
],
"symlink_target": ""
} |
""" This example script demonstrates pySBOL's API for genetic design automation. A multi-level abstraction hierarchy is assembled and recursively compiled into a target sequence. """
from sbol import *

setHomespace('http://sys-bio.org')
doc = Document()
doc.displayId = 'example'
doc.name = 'example'
doc.description = 'Example BioBrick assembly for ACS Syn Bio'

# Pull the BioBrick parts used in the assembly from the public iGEM repo.
igem = PartShop('https://synbiohub.org/public/igem')
igem.pull('BBa_R0010', doc)
igem.pull('BBa_B0032', doc)
igem.pull('BBa_E0040', doc)
igem.pull('BBa_B0012', doc)
igem.pull('pSB1A3', doc)
print(doc)
for obj in doc:
    print (obj)

r0010 = doc.componentDefinitions['BBa_R0010']
b0032 = doc.componentDefinitions['BBa_B0032']
e0040 = doc.componentDefinitions['BBa_E0040']
b0012 = doc.componentDefinitions['BBa_B0012']
backbone = doc.componentDefinitions['pSB1A3']
insert = ComponentDefinition('insert')
vector = ComponentDefinition('vector')
doc.addComponentDefinition(insert)
doc.addComponentDefinition(vector)

# Assemble promoter-RBS-CDS-terminator, then place the insert in the backbone.
insert.assemblePrimaryStructure([ r0010, b0032, e0040, b0012 ], IGEM_STANDARD_ASSEMBLY)
for component in insert.getPrimaryStructure():
    print(component)
vector.assemblePrimaryStructure([backbone, insert])
target_sequence = vector.compile()
print(target_sequence)
result = doc.write('gene_cassette.xml')
print(result)

# Submission requires an account at https://synbiohub.org — replace the
# placeholder with real credentials.  (The original line passed a bare
# <USERNAME> token, which is a SyntaxError and made the file unimportable.)
igem.login('<USERNAME>')
igem.submit(doc)
| {
"content_hash": "6d0590ebe5d32331937a9a343684cf8f",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 182,
"avg_line_length": 30.130434782608695,
"alnum_prop": 0.7662337662337663,
"repo_name": "SynBioDex/pySBOL",
"id": "83788e363a817f03919031828b0badac3240b267",
"size": "1386",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/assembly_example.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "26740815"
},
{
"name": "Inno Setup",
"bytes": "28900"
},
{
"name": "Java",
"bytes": "478911"
},
{
"name": "Makefile",
"bytes": "2180"
},
{
"name": "Python",
"bytes": "10599711"
},
{
"name": "Shell",
"bytes": "1141"
},
{
"name": "TeX",
"bytes": "61255"
}
],
"symlink_target": ""
} |
from django.forms import Select
from django.utils.functional import lazy
from wagtail.wagtailcore.models import Site
from .client import Client
class ApiSelect(Select):
    """Base select widget whose choices are fetched lazily from the SendinBlue API.

    The API is queried at most once per widget instance; the result is
    memoised in ``_cached_choices``.
    """

    def __init__(self, attrs=None, **kwargs):
        super(ApiSelect, self).__init__(attrs, ())
        self.choices = lazy(self._get_choices, tuple)()
        self._cached_choices = None

    def _get_choices(self):
        # Guard clause: serve the memoised result when available.
        if self._cached_choices is not None:
            return self._cached_choices
        # Imported here to avoid a circular import with .models.
        from .models import SendinBlueSettings
        site = Site.objects.first()
        api_settings = SendinBlueSettings.for_site(site)
        api_client = Client(api_settings.apikey)
        self._cached_choices = self.get_choices(api_client)
        return self._cached_choices

    def get_choices(self, api):
        """Subclasses build the concrete choice list from the API client."""
        raise NotImplementedError
class AttributesSelect(ApiSelect):
    """Select widget listing the account's "normal" contact attributes."""

    def get_choices(self, api):
        """Return (name, name) choice pairs for every normal attribute.

        Builds a concrete list (the original returned a lazy ``map`` object):
        ApiSelect caches this return value and may iterate it more than once,
        and an exhausted iterator would render an empty choice list on every
        re-use.  A list also matches the sibling ListSelect/TemplateSelect.
        """
        data = api.get_attributes()
        attributes = data['data']['normal_attributes']
        return [(a['name'], a['name']) for a in attributes]
class ListSelect(ApiSelect):
    """Select widget listing the account's contact lists by id."""

    def get_choices(self, api):
        payload = api.get_lists()
        pairs = [(entry['id'], entry['name']) for entry in payload['data']['lists']]
        if self.is_required:
            return pairs
        # Optional field: prepend an empty choice.
        return [(None, '')] + pairs
class TemplateSelect(ApiSelect):
    """Select widget listing draft SendinBlue email templates by id."""

    def get_choices(self, api):
        payload = api.get_campaigns_v2('template', 'draft', 1, 500)
        pairs = [
            (record['id'], record['campaign_name'])
            for record in payload['data']['campaign_records']
        ]
        if self.is_required:
            return pairs
        # Optional field: prepend an empty choice.
        return [(None, '')] + pairs
| {
"content_hash": "a9b5462f787221bf075322515be57ba9",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 70,
"avg_line_length": 31.403846153846153,
"alnum_prop": 0.5995101041028781,
"repo_name": "apihackers/wagtail-sendinblue",
"id": "b376b686895cf7c73dd5be3e8035557793dfe234",
"size": "1633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sendinblue/widgets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4107"
},
{
"name": "HTML",
"bytes": "10873"
},
{
"name": "JavaScript",
"bytes": "203"
},
{
"name": "PHP",
"bytes": "6296"
},
{
"name": "Python",
"bytes": "151193"
}
],
"symlink_target": ""
} |
from ._models_py3 import ApiKey
from ._models_py3 import ApiKeyListResult
from ._models_py3 import CheckNameAvailabilityParameters
from ._models_py3 import ConfigurationStore
from ._models_py3 import ConfigurationStoreListResult
from ._models_py3 import ConfigurationStoreUpdateParameters
from ._models_py3 import DeletedConfigurationStore
from ._models_py3 import DeletedConfigurationStoreListResult
from ._models_py3 import EncryptionProperties
from ._models_py3 import ErrorAdditionalInfo
from ._models_py3 import ErrorDetails
from ._models_py3 import ErrorResponse
from ._models_py3 import KeyValue
from ._models_py3 import KeyValueListResult
from ._models_py3 import KeyVaultProperties
from ._models_py3 import LogSpecification
from ._models_py3 import MetricDimension
from ._models_py3 import MetricSpecification
from ._models_py3 import NameAvailabilityStatus
from ._models_py3 import OperationDefinition
from ._models_py3 import OperationDefinitionDisplay
from ._models_py3 import OperationDefinitionListResult
from ._models_py3 import OperationProperties
from ._models_py3 import PrivateEndpoint
from ._models_py3 import PrivateEndpointConnection
from ._models_py3 import PrivateEndpointConnectionListResult
from ._models_py3 import PrivateEndpointConnectionReference
from ._models_py3 import PrivateLinkResource
from ._models_py3 import PrivateLinkResourceListResult
from ._models_py3 import PrivateLinkServiceConnectionState
from ._models_py3 import RegenerateKeyParameters
from ._models_py3 import Resource
from ._models_py3 import ResourceIdentity
from ._models_py3 import ServiceSpecification
from ._models_py3 import Sku
from ._models_py3 import SystemData
from ._models_py3 import TrackedResource
from ._models_py3 import UserIdentity
from ._app_configuration_management_client_enums import (
ActionsRequired,
ConfigurationResourceType,
ConnectionStatus,
CreateMode,
CreatedByType,
IdentityType,
ProvisioningState,
PublicNetworkAccess,
)
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk
# Public API of this generated models package: every model class and enum
# re-exported above, plus anything contributed by the hand-written _patch
# module (merged in below without duplicating names).
__all__ = [
    'ApiKey',
    'ApiKeyListResult',
    'CheckNameAvailabilityParameters',
    'ConfigurationStore',
    'ConfigurationStoreListResult',
    'ConfigurationStoreUpdateParameters',
    'DeletedConfigurationStore',
    'DeletedConfigurationStoreListResult',
    'EncryptionProperties',
    'ErrorAdditionalInfo',
    'ErrorDetails',
    'ErrorResponse',
    'KeyValue',
    'KeyValueListResult',
    'KeyVaultProperties',
    'LogSpecification',
    'MetricDimension',
    'MetricSpecification',
    'NameAvailabilityStatus',
    'OperationDefinition',
    'OperationDefinitionDisplay',
    'OperationDefinitionListResult',
    'OperationProperties',
    'PrivateEndpoint',
    'PrivateEndpointConnection',
    'PrivateEndpointConnectionListResult',
    'PrivateEndpointConnectionReference',
    'PrivateLinkResource',
    'PrivateLinkResourceListResult',
    'PrivateLinkServiceConnectionState',
    'RegenerateKeyParameters',
    'Resource',
    'ResourceIdentity',
    'ServiceSpecification',
    'Sku',
    'SystemData',
    'TrackedResource',
    'UserIdentity',
    'ActionsRequired',
    'ConfigurationResourceType',
    'ConnectionStatus',
    'CreateMode',
    'CreatedByType',
    'IdentityType',
    'ProvisioningState',
    'PublicNetworkAccess',
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk() | {
"content_hash": "9f641896bf2e913044118d9d11be898b",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 78,
"avg_line_length": 33.95145631067961,
"alnum_prop": 0.7760937946811552,
"repo_name": "Azure/azure-sdk-for-python",
"id": "95fdfb0529b8f8eeef74b2367c7b2c48075c3e35",
"size": "3965",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/appconfiguration/azure-mgmt-appconfiguration/azure/mgmt/appconfiguration/v2022_05_01/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import mock
from oslo_serialization import jsonutils
from nailgun import consts
from nailgun import objects
from nailgun.test.base import BaseMasterNodeSettignsTest
from nailgun.test.base import fake_tasks
from nailgun.utils import reverse
class TestStatsUserTaskManagers(BaseMasterNodeSettignsTest):
    """Integration tests for the create/remove-stats-user task managers.

    PATCHing MasterNodeSettingsHandler is expected to spawn (or skip) a
    ``create_stats_user`` / ``remove_stats_user`` task depending on the
    user's anonymous-statistics choice and cluster state.
    """

    @fake_tasks(override_state={'progress': 100,
                                'status': consts.TASK_STATUSES.ready})
    def test_create_stats_user(self):
        """Opting in to stats on a deployed cluster creates the task."""
        self.env.create(
            nodes_kwargs=[
                {'roles': ['controller'], 'pending_addition': True},
                {'roles': ['controller'], 'pending_addition': True},
                {'roles': ['controller'], 'pending_addition': True},
            ]
        )
        deploy_task = self.env.launch_deployment()
        self.env.wait_ready(deploy_task)
        self.assertFalse(objects.MasterNodeSettings.must_send_stats())
        data = {'settings': {'statistics': {
            'user_choice_saved': {'value': True},
            'send_anonymous_statistic': {'value': True}
        }}}
        resp = self.app.patch(
            reverse('MasterNodeSettingsHandler'),
            headers=self.default_headers,
            params=jsonutils.dumps(data)
        )
        self.assertEqual(200, resp.status_code)
        task = objects.TaskCollection.filter_by(
            None, name=consts.TASK_NAMES.create_stats_user).first()
        self.assertIsNotNone(task)
    @fake_tasks(override_state={'progress': 100,
                                'status': consts.TASK_STATUSES.ready})
    def test_no_tasks_duplication(self):
        """Repeating the same PATCH must not enqueue a duplicate task."""
        self.env.create(
            nodes_kwargs=[
                {'roles': ['controller'], 'pending_addition': True},
            ]
        )
        deploy_task = self.env.launch_deployment()
        self.env.wait_ready(deploy_task)
        # Tuple of tuples (task_name, must_send_stats)
        tasks_params = (
            (consts.TASK_NAMES.create_stats_user, True),
            (consts.TASK_NAMES.remove_stats_user, False)
        )
        for task_name, must_send_stats in tasks_params:
            task_count_before = objects.TaskCollection.filter_by(
                None, name=task_name).count()
            data = {'settings': {'statistics': {
                'user_choice_saved': {'value': True},
                'send_anonymous_statistic': {'value': must_send_stats}
            }}}
            # Flip the current stats state so the PATCH is a real change.
            if must_send_stats:
                self.disable_sending_stats()
            else:
                self.enable_sending_stats()
            # Slow the fake task down so the second PATCH arrives while the
            # first task is still pending.
            with mock.patch('nailgun.task.fake.settings.'
                            'FAKE_TASKS_TICK_INTERVAL', 10):
                resp = self.app.patch(
                    reverse('MasterNodeSettingsHandler'),
                    headers=self.default_headers,
                    params=jsonutils.dumps(data)
                )
                self.assertEqual(200, resp.status_code)
                resp = self.app.patch(
                    reverse('MasterNodeSettingsHandler'),
                    headers=self.default_headers,
                    params=jsonutils.dumps(data)
                )
                self.assertEqual(200, resp.status_code)
                task_count = objects.TaskCollection.filter_by(
                    None, name=task_name).count()
                self.assertEqual(task_count_before + 1, task_count)
    @fake_tasks(override_state={'progress': 100,
                                'status': consts.TASK_STATUSES.ready})
    def test_no_tasks_for_non_operational_clusters(self):
        """No stats-user task is created unless a cluster is operational."""
        self.env.create(
            nodes_kwargs=[
                {'roles': ['controller'], 'pending_addition': True},
            ]
        )
        deploy_task = self.env.launch_deployment()
        self.env.wait_ready(deploy_task)
        cluster = self.env.clusters[0]
        # Tuple of tuples (task_name, must_send_stats)
        tasks_params = (
            (consts.TASK_NAMES.create_stats_user, True),
            (consts.TASK_NAMES.remove_stats_user, False)
        )
        for task_name, must_send_stats in tasks_params:
            with mock.patch('nailgun.objects.MasterNodeSettings.'
                            'must_send_stats', return_value=must_send_stats):
                # Try every non-operational cluster status.
                for cluster_status in consts.CLUSTER_STATUSES:
                    if cluster_status == consts.CLUSTER_STATUSES.operational:
                        continue
                    cluster.status = cluster_status
                    self.env.db().flush()
                    resp = self.app.patch(
                        reverse('MasterNodeSettingsHandler'),
                        headers=self.default_headers,
                        params='{}'
                    )
                    self.assertEqual(200, resp.status_code)
                    task_count = objects.TaskCollection.filter_by(
                        None, name=task_name).count()
                    self.assertEqual(0, task_count)
    def test_create_stats_user_not_required(self):
        """No create task manager runs when stats sending stays disabled."""
        with mock.patch('nailgun.objects.MasterNodeSettings.must_send_stats',
                        return_value=False):
            with mock.patch('nailgun.task.manager.CreateStatsUserTaskManager.'
                            'execute') as executer:
                resp = self.app.patch(
                    reverse('MasterNodeSettingsHandler'),
                    headers=self.default_headers,
                    params=jsonutils.dumps({})
                )
                self.assertEqual(200, resp.status_code)
                self.assertFalse(executer.called)
    def test_create_stats_user_called(self):
        """Enabling stats triggers the create task manager."""
        self.assertFalse(objects.MasterNodeSettings.must_send_stats())
        data = {'settings': {'statistics': {
            'user_choice_saved': {'value': True},
            'send_anonymous_statistic': {'value': True}
        }}}
        with mock.patch('nailgun.task.manager.CreateStatsUserTaskManager.'
                        'execute') as executer:
            resp = self.app.patch(
                reverse('MasterNodeSettingsHandler'),
                headers=self.default_headers,
                params=jsonutils.dumps(data)
            )
            self.assertEqual(200, resp.status_code)
            self.assertTrue(executer.called)
    @fake_tasks(override_state={'progress': 100,
                                'status': consts.TASK_STATUSES.ready})
    def test_remove_stats_user(self):
        """Opting out of stats on a deployed cluster creates the remove task."""
        self.env.create(
            nodes_kwargs=[
                {'roles': ['controller'], 'pending_addition': True},
                {'roles': ['controller'], 'pending_addition': True},
                {'roles': ['controller'], 'pending_addition': True},
            ]
        )
        self.enable_sending_stats()
        self.assertTrue(objects.MasterNodeSettings.must_send_stats())
        deploy_task = self.env.launch_deployment()
        self.env.wait_ready(deploy_task)
        data = {'settings': {'statistics': {
            'user_choice_saved': {'value': True},
            'send_anonymous_statistic': {'value': False}
        }}}
        resp = self.app.patch(
            reverse('MasterNodeSettingsHandler'),
            headers=self.default_headers,
            params=jsonutils.dumps(data)
        )
        self.assertEqual(200, resp.status_code)
        task = objects.TaskCollection.filter_by(
            None, name=consts.TASK_NAMES.remove_stats_user).first()
        self.assertIsNotNone(task)
    def test_remove_stats_user_not_required(self):
        """No remove task manager runs while stats sending stays enabled."""
        with mock.patch('nailgun.objects.MasterNodeSettings.must_send_stats',
                        return_value=True):
            with mock.patch('nailgun.task.manager.RemoveStatsUserTaskManager.'
                            'execute') as executor:
                resp = self.app.patch(
                    reverse('MasterNodeSettingsHandler'),
                    headers=self.default_headers,
                    params=jsonutils.dumps({})
                )
                self.assertEqual(200, resp.status_code)
                self.assertFalse(executor.called)
    def test_remove_stats_user_called(self):
        """Disabling stats triggers the remove task manager."""
        self.enable_sending_stats()
        self.assertTrue(objects.MasterNodeSettings.must_send_stats())
        data = {'settings': {'statistics': {
            'user_choice_saved': {'value': True},
            'send_anonymous_statistic': {'value': False}
        }}}
        with mock.patch('nailgun.task.manager.RemoveStatsUserTaskManager.'
                        'execute') as executor:
            resp = self.app.patch(
                reverse('MasterNodeSettingsHandler'),
                headers=self.default_headers,
                params=jsonutils.dumps(data)
            )
            self.assertEqual(200, resp.status_code)
            self.assertTrue(executor.called)
| {
"content_hash": "40eeb79913d8a33e5d6249f0b42499f4",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 78,
"avg_line_length": 38.05603448275862,
"alnum_prop": 0.5469475591799751,
"repo_name": "eayunstack/fuel-web",
"id": "959d29d42ab5f9b143147f6bc75a9b041118be1f",
"size": "9464",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nailgun/nailgun/test/integration/test_stats_user_task_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "97543"
},
{
"name": "HTML",
"bytes": "2844"
},
{
"name": "JavaScript",
"bytes": "815534"
},
{
"name": "Mako",
"bytes": "1943"
},
{
"name": "Python",
"bytes": "3710735"
},
{
"name": "Ruby",
"bytes": "13649"
},
{
"name": "Shell",
"bytes": "22527"
}
],
"symlink_target": ""
} |
from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union, cast
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._app_service_certificate_orders_operations import build_create_or_update_certificate_request_initial, build_create_or_update_request_initial, build_delete_certificate_request, build_delete_request, build_get_certificate_request, build_get_request, build_list_by_resource_group_request, build_list_certificates_request, build_list_request, build_reissue_request, build_renew_request, build_resend_email_request, build_resend_request_emails_request, build_retrieve_certificate_actions_request, build_retrieve_certificate_email_history_request, build_retrieve_site_seal_request, build_update_certificate_request, build_update_request, build_validate_purchase_information_request, build_verify_domain_ownership_request
from .._vendor import MixinABC
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AppServiceCertificateOrdersOperations: # pylint: disable=too-many-public-methods
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.web.v2019_08_01.aio.WebSiteManagementClient`'s
:attr:`app_service_certificate_orders` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
    """Wire up the operations group from its owning client.

    Accepts (client, config, serializer, deserializer) positionally, or the
    same values via the ``client`` / ``config`` / ``serializer`` /
    ``deserializer`` keywords for any not supplied positionally.
    """
    remaining = list(args)

    def _take(key):
        # Positional arguments win; fall back to the matching keyword.
        return remaining.pop(0) if remaining else kwargs.pop(key)

    self._client = _take("client")
    self._config = _take("config")
    self._serialize = _take("serializer")
    self._deserialize = _take("deserializer")
@distributed_trace
def list(
    self,
    **kwargs: Any
) -> AsyncIterable[_models.AppServiceCertificateOrderCollection]:
    """List all certificate orders in a subscription.

    List all certificate orders in a subscription.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AppServiceCertificateOrderCollection or the result
     of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrderCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Extract per-call header/query overrides so the remaining kwargs can be
    # forwarded verbatim to the pipeline run inside get_next below.
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateOrderCollection]

    # Status codes that map_error turns into typed exceptions; callers may
    # extend or override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template. Later pages:
        # the service-provided continuation link is used as the template.
        if not next_link:
            request = build_list_request(
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
        else:
            request = build_list_request(
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            # Continuation requests are always GET, regardless of the builder.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, items) in the
        # shape AsyncItemPaged expects.
        deserialized = self._deserialize("AppServiceCertificateOrderCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, mapping any non-200 response to an exception.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.CertificateRegistration/certificateOrders"}  # type: ignore
@distributed_trace_async
async def validate_purchase_information(  # pylint: disable=inconsistent-return-statements
    self,
    app_service_certificate_order: _models.AppServiceCertificateOrder,
    **kwargs: Any
) -> None:
    """Validate information for a certificate order.

    Validate information for a certificate order.

    :param app_service_certificate_order: Information for a certificate order.
    :type app_service_certificate_order:
     ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrder
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Typed exceptions for auth/not-found/conflict responses; extendable by
    # the caller through the 'error_map' keyword.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    _json = self._serialize.body(app_service_certificate_order, 'AppServiceCertificateOrder')

    request = _convert_request(build_validate_purchase_information_request(
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        template_url=self.validate_purchase_information.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    # The service answers 204 No Content on success; anything else is an error.
    if response.status_code != 204:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
validate_purchase_information.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.CertificateRegistration/validateCertificateRegistrationInformation"}  # type: ignore
@distributed_trace
def list_by_resource_group(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> AsyncIterable[_models.AppServiceCertificateOrderCollection]:
    """Get certificate orders in a resource group.

    Get certificate orders in a resource group.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AppServiceCertificateOrderCollection or the result
     of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrderCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Extract per-call header/query overrides so the remaining kwargs can be
    # forwarded verbatim to the pipeline run inside get_next below.
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateOrderCollection]

    # Status codes that map_error turns into typed exceptions; callers may
    # extend or override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template. Later pages:
        # the service-provided continuation link is used as the template.
        if not next_link:
            request = build_list_by_resource_group_request(
                resource_group_name=resource_group_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_by_resource_group.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
        else:
            request = build_list_by_resource_group_request(
                resource_group_name=resource_group_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            # Continuation requests are always GET, regardless of the builder.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, items) in the
        # shape AsyncItemPaged expects.
        deserialized = self._deserialize("AppServiceCertificateOrderCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, mapping any non-200 response to an exception.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders"}  # type: ignore
@distributed_trace_async
async def get(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    **kwargs: Any
) -> _models.AppServiceCertificateOrder:
    """Get a certificate order.

    Get a certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AppServiceCertificateOrder, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrder
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateOrder]

    # Typed exceptions for auth/not-found/conflict responses; extendable by
    # the caller through the 'error_map' keyword.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    request = _convert_request(build_get_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}"}  # type: ignore
async def _create_or_update_initial(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    certificate_distinguished_name: _models.AppServiceCertificateOrder,
    **kwargs: Any
) -> _models.AppServiceCertificateOrder:
    """Issue the initial PUT of the create-or-update long-running operation.

    Internal helper for :meth:`begin_create_or_update`; not part of the
    public surface.
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateOrder]

    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    _json = self._serialize.body(certificate_distinguished_name, 'AppServiceCertificateOrder')

    request = _convert_request(build_create_or_update_request_initial(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        template_url=self._create_or_update_initial.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in (200, 201):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both 200 (updated) and 201 (created) carry the same payload shape.
    deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}"}  # type: ignore
@distributed_trace_async
async def begin_create_or_update(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    certificate_distinguished_name: _models.AppServiceCertificateOrder,
    **kwargs: Any
) -> AsyncLROPoller[_models.AppServiceCertificateOrder]:
    """Create or update a certificate purchase order.

    Create or update a certificate purchase order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param certificate_distinguished_name: Distinguished name to use for the certificate order.
    :type certificate_distinguished_name:
     ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrder
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either AppServiceCertificateOrder or the
     result of cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrder]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateOrder]
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    # Only issue the initial request when not resuming from a saved state.
    if cont_token is None:
        raw_result = await self._create_or_update_initial(  # type: ignore
            resource_group_name=resource_group_name,
            certificate_order_name=certificate_order_name,
            certificate_distinguished_name=certificate_distinguished_name,
            api_version=api_version,
            content_type=content_type,
            cls=lambda x,y,z: x,  # keep the raw PipelineResponse for the poller
            headers=_headers,
            params=_params,
            **kwargs
        )
    # 'error_map' applied to the initial request only; don't forward it into
    # the polling method's kwargs below.
    kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Final deserialization once the operation reaches a terminal state.
        deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # polling=True -> default ARM polling; False -> no polling; anything else
    # is treated as a caller-supplied AsyncPollingMethod instance.
    if polling is True:
        polling_method = cast(AsyncPollingMethod, AsyncARMPolling(
            lro_delay,
            **kwargs
        ))  # type: AsyncPollingMethod
    elif polling is False:
        polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
    else:
        polling_method = polling
    if cont_token:
        # Resuming: no initial call was made, so raw_result is never read here.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}"}  # type: ignore
@distributed_trace_async
async def delete(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    **kwargs: Any
) -> None:
    """Delete an existing certificate order.

    Delete an existing certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Typed exceptions for auth/not-found/conflict responses; extendable by
    # the caller through the 'error_map' keyword.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    request = _convert_request(build_delete_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.delete.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    # 200 and 204 both indicate a successful delete.
    if response.status_code not in (200, 204):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}"}  # type: ignore
@distributed_trace_async
async def update(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    certificate_distinguished_name: _models.AppServiceCertificateOrderPatchResource,
    **kwargs: Any
) -> _models.AppServiceCertificateOrder:
    """Create or update a certificate purchase order.

    Create or update a certificate purchase order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param certificate_distinguished_name: Distinguished name to use for the certificate order.
    :type certificate_distinguished_name:
     ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrderPatchResource
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AppServiceCertificateOrder, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateOrder
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateOrder]

    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    _json = self._serialize.body(certificate_distinguished_name, 'AppServiceCertificateOrderPatchResource')

    request = _convert_request(build_update_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        template_url=self.update.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in (200, 201):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    # Both 200 (updated) and 201 (created) carry the same payload shape.
    deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}"}  # type: ignore
@distributed_trace
def list_certificates(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    **kwargs: Any
) -> AsyncIterable[_models.AppServiceCertificateCollection]:
    """List all certificates associated with a certificate order.

    List all certificates associated with a certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either AppServiceCertificateCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Extract per-call header/query overrides so the remaining kwargs can be
    # forwarded verbatim to the pipeline run inside get_next below.
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateCollection]

    # Status codes that map_error turns into typed exceptions; callers may
    # extend or override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template. Later pages:
        # the service-provided continuation link is used as the template.
        if not next_link:
            request = build_list_certificates_request(
                resource_group_name=resource_group_name,
                certificate_order_name=certificate_order_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_certificates.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
        else:
            request = build_list_certificates_request(
                resource_group_name=resource_group_name,
                certificate_order_name=certificate_order_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            # Continuation requests are always GET, regardless of the builder.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, items) in the
        # shape AsyncItemPaged expects.
        deserialized = self._deserialize("AppServiceCertificateCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, mapping any non-200 response to an exception.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_certificates.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates"}  # type: ignore
@distributed_trace_async
async def get_certificate(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    name: str,
    **kwargs: Any
) -> _models.AppServiceCertificateResource:
    """Get the certificate associated with a certificate order.

    Get the certificate associated with a certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param name: Name of the certificate.
    :type name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AppServiceCertificateResource, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateResource
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateResource]

    # Typed exceptions for auth/not-found/conflict responses; extendable by
    # the caller through the 'error_map' keyword.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    request = _convert_request(build_get_certificate_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        name=name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_certificate.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_certificate.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}"}  # type: ignore
async def _create_or_update_certificate_initial(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    name: str,
    key_vault_certificate: _models.AppServiceCertificateResource,
    **kwargs: Any
) -> _models.AppServiceCertificateResource:
    """Issue the initial PUT of the certificate create-or-update LRO.

    Internal helper for :meth:`begin_create_or_update_certificate`; not part
    of the public surface.
    """
    # Split per-call options out of kwargs; what remains goes to the pipeline.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateResource]

    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}) or {})

    _json = self._serialize.body(key_vault_certificate, 'AppServiceCertificateResource')

    request = _convert_request(build_create_or_update_certificate_request_initial(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        name=name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=_json,
        template_url=self._create_or_update_certificate_initial.metadata['url'],
        headers=_headers,
        params=_params,
    ))
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in (200, 201):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both 200 (updated) and 201 (created) carry the same payload shape.
    deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_create_or_update_certificate_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}"}  # type: ignore
@distributed_trace_async
async def begin_create_or_update_certificate(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    name: str,
    key_vault_certificate: _models.AppServiceCertificateResource,
    **kwargs: Any
) -> AsyncLROPoller[_models.AppServiceCertificateResource]:
    """Creates or updates a certificate and associates with key vault secret.

    Starts the long-running operation (LRO) via the private ``_initial`` method
    and returns an :class:`~azure.core.polling.AsyncLROPoller` that drives it to
    completion.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param name: Name of the certificate.
    :type name: str
    :param key_vault_certificate: Key vault certificate resource Id.
    :type key_vault_certificate: ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateResource
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either AppServiceCertificateResource or the
     result of cls(response)
    :rtype:
     ~azure.core.polling.AsyncLROPoller[~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateResource]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Keyword overrides win; otherwise fall back to values embedded in the
    # request params/headers, then to the hard-coded service defaults.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateResource]
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: send the initial request that starts the LRO.
        # cls=lambda x,y,z: x hands the raw PipelineResponse to the poller.
        raw_result = await self._create_or_update_certificate_initial(  # type: ignore
            resource_group_name=resource_group_name,
            certificate_order_name=certificate_order_name,
            name=name,
            key_vault_certificate=key_vault_certificate,
            api_version=api_version,
            content_type=content_type,
            cls=lambda x,y,z: x,
            headers=_headers,
            params=_params,
            **kwargs
        )
    # Remove error_map so the remaining kwargs can be forwarded to the
    # polling method below without leaking request-only options.
    kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final LRO response, applying the custom cls if given.
        deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Resolve the polling strategy: True -> default ARM polling,
    # False -> no polling, anything else -> caller-supplied polling method.
    if polling is True:
        polling_method = cast(AsyncPollingMethod, AsyncARMPolling(
            lro_delay,
            **kwargs
        ))  # type: AsyncPollingMethod
    elif polling is False:
        polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously started operation from its continuation token.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_create_or_update_certificate.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}"}  # type: ignore
@distributed_trace_async
async def delete_certificate(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    name: str,
    **kwargs: Any
) -> None:
    """Delete the certificate associated with a certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param name: Name of the certificate.
    :type name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Assemble and normalize the HTTP request for this operation.
    http_request = build_delete_certificate_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        name=name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.delete_certificate.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code not in (200, 204):
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

delete_certificate.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}"}  # type: ignore
@distributed_trace_async
async def update_certificate(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    name: str,
    key_vault_certificate: _models.AppServiceCertificatePatchResource,
    **kwargs: Any
) -> _models.AppServiceCertificateResource:
    """Creates or updates a certificate and associates with key vault secret.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param name: Name of the certificate.
    :type name: str
    :param key_vault_certificate: Key vault certificate resource Id.
    :type key_vault_certificate:
     ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificatePatchResource
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AppServiceCertificateResource, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2019_08_01.models.AppServiceCertificateResource
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AppServiceCertificateResource]

    # Serialize the patch body and assemble the HTTP request.
    body_content = self._serialize.body(key_vault_certificate, 'AppServiceCertificatePatchResource')
    http_request = build_update_certificate_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        name=name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=body_content,
        template_url=self.update_certificate.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code not in (200, 201):
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    # Both 200 and 201 carry the same payload shape.
    deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized

update_certificate.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}"}  # type: ignore
@distributed_trace_async
async def reissue(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    reissue_certificate_order_request: _models.ReissueCertificateOrderRequest,
    **kwargs: Any
) -> None:
    """Reissue an existing certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param reissue_certificate_order_request: Parameters for the reissue.
    :type reissue_certificate_order_request:
     ~azure.mgmt.web.v2019_08_01.models.ReissueCertificateOrderRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Serialize the request body and assemble the HTTP request.
    body_content = self._serialize.body(reissue_certificate_order_request, 'ReissueCertificateOrderRequest')
    http_request = build_reissue_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=body_content,
        template_url=self.reissue.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 204:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

reissue.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/reissue"}  # type: ignore
@distributed_trace_async
async def renew(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    renew_certificate_order_request: _models.RenewCertificateOrderRequest,
    **kwargs: Any
) -> None:
    """Renew an existing certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param renew_certificate_order_request: Renew parameters.
    :type renew_certificate_order_request:
     ~azure.mgmt.web.v2019_08_01.models.RenewCertificateOrderRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Serialize the request body and assemble the HTTP request.
    body_content = self._serialize.body(renew_certificate_order_request, 'RenewCertificateOrderRequest')
    http_request = build_renew_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=body_content,
        template_url=self.renew.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 204:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

renew.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/renew"}  # type: ignore
@distributed_trace_async
async def resend_email(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    **kwargs: Any
) -> None:
    """Resend certificate email.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Assemble and normalize the HTTP request for this operation.
    http_request = build_resend_email_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.resend_email.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 204:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

resend_email.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/resendEmail"}  # type: ignore
@distributed_trace_async
async def resend_request_emails(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    name_identifier: _models.NameIdentifier,
    **kwargs: Any
) -> None:
    """Verify domain ownership for this certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param name_identifier: Email address.
    :type name_identifier: ~azure.mgmt.web.v2019_08_01.models.NameIdentifier
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Serialize the request body and assemble the HTTP request.
    body_content = self._serialize.body(name_identifier, 'NameIdentifier')
    http_request = build_resend_request_emails_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=body_content,
        template_url=self.resend_request_emails.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 204:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

resend_request_emails.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/resendRequestEmails"}  # type: ignore
@distributed_trace_async
async def retrieve_site_seal(
    self,
    resource_group_name: str,
    certificate_order_name: str,
    site_seal_request: _models.SiteSealRequest,
    **kwargs: Any
) -> _models.SiteSeal:
    """Verify domain ownership for this certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :param site_seal_request: Site seal request.
    :type site_seal_request: ~azure.mgmt.web.v2019_08_01.models.SiteSealRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SiteSeal, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2019_08_01.models.SiteSeal
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.SiteSeal]

    # Serialize the request body and assemble the HTTP request.
    body_content = self._serialize.body(site_seal_request, 'SiteSealRequest')
    http_request = build_retrieve_site_seal_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        content_type=content_type,
        json=body_content,
        template_url=self.retrieve_site_seal.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 200:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    deserialized = self._deserialize('SiteSeal', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized

retrieve_site_seal.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/retrieveSiteSeal"}  # type: ignore
@distributed_trace_async
async def verify_domain_ownership(  # pylint: disable=inconsistent-return-statements
    self,
    resource_group_name: str,
    certificate_order_name: str,
    **kwargs: Any
) -> None:
    """Verify domain ownership for this certificate order.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param certificate_order_name: Name of the certificate order.
    :type certificate_order_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[None]

    # Assemble and normalize the HTTP request for this operation.
    http_request = build_verify_domain_ownership_request(
        resource_group_name=resource_group_name,
        certificate_order_name=certificate_order_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.verify_domain_ownership.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 204:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

verify_domain_ownership.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/verifyDomainOwnership"}  # type: ignore
@distributed_trace_async
async def retrieve_certificate_actions(
    self,
    resource_group_name: str,
    name: str,
    **kwargs: Any
) -> List[_models.CertificateOrderAction]:
    """Retrieve the list of certificate actions.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param name: Name of the certificate order.
    :type name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of CertificateOrderAction, or the result of cls(response)
    :rtype: list[~azure.mgmt.web.v2019_08_01.models.CertificateOrderAction]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[List[_models.CertificateOrderAction]]

    # Assemble and normalize the HTTP request for this operation.
    http_request = build_retrieve_certificate_actions_request(
        resource_group_name=resource_group_name,
        name=name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.retrieve_certificate_actions.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 200:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    deserialized = self._deserialize('[CertificateOrderAction]', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized

retrieve_certificate_actions.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{name}/retrieveCertificateActions"}  # type: ignore
@distributed_trace_async
async def retrieve_certificate_email_history(
    self,
    resource_group_name: str,
    name: str,
    **kwargs: Any
) -> List[_models.CertificateEmail]:
    """Retrieve email history.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param name: Name of the certificate order.
    :type name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of CertificateEmail, or the result of cls(response)
    :rtype: list[~azure.mgmt.web.v2019_08_01.models.CertificateEmail]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map common ARM failure codes onto azure-core exception types; callers
    # may extend or override this mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2019-08-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[List[_models.CertificateEmail]]

    # Assemble and normalize the HTTP request for this operation.
    http_request = build_retrieve_certificate_email_history_request(
        resource_group_name=resource_group_name,
        name=name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.retrieve_certificate_email_history.metadata['url'],
        headers=_headers,
        params=_params,
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)  # type: ignore

    # Run the request through the client pipeline.
    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        http_request, stream=False, **kwargs
    )
    raw_response = pipeline_response.http_response

    if raw_response.status_code != 200:
        map_error(status_code=raw_response.status_code, response=raw_response, error_map=error_map)
        error_model = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=raw_response, model=error_model, error_format=ARMErrorFormat)

    deserialized = self._deserialize('[CertificateEmail]', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized

retrieve_certificate_email_history.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{name}/retrieveEmailHistory"}  # type: ignore
| {
"content_hash": "1448941a6e75b9cfad5512445f21619a",
"timestamp": "",
"source": "github",
"line_count": 1625,
"max_line_length": 733,
"avg_line_length": 46.162461538461535,
"alnum_prop": 0.6460793985122777,
"repo_name": "Azure/azure-sdk-for-python",
"id": "8f11eb857ad5d866ca78505136f1196affee32bf",
"size": "75514",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2019_08_01/aio/operations/_app_service_certificate_orders_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
"""autogenerated by genpy from emotiv_epoc/EEGFrame.msg. Do not edit."""
import sys
# True when running under Python 3 (hexversion above 0x03000000, the 3.0.0
# release). The comparison already yields a bool; no conditional needed.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
import std_msgs.msg
class EEGFrame(genpy.Message):
    """ROS message class for one frame of data from the emotiv_epoc package.

    Autogenerated by genpy from emotiv_epoc/EEGFrame.msg; do not edit by hand.
    """
    # MD5 checksum of the message definition, used by ROS to verify that
    # publisher and subscriber agree on the message layout.
    _md5sum = "92a93661fa971fb7494f0877c88d2361"
    # Fully-qualified ROS message type name (package/MessageName).
    _type = "emotiv_epoc/EEGFrame"
    _has_header = True  # flag to mark the presence of a Header object
    # Full .msg definition text, including the embedded std_msgs/Header spec.
    # NOTE: this is a runtime string used by ROS tooling; keep it verbatim.
    _full_text = """Header header
int32 accel_x
int32 accel_y
uint32 channel_count
string[] channel_names
int32[] signals
int32[] qualities
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
"""
    # Field names and their ROS types, in .msg declaration order.
    __slots__ = ['header','accel_x','accel_y','channel_count','channel_names','signals','qualities']
    _slot_types = ['std_msgs/Header','int32','int32','uint32','string[]','int32[]','int32[]']
def __init__(self, *args, **kwds):
    """Construct an EEGFrame.

    Message fields that are implicitly or explicitly set to None receive
    their default values. Keyword arguments are recommended, as they are
    robust to future message changes; positional arguments must supply the
    complete set of field values in .msg order and cannot be mixed with
    keyword arguments.
    The available fields are:
       header,accel_x,accel_y,channel_count,channel_names,signals,qualities

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if not (args or kwds):
        # No explicit values given: set every field to its default.
        self.header = std_msgs.msg.Header()
        self.accel_x = 0
        self.accel_y = 0
        self.channel_count = 0
        self.channel_names = []
        self.signals = []
        self.qualities = []
        return
    super(EEGFrame, self).__init__(*args, **kwds)
    # Message fields cannot be None; replace any unset field with its default.
    if self.header is None:
        self.header = std_msgs.msg.Header()
    if self.accel_x is None:
        self.accel_x = 0
    if self.accel_y is None:
        self.accel_y = 0
    if self.channel_count is None:
        self.channel_count = 0
    if self.channel_names is None:
        self.channel_names = []
    if self.signals is None:
        self.signals = []
    if self.qualities is None:
        self.qualities = []
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_get_struct_2iI().pack(_x.accel_x, _x.accel_y, _x.channel_count))
length = len(self.channel_names)
buff.write(_struct_I.pack(length))
for val1 in self.channel_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.signals)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *self.signals))
length = len(self.qualities)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *self.qualities))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 12
(_x.accel_x, _x.accel_y, _x.channel_count,) = _get_struct_2iI().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.channel_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.channel_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.signals = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.qualities = struct.unpack(pattern, str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_get_struct_2iI().pack(_x.accel_x, _x.accel_y, _x.channel_count))
length = len(self.channel_names)
buff.write(_struct_I.pack(length))
for val1 in self.channel_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.signals)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(self.signals.tostring())
length = len(self.qualities)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(self.qualities.tostring())
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 12
(_x.accel_x, _x.accel_y, _x.channel_count,) = _get_struct_2iI().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.channel_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.channel_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.signals = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.qualities = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
# Shared Struct for a single little-endian uint32 (used as the length
# prefix for strings and arrays); genpy provides a pre-built instance.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the module-wide ``<I`` Struct instance."""
    return _struct_I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_2iI = None
def _get_struct_2iI():
global _struct_2iI
if _struct_2iI is None:
_struct_2iI = struct.Struct("<2iI")
return _struct_2iI
| {
"content_hash": "30625d24759512c55bba92ff3ad2daec",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 145,
"avg_line_length": 34.284745762711864,
"alnum_prop": 0.600751433656318,
"repo_name": "kaaninan/Centauri",
"id": "1fd45cc3e42e63e8e492f795d52f32b0bac87863",
"size": "10168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Software/devel/lib/python2.7/dist-packages/emotiv_epoc/msg/_EEGFrame.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "8390"
},
{
"name": "C++",
"bytes": "26871"
},
{
"name": "CMake",
"bytes": "160481"
},
{
"name": "CSS",
"bytes": "159254"
},
{
"name": "Common Lisp",
"bytes": "40769"
},
{
"name": "Eagle",
"bytes": "487883"
},
{
"name": "HTML",
"bytes": "10902"
},
{
"name": "JavaScript",
"bytes": "30548"
},
{
"name": "Python",
"bytes": "78173"
},
{
"name": "Shell",
"bytes": "3647"
}
],
"symlink_target": ""
} |
import datetime
import os
import sys
import sphinx_bootstrap_theme
sys.path.insert(0, os.path.abspath(os.path.join('..', '..', 'src')))
import dnutils
sys.path.insert(0, os.path.abspath(os.path.join('..', '%s' % dnutils.version.__basedir__)))
def setup(app):
app.add_css_file('default.css') # may also be an URL
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'dnutils'
# Copyright year is computed at build time so it never goes stale.
copyright = u'%s, Daniel Nyga' % datetime.date.today().year
author = u'Daniel Nyga'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = dnutils.version.VERSION_STRING_SHORT
# The full version, including alpha/beta/rc tags.
release = dnutils.version.VERSION_STRING_FULL
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): Sphinx >= 5 warns on language=None and falls back to 'en';
# consider setting 'en' explicitly -- confirm the targeted Sphinx version.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Uses the third-party sphinx_bootstrap_theme package imported above.
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {
#     'cssfiles': ['_static/default.css']
# }
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'dnutilsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'dnutils.tex', u'dnutils Documentation',
     u'Daniel Nyga', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'dnutils', u'dnutils Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'dnutils', u'dnutils Documentation',
     author, 'dnutils', 'One line description of project.',
     'Miscellaneous'),
]
| {
"content_hash": "3526373e2be13b93230497ae96e0c6dc",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 91,
"avg_line_length": 29.398692810457515,
"alnum_prop": 0.6658514895509116,
"repo_name": "danielnyga/dnutils",
"id": "cc9a718b8d2c0b3e2f00c705bbd2c828b23cccb6",
"size": "5158",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "967"
},
{
"name": "Python",
"bytes": "166512"
}
],
"symlink_target": ""
} |
def is_list_or_tuple(obj, length = None, min_length = None, obj_type = None):
    '''
    Assert that *obj* is a list or tuple, optionally checking its exact
    length, minimum length, and the type of every element.

    Raises AssertionError on any failed check; returns None on success.
    (NOTE: being assert-based, these checks are stripped under ``python -O``.)

    >>> is_list_or_tuple([1,2])
    >>> is_list_or_tuple((1,2))
    >>> is_list_or_tuple('asdf')
    Traceback (most recent call last):
    ...
    AssertionError: asdf is expected to be a list or tuple, but it is <type 'str'>
    >>> is_list_or_tuple([1,2],length=2)
    >>> is_list_or_tuple([1,2],length=3)
    Traceback (most recent call last):
    ...
    AssertionError: [1, 2] has length of 2, but is expected to be 3
    >>> is_list_or_tuple(['asdf',u'moo'], obj_type = basestring)
    >>> is_list_or_tuple([1,u'moo'], obj_type = basestring)
    Traceback (most recent call last):
    ...
    AssertionError: one of the entries in [1, u'moo'] is not of type <type 'basestring'>
    >>> is_list_or_tuple(['boo',u'moo'], obj_type = basestring, length = 'asdf')
    Traceback (most recent call last):
    ...
    AssertionError: length argument (asdf) should be integer, but it is <type 'str'>
    '''
    assert isinstance(obj, (list,tuple)), '%s is expected to be a list or tuple, but it is %s' % (obj, obj.__class__)
    if length is not None:
        assert isinstance(length, int), 'length argument (%s) should be integer, but it is %s' % (length, length.__class__)
        assert len(obj) == length, '%s has length of %s, but is expected to be %s' % (obj, len(obj), length)
    if min_length is not None:
        assert isinstance(min_length, int), 'min_length argument (%s) should be integer, but it is %s' % (min_length, min_length.__class__)
        assert len(obj) >= min_length, '%s has length of %s, but is expected to be at least %s' % (obj, len(obj), min_length)
    if obj_type is not None:
        # all() short-circuits and works on both Python 2 and 3; the
        # original reduce() call relied on reduce being a builtin, which
        # it is not on Python 3 (NameError there).
        assert all(isinstance(entry, obj_type) for entry in obj), 'one of the entries in %s is not of type %s' % (obj,obj_type)
def is_instance(obj, obj_type):
    """Assert that *obj* is an instance of *obj_type* (raises AssertionError otherwise)."""
    actual = obj.__class__
    assert isinstance(obj, obj_type), '%s is expected to be of type %s but it is %s' % (obj, obj_type, actual)
if __name__ == "__main__":
import doctest
doctest.testmod() | {
"content_hash": "10e5ccd907c2b736c917210de9e3d644",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 139,
"avg_line_length": 46.53333333333333,
"alnum_prop": 0.6031518624641834,
"repo_name": "theorm/ellison",
"id": "b78017a94562482f66d4a3a5981ffad9779e1a93",
"size": "2094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ellison/validators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "24093"
}
],
"symlink_target": ""
} |
from PyQt5.QtWidgets import QWidget
from PyQt5.QtGui import QPainter, QColor, QPolygon
from PyQt5.QtCore import QPoint, QRect, QSize
class Ksztalty:
    """Helper class emulating an enumeration of the drawable shapes."""
    Rect, Ellipse, Polygon, Line = 0, 1, 2, 3
class Ksztalt(QWidget):
    """Widget that draws a selected shape (rectangle, ellipse, polygon or line)."""
    # coordinates of the rectangle and of the triangle
    prost = QRect(1, 1, 101, 101)
    punkty = QPolygon([
        QPoint(1, 101), # starting point (x, y)
        QPoint(51, 1),
        QPoint(101, 101)])
    def __init__(self, parent, ksztalt=Ksztalty.Rect):
        super(Ksztalt, self).__init__(parent)
        # shape to be drawn
        self.ksztalt = ksztalt
        # outline and fill colours in RGB format
        self.kolorO = QColor(0, 0, 0)
        self.kolorW = QColor(255, 255, 255)
    def paintEvent(self, e):
        # Standard Qt paint hook: delegate all drawing to rysujFigury().
        qp = QPainter()
        qp.begin(self)
        self.rysujFigury(e, qp)
        qp.end()
    def rysujFigury(self, e, qp):
        # Draw the currently selected shape with the configured colours.
        qp.setPen(self.kolorO) # outline colour
        qp.setBrush(self.kolorW) # fill colour
        qp.setRenderHint(QPainter.Antialiasing) # smooth the shape edges
        if self.ksztalt == Ksztalty.Rect:
            qp.drawRect(self.prost)
        elif self.ksztalt == Ksztalty.Ellipse:
            qp.drawEllipse(self.prost)
        elif self.ksztalt == Ksztalty.Polygon:
            qp.drawPolygon(self.punkty)
        elif self.ksztalt == Ksztalty.Line:
            qp.drawLine(self.prost.topLeft(), self.prost.bottomRight())
        else: # default shape: Rect
            qp.drawRect(self.prost)
    def sizeHint(self):
        # Preferred size: just big enough for the 101x101 drawing area.
        return QSize(102, 102)
    def minimumSizeHint(self):
        return QSize(102, 102)
    def ustawKsztalt(self, ksztalt):
        # Change the shape to draw and schedule a repaint.
        self.ksztalt = ksztalt
        self.update()
    def ustawKolorW(self, r=0, g=0, b=0):
        # Change the fill colour (RGB) and schedule a repaint.
        self.kolorW = QColor(r, g, b)
        self.update()
| {
"content_hash": "84b1a97581d32f3340a5ad328f812a06",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 71,
"avg_line_length": 30.746031746031747,
"alnum_prop": 0.6159008776458441,
"repo_name": "koduj-z-klasa/python101",
"id": "47a6ca85c04d086f0709a5151991668cb81b8f80",
"size": "1979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/pyqt/widzety/ksztalty.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2740"
},
{
"name": "HTML",
"bytes": "18056"
},
{
"name": "Python",
"bytes": "157924"
}
],
"symlink_target": ""
} |
"""
Wrapper functions that will manage the creation of the objects,
build the configuration, and execute the algorithm required to optimise
a set of ctrl pulses for a given (quantum) system.
The fidelity error is some measure of distance of the system evolution
from the given target evolution in the time allowed for the evolution.
The functions minimise this fidelity error wrt the piecewise control
amplitudes in the timeslots
There are currently two quantum control pulse optimisation algorithms
implemented in this library. They are accessible through the methods
in this module. Both the algorithms use the scipy.optimize methods
to minimise the fidelity error with respect to the variables that define
the pulse.
GRAPE
-----
The default algorithm (as it was implemented here first) is GRAPE
GRadient Ascent Pulse Engineering [1][2]. It uses a gradient based method such
as BFGS to minimise the fidelity error. This makes convergence very quick
when an exact gradient can be calculated, but this limits the factors that can
taken into account in the fidelity.
CRAB
----
The CRAB [3][4] algorithm was developed at the University of Ulm.
In full it is the Chopped RAndom Basis algorithm.
The main difference is that it reduces the number of optimisation variables
by defining the control pulses by expansions of basis functions,
where the variables are the coefficients. Typically a Fourier series is chosen,
i.e. the variables are the Fourier coefficients.
Therefore it does not need to compute an explicit gradient.
By default it uses the Nelder-Mead method for fidelity error minimisation.
References
----------
1. N Khaneja et. al.
Optimal control of coupled spin dynamics: Design of NMR pulse sequences
by gradient ascent algorithms. J. Magn. Reson. 172, 296–305 (2005).
2. Shai Machnes et.al
DYNAMO - Dynamic Framework for Quantum Optimal Control
arXiv.1011.4874
3. Doria, P., Calarco, T. & Montangero, S.
Optimal Control Technique for Many-Body Quantum Dynamics.
Phys. Rev. Lett. 106, 1–4 (2011).
4. Caneva, T., Calarco, T. & Montangero, S.
Chopped random-basis quantum optimization.
Phys. Rev. A - At. Mol. Opt. Phys. 84, (2011).
"""
import numpy as np
import warnings
# QuTiP
from qutip.qobj import Qobj
import qutip.logging_utils as logging
logger = logging.get_logger()
# QuTiP control modules
import qutip.control.optimconfig as optimconfig
import qutip.control.dynamics as dynamics
import qutip.control.termcond as termcond
import qutip.control.optimizer as optimizer
import qutip.control.stats as stats
import qutip.control.errors as errors
import qutip.control.fidcomp as fidcomp
import qutip.control.propcomp as propcomp
import qutip.control.pulsegen as pulsegen
#import qutip.control.pulsegencrab as pulsegencrab
warnings.simplefilter('always', DeprecationWarning) #turn off filter
def _param_deprecation(message, stacklevel=3):
"""
Issue deprecation warning
Using stacklevel=3 will ensure message refers the function
calling with the deprecated parameter,
"""
warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)
def _upper_safe(s):
try:
s = s.upper()
except:
pass
return s
def optimize_pulse(
        drift, ctrls, initial, target,
        num_tslots=None, evo_time=None, tau=None,
        amp_lbound=None, amp_ubound=None,
        fid_err_targ=1e-10, min_grad=1e-10,
        max_iter=500, max_wall_time=180,
        alg='GRAPE', alg_params=None,
        optim_params=None, optim_method='DEF', method_params=None,
        optim_alg=None, max_metric_corr=None, accuracy_factor=None,
        dyn_type='GEN_MAT', dyn_params=None,
        prop_type='DEF', prop_params=None,
        fid_type='DEF', fid_params=None,
        phase_option=None, fid_err_scale_factor=None,
        tslot_type='DEF', tslot_params=None,
        amp_update_mode=None,
        init_pulse_type='DEF', init_pulse_params=None,
        pulse_scaling=1.0, pulse_offset=0.0,
        ramping_pulse_type=None, ramping_pulse_params=None,
        log_level=logging.NOTSET, out_file_ext=None, gen_stats=False):
    """
    Optimise a control pulse to minimise the fidelity error. The dynamics of
    the system in any given timeslot are governed by the combined dynamics
    generator, i.e. the sum of the ``drift + ctrl_amp[j]*ctrls[j]``.
    The control pulse is an ``[n_ts, n_ctrls]`` array of piecewise amplitudes
    Starting from an initial (typically random) pulse, a multivariable
    optimisation algorithm attempts to determines the optimal values for the
    control pulse to minimise the fidelity error. The fidelity error is some
    measure of distance of the system evolution from the given target evolution
    in the time allowed for the evolution.
    Parameters
    ----------
    drift : Qobj or list of Qobj
        The underlying dynamics generator of the system can provide list (of
        length ``num_tslots``) for time dependent drift.
    ctrls : List of Qobj or array like [num_tslots, evo_time]
        A list of control dynamics generators. These are scaled by the
        amplitudes to alter the overall dynamics. Array-like input can be
        provided for time dependent control generators.
    initial : Qobj
        Starting point for the evolution. Typically the identity matrix.
    target : Qobj
        Target transformation, e.g. gate or state, for the time evolution.
    num_tslots : integer or None
        Number of timeslots. ``None`` implies that timeslots will be given in
        the tau array.
    evo_time : float or None
        Total time for the evolution. ``None`` implies that timeslots will be
        given in the tau array.
    tau : array[num_tslots] of floats or None
        Durations for the timeslots. If this is given then ``num_tslots`` and
        ``evo_time`` are derived from it. ``None`` implies that timeslot
        durations will be equal and calculated as ``evo_time/num_tslots``.
    amp_lbound : float or list of floats
        Lower boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.
    amp_ubound : float or list of floats
        Upper boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.
    fid_err_targ : float
        Fidelity error target. Pulse optimisation will terminate when the
        fidelity error falls below this value.
    min_grad : float
        Minimum gradient. When the sum of the squares of the gradients wrt to
        the control amplitudes falls below this value, the optimisation
        terminates, assuming local minima.
    max_iter : integer
        Maximum number of iterations of the optimisation algorithm.
    max_wall_time : float
        Maximum allowed elapsed time for the optimisation algorithm.
    alg : string
        Algorithm to use in pulse optimisation. Options are:
        - 'GRAPE' (default) - GRadient Ascent Pulse Engineering
        - 'CRAB' - Chopped RAndom Basis
    alg_params : Dictionary
        Options that are specific to the algorithm see above.
    optim_params : Dictionary
        The key value pairs are the attribute name and value used to set
        attribute values. Note: attributes are created if they do not exist
        already, and are overwritten if they do. Note: ``method_params`` are
        applied afterwards and so may override these.
    optim_method : string
        A ``scipy.optimize.minimize`` method that will be used to optimise the
        pulse for minimum fidelity error. Note that ``FMIN``, ``FMIN_BFGS`` &
        ``FMIN_L_BFGS_B`` will all result in calling these specific
        ``scipy.optimize methods``. Note the ``LBFGSB`` is equivalent to
        ``FMIN_L_BFGS_B`` for backwards compatibility reasons. Supplying DEF
        will given alg dependent result:
        - GRAPE - Default ``optim_method`` is ``FMIN_L_BFGS_B``
        - CRAB - Default ``optim_method`` is ``FMIN``
    method_params : dict
        Parameters for the ``optim_method``. Note that where there is an
        attribute of the :obj:`~qutip.control.optimizer.Optimizer` object or
        the termination_conditions matching the key that attribute.
        Otherwise, and in some case also, they are assumed to be method_options
        for the ``scipy.optimize.minimize`` method.
    optim_alg : string
        Deprecated. Use ``optim_method``.
    max_metric_corr : integer
        Deprecated. Use ``method_params`` instead.
    accuracy_factor : float
        Deprecated. Use ``method_params`` instead.
    dyn_type : string
        Dynamics type, i.e. the type of matrix used to describe the dynamics.
        Options are ``UNIT``, ``GEN_MAT``, ``SYMPL``
        (see :obj:`~qutip.control.dynamics.Dynamics` classes for details).
    dyn_params : dict
        Parameters for the :obj:`~qutip.control.dynamics.Dynamics` object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.
    prop_type : string
        Propagator type i.e. the method used to calculate the propagators and
        propagator gradient for each timeslot options are DEF, APPROX, DIAG,
        FRECHET, AUG_MAT. DEF will use the default for the specific
        ``dyn_type`` (see :obj:`~qutip.control.propcomp.PropagatorComputer`
        classes for details).
    prop_params : dict
        Parameters for the :obj:`~qutip.control.propcomp.PropagatorComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.
    fid_type : string
        Fidelity error (and fidelity error gradient) computation method.
        Options are DEF, UNIT, TRACEDIFF, TD_APPROX. DEF will use the default
        for the specific ``dyn_type``
        (See :obj:`~qutip.control.fidcomp.FidelityComputer` classes for
        details).
    fid_params : dict
        Parameters for the :obj:`~qutip.control.fidcomp.FidelityComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.
    phase_option : string
        Deprecated. Pass in ``fid_params`` instead.
    fid_err_scale_factor : float
        Deprecated. Use ``scale_factor`` key in ``fid_params`` instead.
    tslot_type : string
        Method for computing the dynamics generators, propagators and evolution
        in the timeslots. Options: DEF, UPDATE_ALL, DYNAMIC. UPDATE_ALL is
        the only one that currently works.
        (See :obj:`~qutip.control.tslotcomp.TimeslotComputer` classes for
        details.)
    tslot_params : dict
        Parameters for the :obj:`~qutip.control.tslotcomp.TimeslotComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.
    amp_update_mode : string
        Deprecated. Use ``tslot_type`` instead.
    init_pulse_type : string
        Type / shape of pulse(s) used to initialise the control amplitudes.
        Options (GRAPE) include: RND, LIN, ZERO, SINE, SQUARE, TRIANGLE, SAW.
        Default is RND. (see :obj:`~qutip.control.pulsegen.PulseGen` classes
        for details). For the CRAB the this the ``guess_pulse_type``.
    init_pulse_params : dict
        Parameters for the initial / guess pulse generator object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.
    pulse_scaling : float
        Linear scale factor for generated initial / guess pulses. By default
        initial pulses are generated with amplitudes in the range (-1.0, 1.0).
        These will be scaled by this parameter.
    pulse_offset : float
        Linear offset for the pulse. That is this value will be added to any
        initial / guess pulses generated.
    ramping_pulse_type : string
        Type of pulse used to modulate the control pulse. It's intended use
        for a ramping modulation, which is often required in experimental
        setups. This is only currently implemented in CRAB. GAUSSIAN_EDGE was
        added for this purpose.
    ramping_pulse_params : dict
        Parameters for the ramping pulse generator object. The key value pairs
        are assumed to be attribute name value pairs. They applied after the
        object is created.
    log_level : integer
        Level of messaging output from the logger. Options are attributes of
        :obj:`qutip.logging_utils`, in decreasing levels of messaging, are:
        DEBUG_INTENSE, DEBUG_VERBOSE, DEBUG, INFO, WARN, ERROR, CRITICAL.
        Anything WARN or above is effectively 'quiet' execution, assuming
        everything runs as expected. The default NOTSET implies that the level
        will be taken from the QuTiP settings file, which by default is WARN.
    out_file_ext : string or None
        Files containing the initial and final control pulse amplitudes are
        saved to the current directory. The default name will be postfixed
        with this extension. Setting this to None will suppress the output of
        files.
    gen_stats : boolean
        If set to True then statistics for the optimisation run will be
        generated - accessible through attributes of the stats object.
    Returns
    -------
    opt : OptimResult
        Returns instance of :obj:`~qutip.control.optimresult.OptimResult`,
        which has attributes giving the reason for termination, final fidelity
        error, final evolution final amplitudes, statistics etc.
    """
    if log_level == logging.NOTSET:
        log_level = logger.getEffectiveLevel()
    else:
        logger.setLevel(log_level)
    # The parameters types are checked in create_pulse_optimizer
    # so no need to do so here
    # However, the deprecation management is repeated here
    # so that the stack level is correct
    # Each deprecated parameter is mapped onto its modern replacement
    # (either directly, or as a key in the relevant *_params dict) and a
    # DeprecationWarning is issued pointing at the caller.
    if not optim_alg is None:
        optim_method = optim_alg
        _param_deprecation(
            "The 'optim_alg' parameter is deprecated. "
            "Use 'optim_method' instead")
    if not max_metric_corr is None:
        if isinstance(method_params, dict):
            if not 'max_metric_corr' in method_params:
                 method_params['max_metric_corr'] = max_metric_corr
        else:
            method_params = {'max_metric_corr':max_metric_corr}
        _param_deprecation(
            "The 'max_metric_corr' parameter is deprecated. "
            "Use 'max_metric_corr' in method_params instead")
    if not accuracy_factor is None:
        if isinstance(method_params, dict):
            if not 'accuracy_factor' in method_params:
                 method_params['accuracy_factor'] = accuracy_factor
        else:
            method_params = {'accuracy_factor':accuracy_factor}
        _param_deprecation(
            "The 'accuracy_factor' parameter is deprecated. "
            "Use 'accuracy_factor' in method_params instead")
    # phase_option
    if not phase_option is None:
        if isinstance(fid_params, dict):
            if not 'phase_option' in fid_params:
                 fid_params['phase_option'] = phase_option
        else:
            fid_params = {'phase_option':phase_option}
        _param_deprecation(
            "The 'phase_option' parameter is deprecated. "
            "Use 'phase_option' in fid_params instead")
    # fid_err_scale_factor
    if not fid_err_scale_factor is None:
        if isinstance(fid_params, dict):
            if not 'fid_err_scale_factor' in fid_params:
                 fid_params['scale_factor'] = fid_err_scale_factor
        else:
            fid_params = {'scale_factor':fid_err_scale_factor}
        _param_deprecation(
            "The 'fid_err_scale_factor' parameter is deprecated. "
            "Use 'scale_factor' in fid_params instead")
    # amp_update_mode
    if not amp_update_mode is None:
        amp_update_mode_up = _upper_safe(amp_update_mode)
        if amp_update_mode_up == 'ALL':
            tslot_type = 'UPDATE_ALL'
        else:
            tslot_type = amp_update_mode
        _param_deprecation(
            "The 'amp_update_mode' parameter is deprecated. "
            "Use 'tslot_type' instead")
    # Build the Optimizer (and its Dynamics etc.) from the validated options.
    optim = create_pulse_optimizer(
        drift, ctrls, initial, target,
        num_tslots=num_tslots, evo_time=evo_time, tau=tau,
        amp_lbound=amp_lbound, amp_ubound=amp_ubound,
        fid_err_targ=fid_err_targ, min_grad=min_grad,
        max_iter=max_iter, max_wall_time=max_wall_time,
        alg=alg, alg_params=alg_params, optim_params=optim_params,
        optim_method=optim_method, method_params=method_params,
        dyn_type=dyn_type, dyn_params=dyn_params,
        prop_type=prop_type, prop_params=prop_params,
        fid_type=fid_type, fid_params=fid_params,
        init_pulse_type=init_pulse_type, init_pulse_params=init_pulse_params,
        pulse_scaling=pulse_scaling, pulse_offset=pulse_offset,
        ramping_pulse_type=ramping_pulse_type,
        ramping_pulse_params=ramping_pulse_params,
        log_level=log_level, gen_stats=gen_stats)
    dyn = optim.dynamics
    dyn.init_timeslots()
    # Generate initial pulses for each control
    # (CRAB has one pulse generator per control; GRAPE has a single one)
    init_amps = np.zeros([dyn.num_tslots, dyn.num_ctrls])
    if alg == 'CRAB':
        for j in range(dyn.num_ctrls):
            pgen = optim.pulse_generator[j]
            pgen.init_pulse()
            init_amps[:, j] = pgen.gen_pulse()
    else:
        pgen = optim.pulse_generator
        for j in range(dyn.num_ctrls):
            init_amps[:, j] = pgen.gen_pulse()
    # Initialise the starting amplitudes
    dyn.initialize_controls(init_amps)
    if log_level <= logging.INFO:
        msg = "System configuration:\n"
        dg_name = "dynamics generator"
        if dyn_type == 'UNIT':
            dg_name = "Hamiltonian"
        if dyn.time_depend_drift:
            msg += "Initial drift {}:\n".format(dg_name)
            msg += str(dyn.drift_dyn_gen[0])
        else:
            msg += "Drift {}:\n".format(dg_name)
            msg += str(dyn.drift_dyn_gen)
        for j in range(dyn.num_ctrls):
            msg += "\nControl {} {}:\n".format(j+1, dg_name)
            msg += str(dyn.ctrl_dyn_gen[j])
        msg += "\nInitial state / operator:\n"
        msg += str(dyn.initial)
        msg += "\nTarget state / operator:\n"
        msg += str(dyn.target)
        logger.info(msg)
    if out_file_ext is not None:
        # Save initial amplitudes to a text file
        pulsefile = "ctrl_amps_initial_" + out_file_ext
        dyn.save_amps(pulsefile)
        if log_level <= logging.INFO:
            logger.info("Initial amplitudes output to file: " + pulsefile)
    # Start the optimisation
    result = optim.run_optimization()
    if out_file_ext is not None:
        # Save final amplitudes to a text file
        pulsefile = "ctrl_amps_final_" + out_file_ext
        dyn.save_amps(pulsefile)
        if log_level <= logging.INFO:
            logger.info("Final amplitudes output to file: " + pulsefile)
    return result
def optimize_pulse_unitary(
        H_d, H_c, U_0, U_targ,
        num_tslots=None, evo_time=None, tau=None,
        amp_lbound=None, amp_ubound=None,
        fid_err_targ=1e-10, min_grad=1e-10,
        max_iter=500, max_wall_time=180,
        alg='GRAPE', alg_params=None,
        optim_params=None, optim_method='DEF', method_params=None,
        optim_alg=None, max_metric_corr=None, accuracy_factor=None,
        phase_option='PSU',
        dyn_params=None, prop_params=None, fid_params=None,
        tslot_type='DEF', tslot_params=None,
        amp_update_mode=None,
        init_pulse_type='DEF', init_pulse_params=None,
        pulse_scaling=1.0, pulse_offset=0.0,
        ramping_pulse_type=None, ramping_pulse_params=None,
        log_level=logging.NOTSET, out_file_ext=None, gen_stats=False):
    """
    Optimise a control pulse to minimise the fidelity error, assuming that the
    dynamics of the system are generated by unitary operators. This function
    is simply a wrapper for optimize_pulse, where the appropriate options for
    unitary dynamics are chosen and the parameter names are in the format
    familiar to unitary dynamics The dynamics of the system in any given
    timeslot are governed by the combined Hamiltonian, i.e. the sum of the
    ``H_d + ctrl_amp[j]*H_c[j]`` The control pulse is an ``[n_ts, n_ctrls]``
    array of piecewise amplitudes Starting from an initial (typically random)
    pulse, a multivariable optimisation algorithm attempts to determine the
    optimal values for the control pulse to minimise the fidelity error The
    maximum fidelity for a unitary system is 1, i.e. when the time evolution
    resulting from the pulse is equivalent to the target. And therefore the
    fidelity error is ``1 - fidelity``.

    Parameters
    ----------
    H_d : Qobj or list of Qobj
        Drift (aka system) the underlying Hamiltonian of the system can provide
        list (of length ``num_tslots``) for time dependent drift.

    H_c : List of Qobj or array like [num_tslots, evo_time]
        A list of control Hamiltonians. These are scaled by the amplitudes to
        alter the overall dynamics. Array-like input can be provided for time
        dependent control generators.

    U_0 : Qobj
        Starting point for the evolution. Typically the identity matrix.

    U_targ : Qobj
        Target transformation, e.g. gate or state, for the time evolution.

    num_tslots : integer or None
        Number of timeslots. ``None`` implies that timeslots will be given in
        the tau array.

    evo_time : float or None
        Total time for the evolution. ``None`` implies that timeslots will be
        given in the tau array.

    tau : array[num_tslots] of floats or None
        Durations for the timeslots. If this is given then ``num_tslots`` and
        ``evo_time`` are derived from it. ``None`` implies that timeslot
        durations will be equal and calculated as ``evo_time/num_tslots``.

    amp_lbound : float or list of floats
        Lower boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.

    amp_ubound : float or list of floats
        Upper boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.

    fid_err_targ : float
        Fidelity error target. Pulse optimisation will terminate when the
        fidelity error falls below this value.

    min_grad : float
        Minimum gradient. When the sum of the squares of the gradients wrt to
        the control amplitudes falls below this value, the optimisation
        terminates, assuming local minima.

    max_iter : integer
        Maximum number of iterations of the optimisation algorithm.

    max_wall_time : float
        Maximum allowed elapsed time for the optimisation algorithm.

    alg : string
        Algorithm to use in pulse optimisation. Options are:

        - 'GRAPE' (default) - GRadient Ascent Pulse Engineering
        - 'CRAB' - Chopped RAndom Basis

    alg_params : Dictionary
        options that are specific to the algorithm see above

    optim_params : Dictionary
        The key value pairs are the attribute name and value used to set
        attribute values. Note: attributes are created if they do not exist
        already, and are overwritten if they do. Note: ``method_params`` are
        applied afterwards and so may override these.

    optim_method : string
        A ``scipy.optimize.minimize`` method that will be used to optimise the
        pulse for minimum fidelity error Note that ``FMIN``, ``FMIN_BFGS`` &
        ``FMIN_L_BFGS_B`` will all result in calling these specific
        scipy.optimize methods Note the ``LBFGSB`` is equivalent to
        ``FMIN_L_BFGS_B`` for backwards compatibility reasons. Supplying
        ``DEF`` will given algorithm-dependent result:

        - GRAPE - Default ``optim_method`` is FMIN_L_BFGS_B
        - CRAB - Default ``optim_method`` is FMIN

    method_params : dict
        Parameters for the ``optim_method``. Note that where there is an
        attribute of the :obj:`~qutip.control.optimizer.Optimizer` object or
        the ``termination_conditions`` matching the key that attribute.
        Otherwise, and in some case also, they are assumed to be
        method_options for the ``scipy.optimize.minimize`` method.

    optim_alg : string
        Deprecated. Use ``optim_method``.

    max_metric_corr : integer
        Deprecated. Use ``method_params`` instead.

    accuracy_factor : float
        Deprecated. Use ``method_params`` instead.

    phase_option : string
        Determines how global phase is treated in fidelity calculations
        (``fid_type='UNIT'`` only). Options:

        - PSU - global phase ignored
        - SU - global phase included

    dyn_params : dict
        Parameters for the :obj:`~qutip.control.dynamics.Dynamics` object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.

    prop_params : dict
        Parameters for the :obj:`~qutip.control.propcomp.PropagatorComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    fid_params : dict
        Parameters for the :obj:`~qutip.control.fidcomp.FidelityComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    tslot_type : string
        Method for computing the dynamics generators, propagators and evolution
        in the timeslots. Options: ``DEF``, ``UPDATE_ALL``, ``DYNAMIC``.
        ``UPDATE_ALL`` is the only one that currently works. (See
        :obj:`~qutip.control.tslotcomp.TimeslotComputer` classes for details.)

    tslot_params : dict
        Parameters for the :obj:`~qutip.control.tslotcomp.TimeslotComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    amp_update_mode : string
        Deprecated. Use ``tslot_type`` instead.

    init_pulse_type : string
        Type / shape of pulse(s) used to initialise the control amplitudes.
        Options (GRAPE) include: RND, LIN, ZERO, SINE, SQUARE, TRIANGLE, SAW.
        DEF is RND. (see :obj:`~qutip.control.pulsegen.PulseGen` classes for
        details.) For the CRAB the this the guess_pulse_type.

    init_pulse_params : dict
        Parameters for the initial / guess pulse generator object. The key
        value pairs are assumed to be attribute name value pairs. They applied
        after the object is created.

    pulse_scaling : float
        Linear scale factor for generated initial / guess pulses. By default
        initial pulses are generated with amplitudes in the range (-1.0, 1.0).
        These will be scaled by this parameter.

    pulse_offset : float
        Linear offset for the pulse. That is this value will be added to any
        initial / guess pulses generated.

    ramping_pulse_type : string
        Type of pulse used to modulate the control pulse. It's intended use
        for a ramping modulation, which is often required in experimental
        setups. This is only currently implemented in CRAB. GAUSSIAN_EDGE was
        added for this purpose.

    ramping_pulse_params : dict
        Parameters for the ramping pulse generator object. The key value pairs
        are assumed to be attribute name value pairs. They applied after the
        object is created.

    log_level : integer
        Level of messaging output from the logger. Options are attributes of
        :obj:`qutip.logging_utils` in decreasing levels of messaging, are:
        DEBUG_INTENSE, DEBUG_VERBOSE, DEBUG, INFO, WARN, ERROR, CRITICAL
        Anything WARN or above is effectively 'quiet' execution, assuming
        everything runs as expected. The default NOTSET implies that the level
        will be taken from the QuTiP settings file, which by default is WARN.

    out_file_ext : string or None
        Files containing the initial and final control pulse amplitudes are
        saved to the current directory. The default name will be postfixed
        with this extension. Setting this to ``None`` will suppress the output
        of files.

    gen_stats : boolean
        If set to ``True`` then statistics for the optimisation run will be
        generated - accessible through attributes of the stats object.

    Returns
    -------
    opt : OptimResult
        Returns instance of :obj:`~qutip.control.optimresult.OptimResult`,
        which has attributes giving the reason for termination, final fidelity
        error, final evolution final amplitudes, statistics etc.
    """
    # parameters are checked in create pulse optimiser
    # The deprecation management is repeated here
    # so that the stack level is correct
    if optim_alg is not None:
        optim_method = optim_alg
        _param_deprecation(
            "The 'optim_alg' parameter is deprecated. "
            "Use 'optim_method' instead")

    if max_metric_corr is not None:
        # Fold the deprecated parameter into method_params, without
        # overriding an explicitly supplied value.
        if isinstance(method_params, dict):
            if 'max_metric_corr' not in method_params:
                method_params['max_metric_corr'] = max_metric_corr
        else:
            method_params = {'max_metric_corr': max_metric_corr}
        _param_deprecation(
            "The 'max_metric_corr' parameter is deprecated. "
            "Use 'max_metric_corr' in method_params instead")

    if accuracy_factor is not None:
        # Fold the deprecated parameter into method_params, without
        # overriding an explicitly supplied value.
        if isinstance(method_params, dict):
            if 'accuracy_factor' not in method_params:
                method_params['accuracy_factor'] = accuracy_factor
        else:
            method_params = {'accuracy_factor': accuracy_factor}
        _param_deprecation(
            "The 'accuracy_factor' parameter is deprecated. "
            "Use 'accuracy_factor' in method_params instead")

    # amp_update_mode is deprecated in favour of tslot_type
    if amp_update_mode is not None:
        amp_update_mode_up = _upper_safe(amp_update_mode)
        if amp_update_mode_up == 'ALL':
            tslot_type = 'UPDATE_ALL'
        else:
            tslot_type = amp_update_mode
        _param_deprecation(
            "The 'amp_update_mode' parameter is deprecated. "
            "Use 'tslot_type' instead")

    # phase_option is still valid for this method
    # pass it via the fid_params
    if phase_option is not None:
        if fid_params is None:
            fid_params = {'phase_option': phase_option}
        else:
            if 'phase_option' not in fid_params:
                fid_params['phase_option'] = phase_option

    # Delegate to the generic optimiser with unitary dynamics selected.
    return optimize_pulse(
        drift=H_d, ctrls=H_c, initial=U_0, target=U_targ,
        num_tslots=num_tslots, evo_time=evo_time, tau=tau,
        amp_lbound=amp_lbound, amp_ubound=amp_ubound,
        fid_err_targ=fid_err_targ, min_grad=min_grad,
        max_iter=max_iter, max_wall_time=max_wall_time,
        alg=alg, alg_params=alg_params, optim_params=optim_params,
        optim_method=optim_method, method_params=method_params,
        dyn_type='UNIT', dyn_params=dyn_params,
        prop_params=prop_params, fid_params=fid_params,
        init_pulse_type=init_pulse_type,
        init_pulse_params=init_pulse_params,
        pulse_scaling=pulse_scaling, pulse_offset=pulse_offset,
        ramping_pulse_type=ramping_pulse_type,
        ramping_pulse_params=ramping_pulse_params,
        log_level=log_level, out_file_ext=out_file_ext,
        gen_stats=gen_stats)
def opt_pulse_crab(
        drift, ctrls, initial, target,
        num_tslots=None, evo_time=None, tau=None,
        amp_lbound=None, amp_ubound=None,
        fid_err_targ=1e-5,
        max_iter=500, max_wall_time=180,
        alg_params=None,
        num_coeffs=None, init_coeff_scaling=1.0,
        optim_params=None, optim_method='fmin', method_params=None,
        dyn_type='GEN_MAT', dyn_params=None,
        prop_type='DEF', prop_params=None,
        fid_type='DEF', fid_params=None,
        tslot_type='DEF', tslot_params=None,
        guess_pulse_type=None, guess_pulse_params=None,
        guess_pulse_scaling=1.0, guess_pulse_offset=0.0,
        guess_pulse_action='MODULATE',
        ramping_pulse_type=None, ramping_pulse_params=None,
        log_level=logging.NOTSET, out_file_ext=None, gen_stats=False):
    """
    Optimise a control pulse to minimise the fidelity error.
    The dynamics of the system in any given timeslot are governed
    by the combined dynamics generator,
    i.e. the sum of the drift+ctrl_amp[j]*ctrls[j]
    The control pulse is an [n_ts, n_ctrls] array of piecewise amplitudes.
    The CRAB algorithm uses basis function coefficients as the variables to
    optimise. It does NOT use any gradient function.
    A multivariable optimisation algorithm attempts to determine the
    optimal values for the control pulse to minimise the fidelity error
    The fidelity error is some measure of distance of the system evolution
    from the given target evolution in the time allowed for the evolution.

    Parameters
    ----------
    drift : Qobj or list of Qobj
        the underlying dynamics generator of the system
        can provide list (of length num_tslots) for time dependent drift

    ctrls : List of Qobj or array like [num_tslots, evo_time]
        a list of control dynamics generators. These are scaled by
        the amplitudes to alter the overall dynamics
        Array like input can be provided for time dependent control generators

    initial : Qobj
        Starting point for the evolution. Typically the identity matrix.

    target : Qobj
        Target transformation, e.g. gate or state, for the time evolution.

    num_tslots : integer or None
        Number of timeslots. ``None`` implies that timeslots will be given in
        the tau array.

    evo_time : float or None
        Total time for the evolution. ``None`` implies that timeslots will be
        given in the tau array.

    tau : array[num_tslots] of floats or None
        Durations for the timeslots. If this is given then ``num_tslots`` and
        ``evo_time`` are derived from it.
        ``None`` implies that timeslot durations will be equal and calculated
        as ``evo_time/num_tslots``.

    amp_lbound : float or list of floats
        Lower boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.

    amp_ubound : float or list of floats
        Upper boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.

    fid_err_targ : float
        Fidelity error target. Pulse optimisation will terminate when the
        fidelity error falls below this value.

    max_iter : integer
        Maximum number of iterations of the optimisation algorithm.

    max_wall_time : float
        Maximum allowed elapsed time for the optimisation algorithm.

    alg_params : Dictionary
        Options that are specific to the algorithm see above.

    optim_params : Dictionary
        The key value pairs are the attribute name and value used to set
        attribute values. Note: attributes are created if they do not exist
        already, and are overwritten if they do. Note: method_params are
        applied afterwards and so may override these.

    coeff_scaling : float
        Linear scale factor for the random basis coefficients. By default
        these range from -1.0 to 1.0. Note this is overridden by alg_params
        (if given there).

    num_coeffs : integer
        Number of coefficients used for each basis function. Note this is
        calculated automatically based on the dimension of the dynamics if not
        given. It is crucial to the performance of the algorithm that it is set
        as low as possible, while still giving high enough frequencies. Note
        this is overridden by alg_params (if given there).

    optim_method : string
        Multi-variable optimisation method. The only tested options are 'fmin'
        and 'Nelder-mead'. In theory any non-gradient method implemented in
        scipy.optimize.minimize could be used.

    method_params : dict
        Parameters for the optim_method. Note that where there is an attribute
        of the :class:`~qutip.control.optimizer.Optimizer` object or the
        termination_conditions matching the key that attribute. Otherwise,
        and in some case also, they are assumed to be method_options for the
        ``scipy.optimize.minimize`` method. The commonly used parameter are:

        - xtol - limit on variable change for convergence
        - ftol - limit on fidelity error change for convergence

    dyn_type : string
        Dynamics type, i.e. the type of matrix used to describe the dynamics.
        Options are UNIT, GEN_MAT, SYMPL (see Dynamics classes for details).

    dyn_params : dict
        Parameters for the :class:`qutip.control.dynamics.Dynamics` object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.

    prop_type : string
        Propagator type i.e. the method used to calculate the propagtors and
        propagtor gradient for each timeslot options are DEF, APPROX, DIAG,
        FRECHET, AUG_MAT DEF will use the default for the specific dyn_type
        (see :obj:`~qutip.control.propcomp.PropagatorComputer` classes for
        details).

    prop_params : dict
        Parameters for the :obj:`~qutip.control.propcomp.PropagatorComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    fid_type : string
        Fidelity error (and fidelity error gradient) computation method.
        Options are DEF, UNIT, TRACEDIFF, TD_APPROX. DEF will use the default
        for the specific dyn_type.
        (See :obj:`~qutip.control.fidcomp.FidelityComputer` classes for
        details).

    fid_params : dict
        Parameters for the :obj:`~qutip.control.fidcomp.FidelityComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    tslot_type : string
        Method for computing the dynamics generators, propagators and evolution
        in the timeslots. Options: DEF, UPDATE_ALL, DYNAMIC UPDATE_ALL is the
        only one that currently works.
        (See :obj:`~qutip.control.tslotcomp.TimeslotComputer` classes
        for details).

    tslot_params : dict
        Parameters for the :obj:`~qutip.control.tslotcomp.TimeslotComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    guess_pulse_type : string, default None
        Type / shape of pulse(s) used modulate the control amplitudes.
        Options include: RND, LIN, ZERO, SINE, SQUARE, TRIANGLE, SAW, GAUSSIAN.

    guess_pulse_params : dict
        Parameters for the guess pulse generator object. The key value pairs
        are assumed to be attribute name value pairs. They applied after the
        object is created.

    guess_pulse_action : string, default 'MODULATE'
        Determines how the guess pulse is applied to the pulse generated by the
        basis expansion. Options are: MODULATE, ADD.

    pulse_scaling : float
        Linear scale factor for generated guess pulses. By default initial
        pulses are generated with amplitudes in the range (-1.0, 1.0). These
        will be scaled by this parameter.

    pulse_offset : float
        Linear offset for the pulse. That is this value will be added to any
        guess pulses generated.

    ramping_pulse_type : string
        Type of pulse used to modulate the control pulse. It's intended use
        for a ramping modulation, which is often required in experimental
        setups. This is only currently implemented in CRAB. GAUSSIAN_EDGE was
        added for this purpose.

    ramping_pulse_params : dict
        Parameters for the ramping pulse generator object. The key value pairs
        are assumed to be attribute name value pairs. They applied after the
        object is created.

    log_level : integer
        level of messaging output from the logger. Options are attributes of
        :obj:`qutip.logging_utils`, in decreasing levels of messaging, are:
        DEBUG_INTENSE, DEBUG_VERBOSE, DEBUG, INFO, WARN, ERROR, CRITICAL
        Anything WARN or above is effectively 'quiet' execution, assuming
        everything runs as expected. The default NOTSET implies that the level
        will be taken from the QuTiP settings file, which by default is WARN.

    out_file_ext : string or None
        Files containing the initial and final control pulse. Amplitudes are
        saved to the current directory. The default name will be postfixed
        with this extension. Setting this to ``None`` will suppress the output
        of files.

    gen_stats : boolean
        If set to ``True`` then statistics for the optimisation run will be
        generated - accessible through attributes of the stats object.

    Returns
    -------
    opt : OptimResult
        Returns instance of OptimResult, which has attributes giving the
        reason for termination, final fidelity error, final evolution
        final amplitudes, statistics etc
    """
    # The parameters are checked in create_pulse_optimizer
    # so no need to do so here
    if log_level == logging.NOTSET:
        log_level = logger.getEffectiveLevel()
    else:
        logger.setLevel(log_level)

    # Build the algorithm options; explicit entries in alg_params take
    # precedence over the individual parameters.
    if not isinstance(alg_params, dict):
        alg_params = {'num_coeffs': num_coeffs,
                      'init_coeff_scaling': init_coeff_scaling}
    else:
        if (num_coeffs is not None and
                'num_coeffs' not in alg_params):
            alg_params['num_coeffs'] = num_coeffs
        if (init_coeff_scaling is not None and
                'init_coeff_scaling' not in alg_params):
            alg_params['init_coeff_scaling'] = init_coeff_scaling

    # Build the guess pulse options
    # Any options passed in the guess_pulse_params take precedence
    # over the parameter values.
    if guess_pulse_type:
        if not isinstance(guess_pulse_params, dict):
            guess_pulse_params = {}
        if (guess_pulse_scaling is not None and
                'scaling' not in guess_pulse_params):
            guess_pulse_params['scaling'] = guess_pulse_scaling
        if (guess_pulse_offset is not None and
                'offset' not in guess_pulse_params):
            guess_pulse_params['offset'] = guess_pulse_offset
        if (guess_pulse_action is not None and
                'pulse_action' not in guess_pulse_params):
            guess_pulse_params['pulse_action'] = guess_pulse_action

    # Delegate to the generic optimiser with the CRAB algorithm selected.
    # min_grad is 0.0 because CRAB is gradient-free.
    return optimize_pulse(
        drift, ctrls, initial, target,
        num_tslots=num_tslots, evo_time=evo_time, tau=tau,
        amp_lbound=amp_lbound, amp_ubound=amp_ubound,
        fid_err_targ=fid_err_targ, min_grad=0.0,
        max_iter=max_iter, max_wall_time=max_wall_time,
        alg='CRAB', alg_params=alg_params, optim_params=optim_params,
        optim_method=optim_method, method_params=method_params,
        dyn_type=dyn_type, dyn_params=dyn_params,
        prop_type=prop_type, prop_params=prop_params,
        fid_type=fid_type, fid_params=fid_params,
        tslot_type=tslot_type, tslot_params=tslot_params,
        init_pulse_type=guess_pulse_type,
        init_pulse_params=guess_pulse_params,
        ramping_pulse_type=ramping_pulse_type,
        ramping_pulse_params=ramping_pulse_params,
        log_level=log_level, out_file_ext=out_file_ext, gen_stats=gen_stats)
def opt_pulse_crab_unitary(
        H_d, H_c, U_0, U_targ,
        num_tslots=None, evo_time=None, tau=None,
        amp_lbound=None, amp_ubound=None,
        fid_err_targ=1e-5,
        max_iter=500, max_wall_time=180,
        alg_params=None,
        num_coeffs=None, init_coeff_scaling=1.0,
        optim_params=None, optim_method='fmin', method_params=None,
        phase_option='PSU',
        dyn_params=None, prop_params=None, fid_params=None,
        tslot_type='DEF', tslot_params=None,
        guess_pulse_type=None, guess_pulse_params=None,
        guess_pulse_scaling=1.0, guess_pulse_offset=0.0,
        guess_pulse_action='MODULATE',
        ramping_pulse_type=None, ramping_pulse_params=None,
        log_level=logging.NOTSET, out_file_ext=None, gen_stats=False):
    """
    Optimise a control pulse to minimise the fidelity error, assuming that the
    dynamics of the system are generated by unitary operators. This function
    is simply a wrapper for optimize_pulse, where the appropriate options for
    unitary dynamics are chosen and the parameter names are in the format
    familiar to unitary dynamics. The dynamics of the system in any given
    timeslot are governed by the combined Hamiltonian, i.e. the sum of the
    ``H_d + ctrl_amp[j]*H_c[j]`` The control pulse is an ``[n_ts, n_ctrls]``
    array of piecewise amplitudes.
    The CRAB algorithm uses basis function coefficients as the variables to
    optimise. It does NOT use any gradient function. A multivariable
    optimisation algorithm attempts to determine the optimal values for the
    control pulse to minimise the fidelity error. The fidelity error is some
    measure of distance of the system evolution from the given target evolution
    in the time allowed for the evolution.

    Parameters
    ----------
    H_d : Qobj or list of Qobj
        Drift (aka system) the underlying Hamiltonian of the system can provide
        list (of length num_tslots) for time dependent drift.

    H_c : List of Qobj or array like [num_tslots, evo_time]
        A list of control Hamiltonians. These are scaled by the amplitudes to
        alter the overall dynamics. Array like input can be provided for time
        dependent control generators.

    U_0 : Qobj
        Starting point for the evolution. Typically the identity matrix.

    U_targ : Qobj
        Target transformation, e.g. gate or state, for the time evolution.

    num_tslots : integer or None
        Number of timeslots. ``None`` implies that timeslots will be given in
        the tau array.

    evo_time : float or None
        Total time for the evolution. ``None`` implies that timeslots will be
        given in the tau array.

    tau : array[num_tslots] of floats or None
        Durations for the timeslots. If this is given then ``num_tslots`` and
        ``evo_time`` are derived from it. ``None`` implies that timeslot
        durations will be equal and calculated as ``evo_time/num_tslots``.

    amp_lbound : float or list of floats
        Lower boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.

    amp_ubound : float or list of floats
        Upper boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.

    fid_err_targ : float
        Fidelity error target. Pulse optimisation will terminate when the
        fidelity error falls below this value.

    max_iter : integer
        Maximum number of iterations of the optimisation algorithm.

    max_wall_time : float
        Maximum allowed elapsed time for the optimisation algorithm.

    alg_params : Dictionary
        Options that are specific to the algorithm see above.

    optim_params : Dictionary
        The key value pairs are the attribute name and value used to set
        attribute values. Note: attributes are created if they do not exist
        already, and are overwritten if they do. Note: ``method_params`` are
        applied afterwards and so may override these.

    coeff_scaling : float
        Linear scale factor for the random basis coefficients. By default
        these range from -1.0 to 1.0. Note this is overridden by
        ``alg_params`` (if given there).

    num_coeffs : integer
        Number of coefficients used for each basis function. Note this is
        calculated automatically based on the dimension of the dynamics if not
        given. It is crucial to the performance of the algorithm that it is set
        as low as possible, while still giving high enough frequencies. Note
        this is overridden by ``alg_params`` (if given there).

    optim_method : string
        Multi-variable optimisation method. The only tested options are 'fmin'
        and 'Nelder-mead'. In theory any non-gradient method implemented in
        ``scipy.optimize.minimize`` could be used.

    method_params : dict
        Parameters for the ``optim_method``. Note that where there is an
        attribute of the :obj:`~qutip.control.optimizer.Optimizer` object or
        the termination_conditions matching the key that attribute. Otherwise,
        and in some case also, they are assumed to be method_options for the
        ``scipy.optimize.minimize`` method. The commonly used parameter are:

        - xtol - limit on variable change for convergence
        - ftol - limit on fidelity error change for convergence

    phase_option : string
        Determines how global phase is treated in fidelity calculations
        (``fid_type='UNIT'`` only). Options:

        - PSU - global phase ignored
        - SU - global phase included

    dyn_params : dict
        Parameters for the :obj:`~qutip.control.dynamics.Dynamics` object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.

    prop_params : dict
        Parameters for the :obj:`~qutip.control.propcomp.PropagatorComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    fid_params : dict
        Parameters for the :obj:`~qutip.control.fidcomp.FidelityComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    tslot_type : string
        Method for computing the dynamics generators, propagators and evolution
        in the timeslots. Options: DEF, UPDATE_ALL, DYNAMIC. UPDATE_ALL is
        the only one that currently works.
        (See :obj:`~qutip.control.tslotcomp.TimeslotComputer` classes for
        details).

    tslot_params : dict
        Parameters for the :obj:`~qutip.control.tslotcomp.TimeslotComputer`
        object. The key value pairs are assumed to be attribute name value
        pairs. They applied after the object is created.

    guess_pulse_type : string, optional
        Type / shape of pulse(s) used modulate the control amplitudes.
        Options include: RND, LIN, ZERO, SINE, SQUARE, TRIANGLE, SAW, GAUSSIAN.

    guess_pulse_params : dict
        Parameters for the guess pulse generator object. The key value pairs
        are assumed to be attribute name value pairs. They applied after the
        object is created.

    guess_pulse_action : string, 'MODULATE'
        Determines how the guess pulse is applied to the pulse generated by the
        basis expansion. Options are: MODULATE, ADD.

    pulse_scaling : float
        Linear scale factor for generated guess pulses. By default initial
        pulses are generated with amplitudes in the range (-1.0, 1.0). These
        will be scaled by this parameter.

    pulse_offset : float
        Linear offset for the pulse. That is this value will be added to any
        guess pulses generated.

    ramping_pulse_type : string
        Type of pulse used to modulate the control pulse. It's intended use
        for a ramping modulation, which is often required in experimental
        setups. This is only currently implemented in CRAB. GAUSSIAN_EDGE was
        added for this purpose.

    ramping_pulse_params : dict
        Parameters for the ramping pulse generator object. The key value pairs
        are assumed to be attribute name value pairs. They applied after the
        object is created.

    log_level : integer
        Level of messaging output from the logger. Options are attributes of
        :obj:`qutip.logging_utils`, in decreasing levels of messaging, are:
        DEBUG_INTENSE, DEBUG_VERBOSE, DEBUG, INFO, WARN, ERROR, CRITICAL.
        Anything WARN or above is effectively 'quiet' execution, assuming
        everything runs as expected. The default NOTSET implies that the level
        will be taken from the QuTiP settings file, which by default is WARN.

    out_file_ext : string or None
        Files containing the initial and final control pulse amplitudes are
        saved to the current directory. The default name will be postfixed
        with this extension. Setting this to None will suppress the output of
        files.

    gen_stats : boolean
        If set to ``True`` then statistics for the optimisation run will be
        generated - accessible through attributes of the stats object.

    Returns
    -------
    opt : OptimResult
        Returns instance of :obj:`~qutip.control.optimresult.OptimResult`,
        which has attributes giving the reason for termination, final fidelity
        error, final evolution final amplitudes, statistics etc.
    """
    # The parameters are checked in create_pulse_optimizer
    # so no need to do so here
    if log_level == logging.NOTSET:
        log_level = logger.getEffectiveLevel()
    else:
        logger.setLevel(log_level)

    # Build the algorithm options; explicit entries in alg_params take
    # precedence over the individual parameters.
    if not isinstance(alg_params, dict):
        alg_params = {'num_coeffs': num_coeffs,
                      'init_coeff_scaling': init_coeff_scaling}
    else:
        if (num_coeffs is not None and
                'num_coeffs' not in alg_params):
            alg_params['num_coeffs'] = num_coeffs
        if (init_coeff_scaling is not None and
                'init_coeff_scaling' not in alg_params):
            alg_params['init_coeff_scaling'] = init_coeff_scaling

    # Build the guess pulse options
    # Any options passed in the guess_pulse_params take precedence
    # over the parameter values.
    if guess_pulse_type:
        if not isinstance(guess_pulse_params, dict):
            guess_pulse_params = {}
        if (guess_pulse_scaling is not None and
                'scaling' not in guess_pulse_params):
            guess_pulse_params['scaling'] = guess_pulse_scaling
        if (guess_pulse_offset is not None and
                'offset' not in guess_pulse_params):
            guess_pulse_params['offset'] = guess_pulse_offset
        if (guess_pulse_action is not None and
                'pulse_action' not in guess_pulse_params):
            guess_pulse_params['pulse_action'] = guess_pulse_action

    # Delegate to the unitary wrapper with the CRAB algorithm selected.
    # min_grad is 0.0 because CRAB is gradient-free.
    return optimize_pulse_unitary(
        H_d, H_c, U_0, U_targ,
        num_tslots=num_tslots, evo_time=evo_time, tau=tau,
        amp_lbound=amp_lbound, amp_ubound=amp_ubound,
        fid_err_targ=fid_err_targ, min_grad=0.0,
        max_iter=max_iter, max_wall_time=max_wall_time,
        alg='CRAB', alg_params=alg_params, optim_params=optim_params,
        optim_method=optim_method, method_params=method_params,
        phase_option=phase_option,
        dyn_params=dyn_params, prop_params=prop_params, fid_params=fid_params,
        tslot_type=tslot_type, tslot_params=tslot_params,
        init_pulse_type=guess_pulse_type,
        init_pulse_params=guess_pulse_params,
        ramping_pulse_type=ramping_pulse_type,
        ramping_pulse_params=ramping_pulse_params,
        log_level=log_level, out_file_ext=out_file_ext, gen_stats=gen_stats)
def create_pulse_optimizer(
        drift, ctrls, initial, target,
        num_tslots=None, evo_time=None, tau=None,
        amp_lbound=None, amp_ubound=None,
        fid_err_targ=1e-10, min_grad=1e-10,
        max_iter=500, max_wall_time=180,
        alg='GRAPE', alg_params=None,
        optim_params=None, optim_method='DEF', method_params=None,
        optim_alg=None, max_metric_corr=None, accuracy_factor=None,
        dyn_type='GEN_MAT', dyn_params=None,
        prop_type='DEF', prop_params=None,
        fid_type='DEF', fid_params=None,
        phase_option=None, fid_err_scale_factor=None,
        tslot_type='DEF', tslot_params=None,
        amp_update_mode=None,
        init_pulse_type='DEF', init_pulse_params=None,
        pulse_scaling=1.0, pulse_offset=0.0,
        ramping_pulse_type=None, ramping_pulse_params=None,
        log_level=logging.NOTSET, gen_stats=False):
    """
    Generate the objects of the appropriate subclasses required for the pulse
    optimisation based on the parameters given. Note this method may be
    preferable to calling optimize_pulse if more detailed configuration is
    required before running the optimisation algorithm, or the algorithm will
    be run many times, for instance when trying to find the global optimum or
    minimum time optimisation
    Parameters
    ----------
    drift : Qobj or list of Qobj
        The underlying dynamics generator of the system can provide list (of
        length num_tslots) for time dependent drift.
    ctrls : List of Qobj or array like [num_tslots, evo_time]
        A list of control dynamics generators. These are scaled by the
        amplitudes to alter the overall dynamics. Array-like input can be
        provided for time dependent control generators.
    initial : Qobj
        Starting point for the evolution. Typically the identity matrix.
    target : Qobj
        Target transformation, e.g. gate or state, for the time evolution.
    num_tslots : integer or None
        Number of timeslots. ``None`` implies that timeslots will be given in
        the tau array.
    evo_time : float or None
        Total time for the evolution. ``None`` implies that timeslots will be
        given in the tau array.
    tau : array[num_tslots] of floats or None
        Durations for the timeslots. If this is given then ``num_tslots`` and
        ``evo_time`` are derived from it. ``None`` implies that timeslot
        durations will be equal and calculated as ``evo_time/num_tslots``.
    amp_lbound : float or list of floats
        Lower boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.
    amp_ubound : float or list of floats
        Upper boundaries for the control amplitudes. Can be a scalar value
        applied to all controls or a list of bounds for each control.
    fid_err_targ : float
        Fidelity error target. Pulse optimisation will terminate when the
        fidelity error falls below this value.
    min_grad : float
        Minimum gradient. When the sum of the squares of the gradients wrt to
        the control amplitudes falls below this value, the optimisation
        terminates, assuming local minima.
    max_iter : integer
        Maximum number of iterations of the optimisation algorithm.
    max_wall_time : float
        Maximum allowed elapsed time for the optimisation algorithm.
    alg : string
        Algorithm to use in pulse optimisation.
        Options are:
        - 'GRAPE' (default) - GRadient Ascent Pulse Engineering
        - 'CRAB' - Chopped RAndom Basis
    alg_params : Dictionary
        options that are specific to the algorithm see above
    optim_params : Dictionary
        The key value pairs are the attribute name and value used to set
        attribute values. Note: attributes are created if they do not exist
        already, and are overwritten if they do. Note: method_params are
        applied afterwards and so may override these.
    optim_method : string
        a scipy.optimize.minimize method that will be used to optimise
        the pulse for minimum fidelity error
        Note that FMIN, FMIN_BFGS & FMIN_L_BFGS_B will all result
        in calling these specific scipy.optimize methods
        Note the LBFGSB is equivalent to FMIN_L_BFGS_B for backwards
        compatibility reasons.
        Supplying DEF will give an alg dependent result:
        - GRAPE - Default optim_method is FMIN_L_BFGS_B
        - CRAB - Default optim_method is Nelder-Mead
    method_params : dict
        Parameters for the optim_method.
        Note that where there is an attribute of the
        :class:`~qutip.control.optimizer.Optimizer` object or the
        termination_conditions matching the key, that attribute is set.
        Otherwise, and in some case also, they are assumed to be
        method_options for the scipy.optimize.minimize method.
    optim_alg : string
        Deprecated. Use optim_method.
    max_metric_corr : integer
        Deprecated. Use method_params instead
    accuracy_factor : float
        Deprecated. Use method_params instead
    dyn_type : string
        Dynamics type, i.e. the type of matrix used to describe
        the dynamics. Options are UNIT, GEN_MAT, SYMPL
        (see Dynamics classes for details)
    dyn_params : dict
        Parameters for the Dynamics object
        The key value pairs are assumed to be attribute name value pairs
        They applied after the object is created
    prop_type : string
        Propagator type i.e. the method used to calculate the
        propagators and propagator gradient for each timeslot
        options are DEF, APPROX, DIAG, FRECHET, AUG_MAT
        DEF will use the default for the specific dyn_type
        (see PropagatorComputer classes for details)
    prop_params : dict
        Parameters for the PropagatorComputer object
        The key value pairs are assumed to be attribute name value pairs
        They applied after the object is created
    fid_type : string
        Fidelity error (and fidelity error gradient) computation method
        Options are DEF, UNIT, TRACEDIFF, TD_APPROX
        DEF will use the default for the specific dyn_type
        (See FidelityComputer classes for details)
    fid_params : dict
        Parameters for the FidelityComputer object
        The key value pairs are assumed to be attribute name value pairs
        They applied after the object is created
    phase_option : string
        Deprecated. Pass in fid_params instead.
    fid_err_scale_factor : float
        Deprecated. Use scale_factor key in fid_params instead.
    tslot_type : string
        Method for computing the dynamics generators, propagators and
        evolution in the timeslots.
        Options: DEF, UPDATE_ALL, DYNAMIC
        UPDATE_ALL is the only one that currently works
        (See TimeslotComputer classes for details)
    tslot_params : dict
        Parameters for the TimeslotComputer object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.
    amp_update_mode : string
        Deprecated. Use tslot_type instead.
    init_pulse_type : string
        type / shape of pulse(s) used to initialise the
        the control amplitudes.
        Options (GRAPE) include:
        RND, LIN, ZERO, SINE, SQUARE, TRIANGLE, SAW
        DEF is RND
        (see PulseGen classes for details)
        For CRAB this is the guess_pulse_type.
    init_pulse_params : dict
        Parameters for the initial / guess pulse generator object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created.
    pulse_scaling : float
        Linear scale factor for generated initial / guess pulses
        By default initial pulses are generated with amplitudes in the
        range (-1.0, 1.0). These will be scaled by this parameter
    pulse_offset : float
        Linear offset for the pulse. That is this value will be added
        to any initial / guess pulses generated.
    ramping_pulse_type : string
        Type of pulse used to modulate the control pulse.
        It's intended use for a ramping modulation, which is often required in
        experimental setups.
        This is only currently implemented in CRAB.
        GAUSSIAN_EDGE was added for this purpose.
    ramping_pulse_params : dict
        Parameters for the ramping pulse generator object.
        The key value pairs are assumed to be attribute name value pairs.
        They applied after the object is created
    log_level : integer
        level of messaging output from the logger.
        Options are attributes of qutip.logging_utils,
        in decreasing levels of messaging, are:
        DEBUG_INTENSE, DEBUG_VERBOSE, DEBUG, INFO, WARN, ERROR, CRITICAL
        Anything WARN or above is effectively 'quiet' execution,
        assuming everything runs as expected.
        The default NOTSET implies that the level will be taken from
        the QuTiP settings file, which by default is WARN
    gen_stats : boolean
        if set to True then statistics for the optimisation
        run will be generated - accessible through attributes
        of the stats object
    Returns
    -------
    opt : Optimizer
        Instance of an Optimizer, through which the
        Config, Dynamics, PulseGen, and TerminationConditions objects
        can be accessed as attributes.
        The PropagatorComputer, FidelityComputer and TimeslotComputer objects
        can be accessed as attributes of the Dynamics object,
        e.g. optimizer.dynamics.fid_computer The optimisation can be run
        through the optimizer.run_optimization
    """
    # check parameters
    ctrls = dynamics._check_ctrls_container(ctrls)
    dynamics._check_drift_dyn_gen(drift)
    if not isinstance(initial, Qobj):
        raise TypeError("initial must be a Qobj")
    if not isinstance(target, Qobj):
        raise TypeError("target must be a Qobj")
    # Deprecated parameter management
    # Each deprecated keyword is remapped onto its modern equivalent
    # (explicit modern values in the dicts take precedence), then a
    # deprecation message is emitted via _param_deprecation.
    if not optim_alg is None:
        optim_method = optim_alg
        _param_deprecation(
            "The 'optim_alg' parameter is deprecated. "
            "Use 'optim_method' instead")
    if not max_metric_corr is None:
        if isinstance(method_params, dict):
            if not 'max_metric_corr' in method_params:
                method_params['max_metric_corr'] = max_metric_corr
        else:
            method_params = {'max_metric_corr':max_metric_corr}
        _param_deprecation(
            "The 'max_metric_corr' parameter is deprecated. "
            "Use 'max_metric_corr' in method_params instead")
    if not accuracy_factor is None:
        if isinstance(method_params, dict):
            if not 'accuracy_factor' in method_params:
                method_params['accuracy_factor'] = accuracy_factor
        else:
            method_params = {'accuracy_factor':accuracy_factor}
        _param_deprecation(
            "The 'accuracy_factor' parameter is deprecated. "
            "Use 'accuracy_factor' in method_params instead")
    # phase_option
    if not phase_option is None:
        if isinstance(fid_params, dict):
            if not 'phase_option' in fid_params:
                fid_params['phase_option'] = phase_option
        else:
            fid_params = {'phase_option':phase_option}
        _param_deprecation(
            "The 'phase_option' parameter is deprecated. "
            "Use 'phase_option' in fid_params instead")
    # fid_err_scale_factor
    if not fid_err_scale_factor is None:
        if isinstance(fid_params, dict):
            if not 'fid_err_scale_factor' in fid_params:
                fid_params['scale_factor'] = fid_err_scale_factor
        else:
            fid_params = {'scale_factor':fid_err_scale_factor}
        _param_deprecation(
            "The 'fid_err_scale_factor' parameter is deprecated. "
            "Use 'scale_factor' in fid_params instead")
    # amp_update_mode
    if not amp_update_mode is None:
        amp_update_mode_up = _upper_safe(amp_update_mode)
        if amp_update_mode_up == 'ALL':
            tslot_type = 'UPDATE_ALL'
        else:
            tslot_type = amp_update_mode
        _param_deprecation(
            "The 'amp_update_mode' parameter is deprecated. "
            "Use 'tslot_type' instead")
    # set algorithm defaults
    # 'DEF' placeholders are resolved per algorithm here.
    alg_up = _upper_safe(alg)
    if alg is None:
        raise errors.UsageError(
            "Optimisation algorithm must be specified through 'alg' parameter")
    elif alg_up == 'GRAPE':
        if optim_method is None or optim_method.upper() == 'DEF':
            optim_method = 'FMIN_L_BFGS_B'
        if init_pulse_type is None or init_pulse_type.upper() == 'DEF':
            init_pulse_type = 'RND'
    elif alg_up == 'CRAB':
        if optim_method is None or optim_method.upper() == 'DEF':
            optim_method = 'FMIN'
        if prop_type is None or prop_type.upper() == 'DEF':
            prop_type = 'APPROX'
        if init_pulse_type is None or init_pulse_type.upper() == 'DEF':
            # For CRAB no guess pulse is used unless one is explicitly given
            init_pulse_type = None
    else:
        raise errors.UsageError(
            "No option for pulse optimisation algorithm alg={}".format(alg))
    cfg = optimconfig.OptimConfig()
    cfg.optim_method = optim_method
    cfg.dyn_type = dyn_type
    cfg.prop_type = prop_type
    cfg.fid_type = fid_type
    cfg.init_pulse_type = init_pulse_type
    if log_level == logging.NOTSET:
        log_level = logger.getEffectiveLevel()
    else:
        logger.setLevel(log_level)
    cfg.log_level = log_level
    # Create the Dynamics instance
    if dyn_type == 'GEN_MAT' or dyn_type is None or dyn_type == '':
        dyn = dynamics.DynamicsGenMat(cfg)
    elif dyn_type == 'UNIT':
        dyn = dynamics.DynamicsUnitary(cfg)
    elif dyn_type == 'SYMPL':
        dyn = dynamics.DynamicsSymplectic(cfg)
    else:
        raise errors.UsageError("No option for dyn_type: " + dyn_type)
    dyn.apply_params(dyn_params)
    # drift and ctrls were validated above, so skip re-checking in Dynamics
    dyn._drift_dyn_gen_checked = True
    dyn._ctrl_dyn_gen_checked = True
    # Create the PropagatorComputer instance
    # The default will be typically be the best option
    if prop_type == 'DEF' or prop_type is None or prop_type == '':
        # Do nothing use the default for the Dynamics
        pass
    elif prop_type == 'APPROX':
        if not isinstance(dyn.prop_computer, propcomp.PropCompApproxGrad):
            dyn.prop_computer = propcomp.PropCompApproxGrad(dyn)
    elif prop_type == 'DIAG':
        if not isinstance(dyn.prop_computer, propcomp.PropCompDiag):
            dyn.prop_computer = propcomp.PropCompDiag(dyn)
    elif prop_type == 'AUG_MAT':
        if not isinstance(dyn.prop_computer, propcomp.PropCompAugMat):
            dyn.prop_computer = propcomp.PropCompAugMat(dyn)
    elif prop_type == 'FRECHET':
        if not isinstance(dyn.prop_computer, propcomp.PropCompFrechet):
            dyn.prop_computer = propcomp.PropCompFrechet(dyn)
    else:
        raise errors.UsageError("No option for prop_type: " + prop_type)
    dyn.prop_computer.apply_params(prop_params)
    # Create the FidelityComputer instance
    # The default will be typically be the best option
    # Note: the FidCompTraceDiffApprox is a subclass of FidCompTraceDiff
    # so need to check this type first
    fid_type_up = _upper_safe(fid_type)
    if fid_type_up == 'DEF' or fid_type_up is None or fid_type_up == '':
        # None given, use the default for the Dynamics
        pass
    elif fid_type_up == 'TDAPPROX':
        if not isinstance(dyn.fid_computer, fidcomp.FidCompTraceDiffApprox):
            dyn.fid_computer = fidcomp.FidCompTraceDiffApprox(dyn)
    elif fid_type_up == 'TRACEDIFF':
        if not isinstance(dyn.fid_computer, fidcomp.FidCompTraceDiff):
            dyn.fid_computer = fidcomp.FidCompTraceDiff(dyn)
    elif fid_type_up == 'UNIT':
        if not isinstance(dyn.fid_computer, fidcomp.FidCompUnitary):
            dyn.fid_computer = fidcomp.FidCompUnitary(dyn)
    else:
        raise errors.UsageError("No option for fid_type: " + fid_type)
    dyn.fid_computer.apply_params(fid_params)
    # Currently the only working option for tslot computer is
    # TSlotCompUpdateAll.
    # so just apply the parameters
    dyn.tslot_computer.apply_params(tslot_params)
    # Create the Optimiser instance
    optim_method_up = _upper_safe(optim_method)
    if optim_method is None or optim_method_up == '':
        raise errors.UsageError("Optimisation method must be specified "
                                "via 'optim_method' parameter")
    elif optim_method_up == 'FMIN_BFGS':
        optim = optimizer.OptimizerBFGS(cfg, dyn)
    elif optim_method_up == 'LBFGSB' or optim_method_up == 'FMIN_L_BFGS_B':
        optim = optimizer.OptimizerLBFGSB(cfg, dyn)
    elif optim_method_up == 'FMIN':
        if alg_up == 'CRAB':
            optim = optimizer.OptimizerCrabFmin(cfg, dyn)
        else:
            raise errors.UsageError(
                "Invalid optim_method '{}' for '{}' algorthim".format(
                    optim_method, alg))
    else:
        # Assume that the optim_method is a valid
        #scipy.optimize.minimize method
        # Choose an optimiser based on the algorithm
        if alg_up == 'CRAB':
            optim = optimizer.OptimizerCrab(cfg, dyn)
        else:
            optim = optimizer.Optimizer(cfg, dyn)
    optim.alg = alg
    optim.method = optim_method
    optim.amp_lbound = amp_lbound
    optim.amp_ubound = amp_ubound
    optim.apply_params(optim_params)
    # Create the TerminationConditions instance
    tc = termcond.TerminationConditions()
    tc.fid_err_targ = fid_err_targ
    tc.min_gradient_norm = min_grad
    tc.max_iterations = max_iter
    tc.max_wall_time = max_wall_time
    optim.termination_conditions = tc
    # method_params applied last, so they may override optim_params / tc
    optim.apply_method_params(method_params)
    if gen_stats:
        # Create a stats object
        # Note that stats object is optional
        # if the Dynamics and Optimizer stats attribute is not set
        # then no stats will be collected, which could improve performance
        # NOTE(review): this tests the deprecated 'amp_update_mode' argument,
        # not the tslot_type it was remapped to above — confirm intended.
        if amp_update_mode == 'DYNAMIC':
            sts = stats.StatsDynTsUpdate()
        else:
            sts = stats.Stats()
        dyn.stats = sts
        optim.stats = sts
    # Configure the dynamics
    dyn.drift_dyn_gen = drift
    dyn.ctrl_dyn_gen = ctrls
    dyn.initial = initial
    dyn.target = target
    if tau is None:
        # Check that parameters have been supplied to generate the
        # timeslot durations
        try:
            # raises (TypeError / ZeroDivisionError) if either is missing
            evo_time / num_tslots
        except:
            raise errors.UsageError(
                "Either the timeslot durations should be supplied as an "
                "array 'tau' or the number of timeslots 'num_tslots' "
                "and the evolution time 'evo_time' must be given.")
        dyn.num_tslots = num_tslots
        dyn.evo_time = evo_time
    else:
        dyn.tau = tau
    # this function is called, so that the num_ctrls attribute will be set
    n_ctrls = dyn.num_ctrls
    ramping_pgen = None
    if ramping_pulse_type:
        ramping_pgen = pulsegen.create_pulse_gen(
            pulse_type=ramping_pulse_type, dyn=dyn,
            pulse_params=ramping_pulse_params)
    if alg_up == 'CRAB':
        # Create a pulse generator for each ctrl
        crab_pulse_params = None
        num_coeffs = None
        init_coeff_scaling = None
        if isinstance(alg_params, dict):
            num_coeffs = alg_params.get('num_coeffs')
            init_coeff_scaling = alg_params.get('init_coeff_scaling')
            if 'crab_pulse_params' in alg_params:
                crab_pulse_params = alg_params.get('crab_pulse_params')
        # For CRAB, init_pulse_type (if any) describes the guess pulse
        guess_pulse_type = init_pulse_type
        if guess_pulse_type:
            guess_pulse_action = None
            guess_pgen = pulsegen.create_pulse_gen(
                pulse_type=guess_pulse_type, dyn=dyn)
            guess_pgen.scaling = pulse_scaling
            guess_pgen.offset = pulse_offset
            if init_pulse_params is not None:
                guess_pgen.apply_params(init_pulse_params)
                guess_pulse_action = init_pulse_params.get('pulse_action')
        optim.pulse_generator = []
        for j in range(n_ctrls):
            crab_pgen = pulsegen.PulseGenCrabFourier(
                dyn=dyn, num_coeffs=num_coeffs)
            if init_coeff_scaling is not None:
                crab_pgen.scaling = init_coeff_scaling
            if isinstance(crab_pulse_params, dict):
                crab_pgen.apply_params(crab_pulse_params)
            # Resolve per-control amplitude bounds; a short bounds list
            # falls back to its last entry for the remaining controls.
            lb = None
            if amp_lbound:
                if isinstance(amp_lbound, list):
                    try:
                        lb = amp_lbound[j]
                    except:
                        lb = amp_lbound[-1]
                else:
                    lb = amp_lbound
            ub = None
            if amp_ubound:
                if isinstance(amp_ubound, list):
                    try:
                        ub = amp_ubound[j]
                    except:
                        ub = amp_ubound[-1]
                else:
                    ub = amp_ubound
            crab_pgen.lbound = lb
            crab_pgen.ubound = ub
            if guess_pulse_type:
                guess_pgen.lbound = lb
                guess_pgen.ubound = ub
                crab_pgen.guess_pulse = guess_pgen.gen_pulse()
                if guess_pulse_action:
                    crab_pgen.guess_pulse_action = guess_pulse_action
            if ramping_pgen:
                crab_pgen.ramping_pulse = ramping_pgen.gen_pulse()
            optim.pulse_generator.append(crab_pgen)
        #This is just for the debug message now
        pgen = optim.pulse_generator[0]
    else:
        # Create a pulse generator of the type specified
        pgen = pulsegen.create_pulse_gen(pulse_type=init_pulse_type, dyn=dyn,
                                         pulse_params=init_pulse_params)
        pgen.scaling = pulse_scaling
        pgen.offset = pulse_offset
        pgen.lbound = amp_lbound
        pgen.ubound = amp_ubound
        optim.pulse_generator = pgen
    if log_level <= logging.DEBUG:
        logger.debug(
            "Optimisation config summary...\n"
            "  object classes:\n"
            "    optimizer: " + optim.__class__.__name__ +
            "\n    dynamics: " + dyn.__class__.__name__ +
            "\n    tslotcomp: " + dyn.tslot_computer.__class__.__name__ +
            "\n    fidcomp: " + dyn.fid_computer.__class__.__name__ +
            "\n    propcomp: " + dyn.prop_computer.__class__.__name__ +
            "\n    pulsegen: " + pgen.__class__.__name__)
    return optim
| {
"content_hash": "aff2b789f2074f409955ef11d72d4138",
"timestamp": "",
"source": "github",
"line_count": 1875,
"max_line_length": 79,
"avg_line_length": 42.086933333333334,
"alnum_prop": 0.6573568360092761,
"repo_name": "qutip/qutip",
"id": "05268f521542448a224b6368bd501785f226376f",
"size": "79107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qutip/control/pulseoptim.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "13979"
},
{
"name": "Cython",
"bytes": "354994"
},
{
"name": "OpenQASM",
"bytes": "1718"
},
{
"name": "Python",
"bytes": "2810040"
}
],
"symlink_target": ""
} |
"""Unit tests for SerializedDagModel."""
import unittest
from airflow import DAG, example_dags as example_dags_module
from airflow.models import DagBag
from airflow.models.dagcode import DagCode
from airflow.models.serialized_dag import SerializedDagModel as SDM
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.utils.session import create_session
from tests.test_utils.asserts import assert_queries_count
# To move it to a shared module.
def make_example_dags(module):
    """Load the example DAGs shipped in *module* and return them as a dict."""
    bag = DagBag(module.__path__[0])
    return bag.dags
def clear_db_serialized_dags():
    """Remove every SerializedDagModel row from the metadata database."""
    with create_session() as session:
        # bulk delete; commit happens when the session context exits
        session.query(SDM).delete()
class SerializedDagModelTest(unittest.TestCase):
    """Unit tests for SerializedDagModel."""
    def setUp(self):
        # Start each test from an empty serialized-DAG table
        clear_db_serialized_dags()
    def tearDown(self):
        # Leave the table clean for the next test
        clear_db_serialized_dags()
    def test_dag_fileloc_hash(self):
        """Verifies the correctness of hashing file path."""
        assert DagCode.dag_fileloc_hash('/airflow/dags/test_dag.py') == 33826252060516589
    def _write_example_dags(self):
        # Helper: serialize all example DAGs into the DB and return them
        example_dags = make_example_dags(example_dags_module)
        for dag in example_dags.values():
            SDM.write_dag(dag)
        return example_dags
    def test_write_dag(self):
        """DAGs can be written into database."""
        example_dags = self._write_example_dags()
        with create_session() as session:
            for dag in example_dags.values():
                assert SDM.has_dag(dag.dag_id)
                result = session.query(SDM.fileloc, SDM.data).filter(SDM.dag_id == dag.dag_id).one()
                assert result.fileloc == dag.full_filepath
                # Verifies JSON schema.
                SerializedDAG.validate_schema(result.data)
    def test_serialized_dag_is_updated_only_if_dag_is_changed(self):
        """Test Serialized DAG is updated if DAG is changed"""
        example_dags = make_example_dags(example_dags_module)
        example_bash_op_dag = example_dags.get("example_bash_operator")
        dag_updated = SDM.write_dag(dag=example_bash_op_dag)
        assert dag_updated is True
        with create_session() as session:
            s_dag = session.query(SDM).get(example_bash_op_dag.dag_id)
            # Test that if DAG is not changed, Serialized DAG is not re-written and last_updated
            # column is not updated
            dag_updated = SDM.write_dag(dag=example_bash_op_dag)
            s_dag_1 = session.query(SDM).get(example_bash_op_dag.dag_id)
            assert s_dag_1.dag_hash == s_dag.dag_hash
            assert s_dag.last_updated == s_dag_1.last_updated
            assert dag_updated is False
            # Update DAG
            example_bash_op_dag.tags += ["new_tag"]
            assert set(example_bash_op_dag.tags) == {"example", "example2", "new_tag"}
            dag_updated = SDM.write_dag(dag=example_bash_op_dag)
            s_dag_2 = session.query(SDM).get(example_bash_op_dag.dag_id)
            # A content change must bump both the hash and last_updated
            assert s_dag.last_updated != s_dag_2.last_updated
            assert s_dag.dag_hash != s_dag_2.dag_hash
            assert s_dag_2.data["dag"]["tags"] == ["example", "example2", "new_tag"]
            assert dag_updated is True
    def test_read_dags(self):
        """DAGs can be read from database."""
        example_dags = self._write_example_dags()
        serialized_dags = SDM.read_all_dags()
        assert len(example_dags) == len(serialized_dags)
        for dag_id, dag in example_dags.items():
            serialized_dag = serialized_dags[dag_id]
            assert serialized_dag.dag_id == dag.dag_id
            assert set(serialized_dag.task_dict) == set(dag.task_dict)
    def test_remove_dags_by_id(self):
        """DAGs can be removed from database."""
        example_dags_list = list(self._write_example_dags().values())
        # Remove SubDags from the list as they are not stored in DB in a separate row
        # and are directly added in Json blob of the main DAG
        filtered_example_dags_list = [dag for dag in example_dags_list if not dag.is_subdag]
        # Tests removing by dag_id.
        dag_removed_by_id = filtered_example_dags_list[0]
        SDM.remove_dag(dag_removed_by_id.dag_id)
        assert not SDM.has_dag(dag_removed_by_id.dag_id)
    def test_remove_dags_by_filepath(self):
        """DAGs can be removed from database."""
        example_dags_list = list(self._write_example_dags().values())
        # Remove SubDags from the list as they are not stored in DB in a separate row
        # and are directly added in Json blob of the main DAG
        filtered_example_dags_list = [dag for dag in example_dags_list if not dag.is_subdag]
        # Tests removing by file path.
        dag_removed_by_file = filtered_example_dags_list[0]
        # remove repeated files for those DAGs that define multiple dags in the same file (set comprehension)
        example_dag_files = list({dag.full_filepath for dag in filtered_example_dags_list})
        example_dag_files.remove(dag_removed_by_file.full_filepath)
        SDM.remove_deleted_dags(example_dag_files)
        assert not SDM.has_dag(dag_removed_by_file.dag_id)
    def test_bulk_sync_to_db(self):
        # Serializing three fresh DAGs must stay within the expected query budget
        dags = [
            DAG("dag_1"),
            DAG("dag_2"),
            DAG("dag_3"),
        ]
        with assert_queries_count(10):
            SDM.bulk_sync_to_db(dags)
| {
"content_hash": "63a3ba59c7a4a39a7b404e95ce294d29",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 109,
"avg_line_length": 41.06015037593985,
"alnum_prop": 0.637062809009339,
"repo_name": "sekikn/incubator-airflow",
"id": "db8282f943f7b0499db3a8ca87a9b465883f24a7",
"size": "6249",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/models/test_serialized_dag.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "15900"
},
{
"name": "HTML",
"bytes": "151266"
},
{
"name": "JavaScript",
"bytes": "25486"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10792443"
},
{
"name": "Shell",
"bytes": "243458"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
} |
"""Description: Call peaks from two wiggle file, one for treatment and one for control.
Copyright (c) 2010 Tao Liu <taoliu@jimmy.harvard.edu>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file COPYING included with
the distribution).
@status: experimental
@version: $Revision$
@author: Tao Liu
@contact: taoliu@jimmy.harvard.edu
"""
# ------------------------------------
# python modules
# ------------------------------------
import os
import sys
import re
import logging
from array import array
from math import log as mathlog
from optparse import OptionParser
from taolib.CoreLib.Parser import *
from taolib.CoreLib.BasicStat.Prob import *
from taolib.CoreLib.FeatIO import *
# ------------------------------------
# constants
# ------------------------------------
# Configure the root logger: level 20 == logging.INFO, output to stderr.
# NOTE: 'filemode' only matters when a filename is given; it is inert here.
logging.basicConfig(level=20,
                    format='%(levelname)-5s @ %(asctime)s: %(message)s ',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    stream=sys.stderr,
                    filemode="w"
                    )
# ------------------------------------
# Misc functions
# ------------------------------------
# Short aliases for the logging functions used throughout this script.
error = logging.critical # function alias
warn = logging.warning
debug = logging.debug
info = logging.info
# ------------------------------------
# Classes
# ------------------------------------
# ------------------------------------
# Main function
# ------------------------------------
def main():
    """Parse command-line options, score treatment vs control wiggle tracks,
    and write a score wiggle file plus a BED file of called peaks.

    Exits non-zero on invalid options or unreadable/unwritable files.
    """
    usage = "usage: %prog [options]"
    description = "Call peaks from two wiggle file, one for treatment and one for control."
    optparser = OptionParser(version="%prog 0.1",description=description,usage=usage,add_help_option=False)
    optparser.add_option("-h","--help",action="help",help="Show this help message and exit.")
    optparser.add_option("-t","--tfile",dest="tfile",type="string",
                         help="treatment wiggle file. *REQUIRED")
    optparser.add_option("-c","--cfile",dest="cfile",type="string",
                         help="control wiggle file. *REQUIRED")
    optparser.add_option("-n","--name",dest="name",type="string",
                         help="name of output files. *REQUIRED")
    optparser.add_option("-C","--cutoff",dest="cutoff",type="float",
                         help="Cutoff depends on which method you choose. The default is 50 for -log10(poisson_pvalue) which is equal to pvalue 1e-5",default=50)
    optparser.add_option("--gsize",dest="gsize",type="int",
                         help="genome size. default: 100000000",default=100000000)
    optparser.add_option("-w","--window",dest="window",type="int",
                         help="the window centered at each data point to check the local bias. default:1000",default=1000)
    optparser.add_option("-l","--min-length",dest="minlen",type="int",
                         help="minimum length of peak, default: 300",default=300)
    optparser.add_option("-g","--maxgap",dest="maxgap",type="int",
                         help="maximum gap between significant points in a peak, default: 50",default=50)
    optparser.add_option("-m","--method",dest="method",type="string",
                         help="""scoring method can be either
"poisson","diff", or "fc". After the adjusted average values from
control data are calculated for each window surrounding every data
point as the measurement of local bias, the following method will be
applied to compute a score: 1) poisson: The score will be the
-log10(poisson pvalue) for each data point. The poisson pvalue =
CDF(treatment_value, lambda=local_bias); 2) diff: The local bias will
be deducted from the treatment values as scores. 3) fc: The score will
be the fold change/ratio between treatment value to local bias. The
default is "poisson".""", default="poisson")
    (options,args) = optparser.parse_args()
    # Map the method name to its scoring function
    method = options.method.lower()
    if method == "poisson":
        func = poisson_score
    elif method == "diff":
        func = diff_score
    elif method == "fc":
        func = fc_score
    else:
        error("Unrecognized scoring method: %s" % (method))
        sys.exit(1)
    # All three of -t, -c and -n are required
    if not options.tfile or not options.cfile or not options.name:
        optparser.print_help()
        sys.exit()
    tf = options.tfile
    cf = options.cfile
    if not os.path.isfile(tf) or not os.path.isfile(cf):
        error("wiggle files are not valid!")
        sys.exit(1)
    try:
        tfhd = open(tf)
        cfhd = open(cf)
    except:
        error("Can't read wiggle files")
        sys.exit(1)
    # Open the two output files up-front so we fail before any heavy work
    try:
        bfhd = open(options.name+"_peaks.bed","w")
    except:
        error("Can't open %s to write" % options.name+"_peaks.bed")
        sys.exit(1)
    try:
        wfhd = open(options.name+"_scores.wig","w")
    except:
        error("Can't open %s to write" % options.name+"_scores.wig")
        sys.exit(1)
    info("open treatment wiggle file...")
    tio = WiggleIO.WiggleIO(tfhd)
    info("construct treatment wiggle track object...")
    ttrack = tio.build_wigtrack()
    tsum = ttrack.summary()[0]
    # genome-wide background level scaled to a 50bp span
    lambda_bg = tsum/options.gsize*50
    info("background average value: %.2f" % lambda_bg)
    info("open control wiggle file...")
    cio = WiggleIO.WiggleIO(cfhd)
    info("construct control wiggle track object...")
    ctrack = cio.build_wigtrack()
    csum = ctrack.summary()[0]
    tc_ratio = tsum/csum
    info("treatment/control = %.2f, this value will be used to adjust the local bias." % tc_ratio)
    info("construct control binkeeper object...")
    # re-read the control as a binkeeper for fast ranged queries
    ctrack = cio.build_binKeeper()
    info("build pvalues based on local lambda calculation...")
    strack = build_scores(ttrack,ctrack,func=func, w=options.window,space=50,bg=lambda_bg,tc_ratio=tc_ratio)
    info("write scores to wiggle file...")
    strack.write_wig(wfhd,"scores",shift=0)
    wfhd.close()
    info("call peaks...")
    wpeaks = strack.call_peaks(cutoff=options.cutoff,min_length=options.minlen,max_gap=options.maxgap)
    info("write to bed file...")
    bfhd.write(wpeaks.tobed())
    bfhd.close()
    info("finished")
def build_scores(treat, control, func, w=1000, space=10, bg=0.0001, tc_ratio=1.0):
    """Build a score wiggle track by applying *func* to every treatment
    data point and its locally estimated control bias (lambda).

    Chromosomes missing from *control* are skipped entirely.
    """
    scores = WigTrackI()
    scores.span = treat.span
    for chrom in treat.get_chr_names():
        t = treat.get_data_by_chr(chrom)
        try:
            c = control[chrom]
        except:
            # chromosome absent from the control track
            continue
        info("Calculate lambdas for chromosome %s" % chrom)
        c_lambda = __lambda(t, c, w=w, space=space, tc_ratio=tc_ratio, bg=bg)
        info("Calculate scores")
        for i, pos in enumerate(t[0]):
            # position 0 marks an empty slot; skip it
            if pos == 0:
                continue
            scores.add_loc(chrom, pos, func(t[1][i], c_lambda[i]))
    return scores
def __lambda ( t, c, w = 10000, space=10, tc_ratio = 1.0, bg=0.00001):
    """Estimate a local-background lambda for every position in *t*.

    For each position the result is the largest of: the genome-wide
    background *bg*, the scaled control average over a window of width
    *w*, and the scaled control average over a fixed 100bp window.

    tc_ratio : treatment/control normalisation ratio
    w : sliding window length
    """
    lambdas = array(FBYTE4, [])
    for pos in t[0]:
        wide_start = max(0, pos - w/2)
        wide_end = pos + w/2
        narrow_start = max(0, pos - 50)
        narrow_end = pos + 50
        # wide-window estimate, floored at the background level
        try:
            vals = c.pp2v(wide_start, wide_end)
            local = max(bg, sum(vals)/w*space*tc_ratio)
        except:
            local = bg
        # narrow 100bp window may raise the estimate further
        try:
            vals = c.pp2v(narrow_start, narrow_end)
            local = max(local, sum(vals)/100*space*tc_ratio)
        except:
            pass
        lambdas.append(local)
    return lambdas
def poisson_score ( t, c ):
    """Return -10*log10 of the upper-tail Poisson p-value of t given
    mean c, capped at 3100 when the p-value underflows to <= 0."""
    pvalue = poisson_cdf(t, c, lower=False)
    if pvalue > 0:
        return mathlog(pvalue, 10) * -10
    return 3100
def diff_score ( t, c ):
    """Score a point as the treatment value minus the local bias."""
    difference = t - c
    return difference
def fc_score ( t, c ):
    """Score a point as the fold change treatment/bias; None if bias is zero."""
    if c == 0:
        # division undefined; caller must handle None
        return None
    return t/c
# Script entry point: run main(), exiting quietly on Ctrl-C.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        sys.stderr.write("User interrupt me! ;-) See you!\n")
        sys.exit(0)
| {
"content_hash": "3b77ae4a065f9bdfff4d44f088009355",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 161,
"avg_line_length": 33.247933884297524,
"alnum_prop": 0.5753169276659209,
"repo_name": "taoliu/taolib",
"id": "735f35bebb54cb8bc6afe07fe68825d3bd450e22",
"size": "8113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Scripts/wig_call_peaks2.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "844262"
}
],
"symlink_target": ""
} |
"""
Random Value Property with Sleep
This application is a server of analog value objects that return a random
number when the present value is read. This version has an additional
'sleep' time that slows down its performance.
"""
import os
import random
import time
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
from bacpypes.consolelogging import ConfigArgumentParser
from bacpypes.core import run
from bacpypes.primitivedata import Real
from bacpypes.object import AnalogValueObject, Property, register_object_type
from bacpypes.errors import ExecutionError
from bacpypes.app import BIPSimpleApplication
from bacpypes.local.device import LocalDeviceObject
# some debugging
_debug = 0
_log = ModuleLogger(globals())
# settings
SLEEP_TIME = float(os.getenv('SLEEP_TIME', 0.1))
RANDOM_OBJECT_COUNT = int(os.getenv('RANDOM_OBJECT_COUNT', 10))
# globals
args = None
#
# RandomValueProperty
#
class RandomValueProperty(Property):
    """A read-only Real property that sleeps for the configured delay and
    then returns a fresh random value on every read."""
    def __init__(self, identifier):
        if _debug: RandomValueProperty._debug("__init__ %r", identifier)
        # Real-valued, optional, and not writable by BACnet clients.
        Property.__init__(self, identifier, Real, default=0.0, optional=True, mutable=False)
    def ReadProperty(self, obj, arrayIndex=None):
        if _debug: RandomValueProperty._debug("ReadProperty %r arrayIndex=%r", obj, arrayIndex)
        global args
        # access an array
        if arrayIndex is not None:
            raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')
        # sleep a little -- args.sleep comes from the --sleep command-line option
        time.sleep(args.sleep)
        # return a random value in [0, 100)
        value = random.random() * 100.0
        if _debug: RandomValueProperty._debug(" - value: %r", value)
        return value
    def WriteProperty(self, obj, value, arrayIndex=None, priority=None, direct=False):
        if _debug: RandomValueProperty._debug("WriteProperty %r %r arrayIndex=%r priority=%r direct=%r", obj, value, arrayIndex, priority, direct)
        # Writes are always rejected; the property is read-only.
        raise ExecutionError(errorClass='property', errorCode='writeAccessDenied')
bacpypes_debugging(RandomValueProperty)
#
# Random Value Object Type
#
class RandomAnalogValueObject(AnalogValueObject):
    """AnalogValueObject whose presentValue is served by RandomValueProperty."""
    properties = [
        RandomValueProperty('presentValue'),
        ]
    def __init__(self, **kwargs):
        if _debug: RandomAnalogValueObject._debug("__init__ %r", kwargs)
        AnalogValueObject.__init__(self, **kwargs)
bacpypes_debugging(RandomAnalogValueObject)
register_object_type(RandomAnalogValueObject)
#
# __main__
#
def main():
    """Parse arguments, build the local device and BACnet/IP application,
    register RANDOM_OBJECT_COUNT random-value objects, and run the core loop."""
    global args
    # parse the command line arguments
    parser = ConfigArgumentParser(description=__doc__)
    # add an option to override the sleep time
    parser.add_argument('--sleep', type=float,
        help="sleep before returning the value",
        default=SLEEP_TIME,
        )
    # parse the command line arguments
    args = parser.parse_args()
    if _debug: _log.debug("initialization")
    if _debug: _log.debug(" - args: %r", args)
    # make a device object
    this_device = LocalDeviceObject(ini=args.ini)
    if _debug: _log.debug(" - this_device: %r", this_device)
    # make a sample application
    this_application = BIPSimpleApplication(this_device, args.ini.address)
    # make some random input objects, identifiers starting at 1
    for i in range(1, RANDOM_OBJECT_COUNT+1):
        ravo = RandomAnalogValueObject(
            objectIdentifier=('analogValue', i),
            objectName='Random-%d' % (i,),
            )
        _log.debug(" - ravo: %r", ravo)
        this_application.add_object(ravo)
    # make sure they are all there
    _log.debug(" - object list: %r", this_device.objectList)
    _log.debug("running")
    # blocks until the application is terminated
    run()
    _log.debug("fini")
if __name__ == "__main__":
    # Standard script entry point.
    main()
| {
"content_hash": "446c75ca8572b71c992b8c3eae10be56",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 146,
"avg_line_length": 27.83582089552239,
"alnum_prop": 0.6798927613941019,
"repo_name": "JoelBender/bacpypes",
"id": "0f9cf3de59409d3b58369988fd75635d768436e1",
"size": "3753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/RandomAnalogValueSleep.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3856304"
},
{
"name": "Shell",
"bytes": "4906"
}
],
"symlink_target": ""
} |
from csv2_common import check_keys, requests, show_active_user_groups, show_table, verify_yaml_file
from subprocess import Popen, PIPE
import filecmp
import os
# Maps command-line switches to the form-field names expected by the server;
# passed as key_map to check_keys() by add() and update().
KEY_MAP = {
    '-gn': 'group_name',
    '-go': 'group_option',
    '-SU': 'is_superuser',
    '-ucn': 'cert_cn',
    '-un': 'username',
    '-upw': 'password',
    }
def _filter_by_user(gvar, qs):
"""
Internal function to filter a query set by the specified user name.
"""
if 'username' in gvar['command_args']:
for _ix in range(len(qs)-1, -1, -1):
if qs[_ix]['username'] != gvar['command_args']['username']:
del(qs[_ix])
return qs
def add(gvar):
    """
    Add a user.

    Returns the option lists when gvar['retrieve_options'] is set; otherwise
    validates the arguments, POSTs to /user/add/, and prints the server reply.
    """
    mandatory = ['-un', '-upw']
    required = []
    # Fixed: the original listed '-v' twice; the duplicate entry is removed.
    optional = ['-g', '-gn', '-H', '-h', '-SU', '-s', '-ucn', '-v', '-x509', '-xA']
    if gvar['retrieve_options']:
        return mandatory + required + optional
    # Check for missing arguments or help required.
    form_data = check_keys(
        gvar,
        mandatory,
        required,
        optional,
        key_map=KEY_MAP)
    # Create the user.
    response = requests(
        gvar,
        '/user/add/',
        form_data
        )
    if response['message']:
        print(response['message'])
def delete(gvar):
    """
    Delete a user.

    Verifies the target user exists, asks for confirmation (unless -Y/yes
    was given), then POSTs to /user/delete/ and prints the server reply.
    """
    mandatory = ['-un']
    required = []
    # Fixed: the original listed '-v' twice; the duplicate entry is removed.
    optional = ['-g', '-H', '-h', '-s', '-v', '-x509', '-xA', '-Y']
    if gvar['retrieve_options']:
        return mandatory + required + optional
    # Check for missing arguments or help required.
    check_keys(gvar, mandatory, required, optional)
    # Check that the target user exists.
    response = requests(gvar, '/user/list/')
    _found = False
    for row in response['user_list']:
        if row['username'] == gvar['user_settings']['username']:
            _found = True
            break
    if not _found:
        print('Error: "%s user delete" cannot delete "%s", user doesn\'t exist.' % (gvar['command_name'], gvar['user_settings']['username']))
        exit(1)
    # Confirm user delete.
    if not gvar['user_settings']['yes']:
        print('Are you sure you want to delete user "%s"? (yes|..)' % gvar['user_settings']['username'])
        _reply = input()
        if _reply != 'yes':
            print('%s user delete "%s" cancelled.' % (gvar['command_name'], gvar['user_settings']['username']))
            exit(0)
    # Delete the user.
    response = requests(
        gvar,
        '/user/delete/',
        form_data = {
            'username': gvar['user_settings']['username']
            }
        )
    if response['message']:
        print(response['message'])
def list(gvar):
    """
    List users.
    """
    mandatory = []
    required = []
    optional = ['-CSEP', '-CSV', '-g', '-H', '-h', '-NV', '-ok', '-r', '-s', '-un', '-V', '-VC', '-v', '-x509', '-xA']
    if gvar['retrieve_options']:
        return mandatory + required + optional
    # Validate arguments / handle help.
    check_keys(gvar, mandatory, required, optional)
    # Retrieve data (possibly after changing the user).
    response = requests(gvar, '/user/list/')
    if response['message']:
        print(response['message'])
    # Keep only the requested user's rows, if a username was given.
    user_list = _filter_by_user(gvar, response['user_list'])
    # Print report.
    show_active_user_groups(gvar, response)
    columns = [
        'username/Username,k',
        'cert_cn/Common Name',
        'active_group/Active Group',
        'user_groups/User Groups',
        'available_groups/Not In Groups',
        'is_superuser/Super User',
        'join_date/Joined',
    ]
    show_table(gvar, user_list, columns, title="Users")
def update(gvar):
    """
    Modify the specified user.
    """
    mandatory = ['-un']
    required = []
    optional = ['-g', '-gn', '-go', '-H', '-h', '-SU', '-s', '-ucn', '-upw', '-v', '-x509', '-xA']
    if gvar['retrieve_options']:
        return mandatory + required + optional
    # Validate arguments / handle help, translating switches via KEY_MAP.
    form_data = check_keys(
        gvar,
        mandatory,
        required,
        optional,
        key_map=KEY_MAP)
    # Besides the username there must be at least one field to change.
    if len(form_data) < 2:
        print('Error: "%s user update" requires at least one option to update.' % gvar['command_name'])
        exit(1)
    # Send the update to the server and echo its reply.
    response = requests(gvar, '/user/update/', form_data)
    if response['message']:
        print(response['message'])
| {
"content_hash": "433219c3013e27d5adda29a738cd152d",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 141,
"avg_line_length": 25.483333333333334,
"alnum_prop": 0.5275779376498801,
"repo_name": "hep-gc/cloudscheduler",
"id": "333f49f29e8063f2d2856182bcaa52b79ca8c181",
"size": "4587",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "cli/bin/csv2_user.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "71824"
},
{
"name": "Gherkin",
"bytes": "1017"
},
{
"name": "HTML",
"bytes": "362015"
},
{
"name": "JavaScript",
"bytes": "144210"
},
{
"name": "Jinja",
"bytes": "51122"
},
{
"name": "Python",
"bytes": "2725635"
},
{
"name": "Roff",
"bytes": "189652"
},
{
"name": "Shell",
"bytes": "33321"
}
],
"symlink_target": ""
} |
from urllib.request import urlopen
from random import randint
def wordListSum(wordList):
    """Return the total of all frequency counts in *wordList*.

    wordList maps word -> occurrence count; the total serves as the upper
    bound when drawing a weighted random word in retrieveRandomWord().
    """
    # sum() over the values replaces the manual loop; the original also
    # shadowed the builtin 'sum' with its accumulator variable.
    return sum(wordList.values())
def retrieveRandomWord(wordList):
    """Draw a key from *wordList* at random, weighted by its count."""
    # Pick a position in [1, total] and walk the counts until it is used up.
    remaining = randint(1, wordListSum(wordList))
    for candidate, weight in wordList.items():
        remaining -= weight
        if remaining <= 0:
            return candidate
def buildWordDict(text):
    """Build a first-order Markov transition table from *text*.

    Returns a nested dict mapping each word to a dict of the words that
    follow it, with occurrence counts, e.g.:
    {word_a : {word_b : 2, word_c : 1, word_d : 1},
     word_e : {word_b : 5, word_d : 2}, ...}
    """
    # Remove newlines and quotes.
    text = text.replace("\n", " ")
    text = text.replace("\"", "")
    # Make sure punctuation marks are treated as their own "words,"
    # so that they will be included in the Markov chain.
    punctuation = [',','.',';',':']
    for symbol in punctuation:
        text = text.replace(symbol, " "+symbol+" ")
    # Split and filter out empty words in one pass.
    words = [word for word in text.split(" ") if word != ""]
    wordDict = {}
    # Count each consecutive (previous, current) pair.
    for prev, cur in zip(words, words[1:]):
        followers = wordDict.setdefault(prev, {})
        followers[cur] = followers.get(cur, 0) + 1
    return wordDict
# Download the speech corpus and decode it as UTF-8 (network I/O happens
# here, at module run time).
text = str(urlopen("http://pythonscraping.com/files/inaugurationSpeech.txt")
           .read(), 'utf-8')
wordDict = buildWordDict(text)
#Generate a Markov chain of length 100
length = 100
chain = ""
currentWord = "I"
for i in range(0, length):
    chain += currentWord+" "
    # Next word is drawn from the distribution of words that followed
    # currentWord in the corpus.
    currentWord = retrieveRandomWord(wordDict[currentWord])
print(chain) | {
"content_hash": "06cf92a2a76fbfc365747ed644d2ed41",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 76,
"avg_line_length": 34.08163265306123,
"alnum_prop": 0.6119760479041916,
"repo_name": "Danceiny/HackGirlfriend",
"id": "9af3b17ea6d1782e11b0b0ffe49f6892bb0658ad",
"size": "1670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Temp/markov.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "372365"
},
{
"name": "C#",
"bytes": "446184"
},
{
"name": "C++",
"bytes": "51162"
},
{
"name": "CSS",
"bytes": "40242"
},
{
"name": "HTML",
"bytes": "2241063"
},
{
"name": "JavaScript",
"bytes": "38335"
},
{
"name": "Makefile",
"bytes": "141"
},
{
"name": "Python",
"bytes": "522540"
},
{
"name": "R",
"bytes": "3235"
},
{
"name": "Shell",
"bytes": "603"
},
{
"name": "Vue",
"bytes": "22778"
}
],
"symlink_target": ""
} |
import os
from supriya.tools.servertools.ServerObjectProxy import ServerObjectProxy
class BufferGroup(ServerObjectProxy):
    r'''A buffer group.
    ::
        >>> server = servertools.Server().boot()
    ::
        >>> buffer_group = servertools.BufferGroup(buffer_count=4)
        >>> buffer_group
        <BufferGroup: {4} @ None>
    ::
        >>> buffer_group.allocate(
        ...     frame_count=8192,
        ...     server=server,
        ...     sync=True,
        ...     )
        <BufferGroup: {4} @ 0>
    ::
        >>> buffer_group
        <BufferGroup: {4} @ 0>
    ::
        >>> buffer_group.free()
    '''
    ### CLASS VARIABLES ###
    __documentation_section__ = 'Main Classes'
    # Slots keep instances lightweight: only the starting buffer id and the
    # tuple of member buffers are stored.
    __slots__ = (
        '_buffer_id',
        '_buffers',
        )
    ### INITIALIZER ###
    def __init__(
        self,
        buffer_count=1,
        ):
        # Construction is local-only; no server interaction happens until
        # allocate() is called.
        from supriya.tools import servertools
        ServerObjectProxy.__init__(self)
        self._buffer_id = None
        buffer_count = int(buffer_count)
        assert 0 < buffer_count
        self._buffers = tuple(
            servertools.Buffer(buffer_group_or_index=self)
            for _ in range(buffer_count)
            )
    ### SPECIAL METHODS ###
    def __contains__(self, item):
        # Membership is delegated to the underlying buffer tuple.
        return self.buffers.__contains__(item)
    def __float__(self):
        # NOTE(review): raises TypeError while unallocated (buffer_id is None).
        return float(self.buffer_id)
    def __getitem__(self, index):
        r'''Gets buffer at `index`.
        Returns buffer.
        '''
        return self._buffers[index]
    def __int__(self):
        # NOTE(review): raises TypeError while unallocated (buffer_id is None).
        return int(self.buffer_id)
    def __iter__(self):
        return iter(self.buffers)
    def __len__(self):
        r'''Gets length of buffer group.
        Returns integer.
        '''
        return len(self._buffers)
    def __repr__(self):
        r'''Gets interpreter representation of buffer group.
        Returns string.
        '''
        string = '<{}: {{{}}} @ {}>'.format(
            type(self).__name__,
            len(self),
            self.buffer_id
            )
        return string
    ### PRIVATE METHODS ###
    def _register_with_local_server(self, server):
        # Attach to the server proxy, reserve a contiguous span of buffer
        # ids, then register each member buffer locally.
        ServerObjectProxy.allocate(
            self,
            server=server,
            )
        allocator = self.server.buffer_allocator
        buffer_id = allocator.allocate(len(self))
        if buffer_id is None:
            # Roll back the proxy attachment if no id span was available.
            ServerObjectProxy.free(self)
            raise ValueError
        self._buffer_id = buffer_id
        for buffer_ in self:
            buffer_._register_with_local_server()
        return buffer_id
    ### PUBLIC METHODS ###
    def allocate(
        self,
        channel_count=1,
        frame_count=None,
        server=None,
        sync=True,
        ):
        r'''Allocates buffer group.
        Returns buffer group.
        '''
        from supriya.tools import servertools
        if self.is_allocated:
            # NOTE(review): returns None (not self) when already allocated;
            # confirm callers do not rely on the chained return here.
            return
        self._register_with_local_server(server)
        channel_count = int(channel_count)
        # NOTE(review): frame_count defaults to None, so int(None) raises
        # TypeError if it is omitted -- a frame_count appears to be required.
        frame_count = int(frame_count)
        assert 0 < channel_count
        assert 0 < frame_count
        # Bundle one allocation request per member buffer into a single
        # message to the server.
        message_bundler = servertools.MessageBundler(
            server=server,
            sync=sync,
            )
        with message_bundler:
            for buffer_ in self:
                request = buffer_._register_with_remote_server(
                    channel_count=channel_count,
                    frame_count=frame_count,
                    )
                message_bundler.add_message(request)
        return self
    def free(self):
        r'''Frees all buffers in buffer group.
        Returns none.
        '''
        if not self.is_allocated:
            return
        for buffer_ in self:
            buffer_.free()
        buffer_id = self.buffer_id
        self._buffer_id = None
        # Release the id span back to the allocator, then detach the proxy.
        self.server.buffer_allocator.free(buffer_id)
        ServerObjectProxy.free(self)
    def index(self, item):
        # Position of `item` within the member buffer tuple.
        return self.buffers.index(item)
    @staticmethod
    def from_file_paths(
        file_paths,
        server=None,
        ):
        r'''Create a buffer group from `file_paths`.
        ::
            >>> file_paths = Assets['*mono_1s*']
            >>> len(file_paths)
            4
        ::
            >>> buffer_group = BufferGroup.from_file_paths(file_paths)
        ::
            >>> for buffer_ in buffer_group:
            ...     buffer_, buffer_.frame_count
            ...
            (<Buffer: 0>, 44100)
            (<Buffer: 1>, 44100)
            (<Buffer: 2>, 44100)
            (<Buffer: 3>, 44100)
        Returns buffer group.
        '''
        from supriya.tools import servertools
        # Every path must exist before any allocation is attempted.
        for file_path in file_paths:
            assert os.path.exists(file_path)
        buffer_group = BufferGroup(buffer_count=len(file_paths))
        buffer_group._register_with_local_server(server)
        message_bundler = servertools.MessageBundler(
            server=server,
            sync=True,
            )
        with message_bundler:
            for buffer_, file_path in zip(buffer_group.buffers, file_paths):
                request = buffer_._register_with_remote_server(
                    file_path=file_path,
                    )
                message_bundler.add_message(request)
        return buffer_group
    def zero(self):
        r'''Analogous to SuperCollider's Buffer.zero.
        '''
        raise NotImplementedError
    ### PUBLIC PROPERTIES ###
    @property
    def buffer_id(self):
        r'''Gets initial buffer id.
        Returns integer or none.
        '''
        return self._buffer_id
    @property
    def buffers(self):
        r'''Gets associated buffers.
        Returns tuple or buffers.
        '''
        return self._buffers
@property
def is_allocated(self):
r'''Is true when buffer group is allocated. Otherwise false.
Returns boolean.
'''
return self.server is not None | {
"content_hash": "adffb8b46f991dc5814d55b31f53f8d3",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 76,
"avg_line_length": 24.23170731707317,
"alnum_prop": 0.5192081865458815,
"repo_name": "andrewyoung1991/supriya",
"id": "dbaf204f8923643f1f937839d127d8fc29921dde",
"size": "5987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "supriya/tools/servertools/BufferGroup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6712"
},
{
"name": "CSS",
"bytes": "446"
},
{
"name": "HTML",
"bytes": "1083"
},
{
"name": "JavaScript",
"bytes": "6163"
},
{
"name": "Makefile",
"bytes": "6775"
},
{
"name": "Python",
"bytes": "2693776"
}
],
"symlink_target": ""
} |
import re
import json
import urwid
from api import Api
from helper import Helper
class State(object):
    """Application state for the chancli UI.

    Owns the API client, caches the raw JSON responses, and implements one
    handler per user command.  Handlers return a dict with a 'content'
    urwid widget (or False) and a 'status' line string.
    """
    def __init__(self):
        # Api calls
        self.api = Api()
        # Save temporary data for quick opening (open <index> command)
        self.current_threads = {'board': None, 'list': []}
        # JSON data
        self.boards_json = None
        self.threads_json = None
        self.thread_json = None
        self.archive_json = None
    def listboards(self):
        """Render the list of all boards; the API result is cached."""
        # Do not call the API more than once
        if not self.boards_json:
            data = self.api.get_boards()
            # Determine if an error occured
            if not data['error']:
                self.boards_json = data['result']
            else:
                return data['error']
        # Used for urwid.Text which is going to be displayed
        text = [("\nDisplaying all boards. Codes are "), (('highlight'), "highlighted"), ".\n\n"]
        if self.boards_json:
            data = json.loads(self.boards_json)
            for board in data['boards']:
                text.append("/")
                text.append(('highlight', board['board']))
                text.append("/ - {}\n".format(board['title']))
        return {'content': urwid.Text(text), 'status': "Displaying all boards"}
    def open(self, text):
        """Open thread by index shown on the screen."""
        arg = re.match(' \w+$', text[4:])
        if self.current_threads['board'] and arg:
            index = arg.group().strip()
            # Check if convertible to integer
            if index.isdigit():
                index = int(index) - 1 # Indices are incremented by 1
            else:
                index = -1
            # Check if regex matches + index in list
            if arg and -1 < index < len(self.current_threads['list']):
                board = self.current_threads['board']
                thread_id = self.current_threads['list'][index] # Get from the saved thread list
                # Delegate to the full thread handler.
                return self.thread("thread {} {}".format(board, thread_id))
            else:
                return {'content': False, 'status': "Invalid argument. Wrong index? Use open <index>."}
        else:
            return {'content': False, 'status': "Open a board first to issue this command."}
    def board(self, text):
        """Display a page of a board: 'board <code>' or 'board <code> <page>'."""
        arg1 = re.match(' \w+$', text[5:]) # board <code>
        arg2 = re.match(' \w+ \w+$', text[5:]) # board <code> <page>
        if arg1:
            board = arg1.group().strip()
            page = 1
        elif arg2:
            arg2 = arg2.group().strip()
            arg2 = arg2.split(" ") # Split to get real arguments
            board = arg2[0]
            page = arg2[1]
        else:
            return {'content': False, 'status': "Invalid arguments. Use board <code> or board <code> <page>."}
        data = self.api.get_threads(board, page)
        # Determine if an error occured
        if not data['error']:
            self.threads_json = data['result']
        else:
            return data['error']
        # List containing urwid widgets - to be wrapped up by urwid.Pile
        content = [
            urwid.Text([("\nDisplaying page "), (('highlight'), str(page)), " of /", (('highlight'), str(board)), "/.\n"])
        ]
        if self.threads_json:
            self.current_threads['board'] = board
            del self.current_threads['list'][:] # Reset previous temporary data
            data = json.loads(self.threads_json)
            for index, post in enumerate(data['threads'], 1): # index starting from 1 to open threads without specifying full id (see: open <index>)
                self.current_threads['list'].append(post['posts'][0]['no']) # Quick opening
                _header = [
                    ('highlight', "({}) ".format(index)),
                    ('number', "No. {} ".format(post['posts'][0]['no'])),
                    ('time', "{}".format(post['posts'][0]['now']))
                ]
                # Check for empty comment
                if "com" in post['posts'][0]:
                    _text = Helper.parse_comment(post['posts'][0]['com'])
                else:
                    _text = "- no comment -\n"
                content.append(urwid.Padding(urwid.Text(_header), 'left', left=0))
                content.append(urwid.Padding(urwid.Text(_text), 'left', left=4)) # Indent text content from header
        return {'content': urwid.Pile(content), 'status': "Displaying page {} of /{}/".format(page, board)}
    def thread(self, text):
        """Open thread by specifying board and id."""
        arg = re.match(' \w+ \w+$', text[6:]) # thread <board> <id>
        if arg:
            arg = arg.group().strip()
            arg = arg.split(" ") # Split to get real arguments
            board = arg[0]
            thread_id = arg[1]
        else:
            return {'content': False, 'status': "Invalid arguments. Use thread <board> <id>."}
        data = self.api.get_thread(board, thread_id)
        # Determine if an error occured
        if not data['error']:
            self.thread_json = data['result']
        else:
            return data['error']
        # List containing urwid widgets - to be wrapped up by urwid.Pile
        content = [
            urwid.Text([("\nDisplaying thread "), (('highlight'), str(thread_id)), " in /", (('highlight'), str(board)), "/.\n"])
        ]
        if self.thread_json:
            data = json.loads(self.thread_json)
            for post in data["posts"]:
                _header = [
                    ('number', "No. {} ".format(post['no'])),
                    ('time', "{}".format(post['now']))
                ]
                # Check for empty comment
                if "com" in post:
                    _text = Helper.parse_comment(post['com'])
                else:
                    _text = "- no comment -\n"
                content.append(urwid.Padding(urwid.Text(_header), 'left', left=0))
                content.append(urwid.Padding(urwid.Text(_text), 'left', left=4)) # Indent text content from header
        return {'content': urwid.Pile(content), 'status': "Displaying thread {} in /{}/".format(thread_id, board)}
    def archive(self, text):
        """List archived thread ids for a board: 'archive <code>'."""
        arg = re.match(' \w+$', text[7:])
        if arg:
            board = arg.group().strip()
        else:
            return {'content': False, 'status': "Invalid argument. Use archive <code>."}
        data = self.api.get_archive(board)
        # Determine if an error occured
        if not data['error']:
            self.archive_json = data['result']
        else:
            return data['error']
        # Used for urwid.Text which is going to be displayed
        text = [("\nDisplaying archive"), " of /", (('highlight'), str(board)), "/.\n\n"]
        if self.archive_json:
            self.current_threads['board'] = board
            del self.current_threads['list'][:] # Reset previous temporary data
            data = json.loads(self.archive_json)
            for index, thread in enumerate(data, 1): # index starting from 1 to open threads without specifying full id (see: open <index>)
                self.current_threads['list'].append(thread) # Quick opening
                text.append(('highlight', "[{}]".format(index)))
                text.append(" No. {}\n".format(thread))
        return {'content': urwid.Text(text), 'status': "Displaying archive of /{}/".format(board)}
    def empty(self):
        """Handler for an empty command line: keep content, show a hint."""
        return {'content': False, 'status': "Type help for instructions, exit to quit."}
    def invalid(self, text):
        """Handler for an unrecognized command."""
        return {'content': False, 'status': "Invalid command: {}".format(text)}
    @staticmethod
    def splash():
        # ASCII-art banner shown at startup.
        return urwid.Text([
            ("\n\n ____ _ _ _ _ _ ____ _ ___\n"
            " / ___| | | | / \ | \ | | / ___| | |_ _|\n"
            " | | | |_| | / _ \ | \| | | | | | | |\n"
            " | |___| _ |/ ___ \| |\ | | |___| |___ | |\n"
            " \____|_| |_/_/ \_\_| \_| \____|_____|___|\n"
            " chancli version 0.0.1")
        ])
    @staticmethod
    def help():
        """Static help page listing all commands."""
        return {
            'content': urwid.Text([
                ('underline', "\nBasic Commands\n\n"),
                ('Chancli utilizes the official 4chan API, which can be found at https://github.com/4chan/4chan-API.\n\n'),
                ('highlight', "listboards"), " - list available boards aside their code\n",
                ('highlight', "open <id>"), " - open a thread from the current window, specified by its index\n",
                ('highlight', "board <code>"), " - display the first page (ex: board g)\n",
                ('highlight', "board <code> <page>"), " - display the nth page starting from 1\n",
                ('highlight', "thread <board> <id>"), " - open a specific thread\n",
                ('highlight', "archive <code>"), " - display archived threads from a board\n\n",
                ('highlight', "help"), " - show this page\n",
                ('highlight', "license"), " - display the license page\n",
                ('highlight', "exit/quit/q"), " - exit the application"
            ]),
            'status': "Help page"
        }
    @staticmethod
    def license():
        """Static page with the full MIT license text."""
        return {
            'content': ("\nThe MIT License (MIT)\n\n"
                "Copyright (c) 2015 Son Nguyen <mail@gimu.org>\n\n"
                "Permission is hereby granted, free of charge, to any person obtaining a copy\n"
                "of this software and associated documentation files (the \"Software\"), to deal\n"
                "in the Software without restriction, including without limitation the rights\n"
                "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n"
                "copies of the Software, and to permit persons to whom the Software is\n"
                "furnished to do so, subject to the following conditions:\n\n"
                "The above copyright notice and this permission notice shall be included in\n"
                "all copies or substantial portions of the Software.\n\n"
                "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n"
                "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n"
                "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n"
                "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n"
                "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n"
                "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n"
                "THE SOFTWARE."),
            'status': "License page"
        }
| {
"content_hash": "a64e197cae8b28c3bcb50abd82051fa6",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 148,
"avg_line_length": 42.21176470588235,
"alnum_prop": 0.5177443329617243,
"repo_name": "gimu/chancli",
"id": "0eefe2306fb479201c275206c3178881a2478467",
"size": "10787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "state.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19673"
}
],
"symlink_target": ""
} |
"""An auto-roller for GN binaries into Chromium.
This script is used to update the GN binaries that a Chromium
checkout uses. In order to update the binaries, one must follow
four steps in order:
1. Trigger try jobs to build a new GN binary at tip-of-tree and upload
the newly-built binaries into the right Google CloudStorage bucket.
2. Wait for the try jobs to complete.
3. Update the buildtools repo with the .sha1 hashes of the newly built
binaries.
4. Update Chromium's DEPS file to the new version of the buildtools repo.
The script has four commands that correspond to the four steps above:
'build', 'wait', 'roll_buildtools', and 'roll_deps'.
The script has a fifth command, 'roll', that runs the four in order.
If given no arguments, the script will run the 'roll' command.
It can only be run on linux in a clean Chromium checkout; it should
error out in most cases if something bad happens, but the error checking
isn't yet foolproof.
"""
from __future__ import print_function
import argparse
import json
import os
import re
import subprocess
import sys
import tempfile
import time
import urllib2
# Locate depot_tools on PATH (a directory named 'depot_tools' that contains
# gclient.py) and make it importable, along with its bundled third_party
# packages -- required for the 'upload' module imported below.
depot_tools_path = None
for p in os.environ['PATH'].split(os.pathsep):
    if (p.rstrip(os.sep).endswith('depot_tools') and
        os.path.isfile(os.path.join(p, 'gclient.py'))):
        depot_tools_path = p
# Fail fast if depot_tools is not installed.
assert depot_tools_path
if not depot_tools_path in sys.path:
    sys.path.insert(0, depot_tools_path)
third_party_path = os.path.join(depot_tools_path, 'third_party')
if not third_party_path in sys.path:
    sys.path.insert(0, third_party_path)
import upload
CHROMIUM_REPO = 'https://chromium.googlesource.com/chromium/src.git'
CODE_REVIEW_SERVER = 'https://codereview.chromium.org'
class GNRoller(object):
def __init__(self):
self.chromium_src_dir = None
self.buildtools_dir = None
self.old_gn_commitish = None
self.new_gn_commitish = None
self.old_gn_version = None
self.new_gn_version = None
self.reviewer = 'dpranke@chromium.org'
if os.getenv('USER') == 'dpranke':
self.reviewer = 'brettw@chromium.org'
def Roll(self):
parser = argparse.ArgumentParser()
parser.usage = __doc__
parser.add_argument('command', nargs='?', default='roll',
help='build|roll|roll_buildtools|roll_deps|wait'
' (%(default)s is the default)')
args = parser.parse_args()
command = args.command
ret = self.SetUp()
if not ret and command in ('roll', 'build'):
ret = self.TriggerBuild()
if not ret and command in ('roll', 'wait'):
ret = self.WaitForBuildToFinish()
if not ret and command in ('roll', 'roll_buildtools'):
ret = self.RollBuildtools()
if not ret and command in ('roll', 'roll_deps'):
ret = self.RollDEPS()
return ret
def SetUp(self):
if sys.platform != 'linux2':
print('roll_gn is only tested and working on Linux for now.')
return 1
ret, out, _ = self.Call('git config --get remote.origin.url')
origin = out.strip()
if ret or origin != CHROMIUM_REPO:
print('Not in a Chromium repo? git config --get remote.origin.url '
'returned %d: %s' % (ret, origin))
return 1
ret, _, _ = self.Call('git diff -q')
if ret:
print("Checkout is dirty, exiting")
return 1
_, out, _ = self.Call('git rev-parse --show-toplevel', cwd=os.getcwd())
self.chromium_src_dir = out.strip()
self.buildtools_dir = os.path.join(self.chromium_src_dir, 'buildtools')
self.new_gn_commitish, self.new_gn_version = self.GetNewVersions()
_, out, _ = self.Call('gn --version')
self.old_gn_version = out.strip()
_, out, _ = self.Call('git crrev-parse %s' % self.old_gn_version)
self.old_gn_commitish = out.strip()
return 0
def GetNewVersions(self):
_, out, _ = self.Call('git log -1 --grep Cr-Commit-Position')
commit_msg = out.splitlines()
first_line = commit_msg[0]
new_gn_commitish = first_line.split()[1]
last_line = commit_msg[-1]
new_gn_version = re.sub('.*master@{#(\d+)}', '\\1', last_line)
return new_gn_commitish, new_gn_version
def TriggerBuild(self):
ret, _, _ = self.Call('git new-branch build_gn_%s' % self.new_gn_version)
if ret:
print('Failed to create a new branch for build_gn_%s' %
self.new_gn_version)
return 1
self.MakeDummyDepsChange()
ret, out, err = self.Call('git commit -a -m "Build gn at %s"' %
self.new_gn_version)
if ret:
print('git commit failed: %s' % out + err)
return 1
print('Uploading CL to build GN at {#%s} - %s' %
(self.new_gn_version, self.new_gn_commitish))
ret, out, err = self.Call('git cl upload -f')
if ret:
print('git-cl upload failed: %s' % out + err)
return 1
print('Starting try jobs')
self.Call('git-cl try -b linux_chromium_gn_upload '
'-b mac_chromium_gn_upload '
'-b win8_chromium_gn_upload -r %s' % self.new_gn_commitish)
return 0
def MakeDummyDepsChange(self):
with open('DEPS') as fp:
deps_content = fp.read()
new_deps = deps_content.replace("'buildtools_revision':",
"'buildtools_revision': ")
with open('DEPS', 'w') as fp:
fp.write(new_deps)
def WaitForBuildToFinish(self):
print('Checking build')
results = self.CheckBuild()
while any(r['state'] == 'pending' for r in results.values()):
print()
print('Sleeping for 30 seconds')
time.sleep(30)
print('Checking build')
results = self.CheckBuild()
return 0 if all(r['state'] == 'success' for r in results.values()) else 1
def CheckBuild(self):
_, out, _ = self.Call('git-cl issue')
issue = int(out.split()[2])
_, out, _ = self.Call('git config user.email')
email = ''
rpc_server = upload.GetRpcServer(CODE_REVIEW_SERVER, email)
try:
props = json.loads(rpc_server.Send('/api/%d' % issue))
except Exception as _e:
raise
patchset = int(props['patchsets'][-1])
try:
patchset_data = json.loads(rpc_server.Send('/api/%d/%d' %
(issue, patchset)))
except Exception as _e:
raise
TRY_JOB_RESULT_STATES = ('success', 'warnings', 'failure', 'skipped',
'exception', 'retry', 'pending')
try_job_results = patchset_data['try_job_results']
if not try_job_results:
print('No try jobs found on most recent patchset')
return 1
results = {}
for job in try_job_results:
builder = job['builder']
if builder == 'linux_chromium_gn_upload':
platform = 'linux64'
elif builder == 'mac_chromium_gn_upload':
platform = 'mac'
elif builder == 'win8_chromium_gn_upload':
platform = 'win'
else:
print('Unexpected builder: %s')
continue
state = TRY_JOB_RESULT_STATES[int(job['result'])]
url_str = ' %s' % job['url']
build = url_str.split('/')[-1]
sha1 = '-'
results.setdefault(platform, {'build': -1, 'sha1': '', 'url': url_str})
if state == 'success':
jsurl = url_str.replace('/builders/', '/json/builders/')
fp = urllib2.urlopen(jsurl)
js = json.loads(fp.read())
fp.close()
for step in js['steps']:
if step['name'] == 'gn sha1':
sha1 = step['text'][1]
if results[platform]['build'] < build:
results[platform]['build'] = build
results[platform]['sha1'] = sha1
results[platform]['state'] = state
results[platform]['url'] = url_str
for platform, r in results.items():
print(platform)
print(' sha1: %s' % r['sha1'])
print(' state: %s' % r['state'])
print(' build: %s' % r['build'])
print(' url: %s' % r['url'])
print()
return results
def RollBuildtools(self):
results = self.CheckBuild()
if not all(r['state'] == 'success' for r in results.values()):
print("Roll isn't done or didn't succeed, exiting:")
return 1
desc = self.GetBuildtoolsDesc()
self.Call('git new-branch roll_buildtools_gn_%s' % self.new_gn_version,
cwd=self.buildtools_dir)
for platform in results:
fname = 'gn.exe.sha1' if platform == 'win' else 'gn.sha1'
path = os.path.join(self.buildtools_dir, platform, fname)
with open(path, 'w') as fp:
fp.write('%s\n' % results[platform]['sha1'])
desc_file = tempfile.NamedTemporaryFile(delete=False)
try:
desc_file.write(desc)
desc_file.close()
self.Call('git commit -a -F %s' % desc_file.name,
cwd=self.buildtools_dir)
self.Call('git-cl upload -f --send-mail',
cwd=self.buildtools_dir)
finally:
os.remove(desc_file.name)
self.Call('git cl push', cwd=self.buildtools_dir)
# Fetch the revision we just committed so that RollDEPS will find it.
self.Call('git cl fetch', cwd=self.buildtools_dir)
return 0
def RollDEPS(self):
    """Point Chromium's DEPS at the new buildtools revision and upload the CL.

    Returns 0 on success, 1 if the buildtools_revision line could not be
    located in the DEPS file.
    """
    # The buildtools commit we just pushed is now origin/master there.
    _, out, _ = self.Call('git rev-parse origin/master',
                          cwd=self.buildtools_dir)
    new_buildtools_commitish = out.strip()
    new_deps_lines = []
    old_buildtools_commitish = ''
    # Rewrite DEPS line-by-line, swapping only the buildtools_revision value.
    with open(os.path.join(self.chromium_src_dir, 'DEPS')) as fp:
        for l in fp.readlines():
            m = re.match(".*'buildtools_revision':.*'(.+)',", l)
            if m:
                old_buildtools_commitish = m.group(1)
                new_deps_lines.append(" 'buildtools_revision': '%s'," %
                                      new_buildtools_commitish)
            else:
                new_deps_lines.append(l)
    if not old_buildtools_commitish:
        print('Could not update DEPS properly, exiting')
        return 1
    # NOTE(review): this writes 'DEPS' relative to the current directory —
    # presumably the process cwd is chromium_src_dir here; confirm.
    with open('DEPS', 'w') as fp:
        fp.write(''.join(new_deps_lines) + '\n')
    desc = self.GetDEPSRollDesc(old_buildtools_commitish,
                                new_buildtools_commitish)
    # Temp file feeds the description to `git commit -F`; delete=False so
    # git can read it after close().
    desc_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        desc_file.write(desc)
        desc_file.close()
        self.Call('git commit -a -F %s' % desc_file.name)
        self.Call('git-cl upload -f --send-mail --commit-queue')
    finally:
        os.remove(desc_file.name)
    return 0
def GetBuildtoolsDesc(self):
    """Build the CL description for the buildtools-side roll of GN."""
    header = 'Roll gn %s..%s (r%s:%s)' % (
        self.old_gn_commitish, self.new_gn_commitish,
        self.old_gn_version, self.new_gn_version)
    # GetGNChanges() already ends with a newline of its own.
    return '%s\n\n%s\nTBR=%s\n' % (header, self.GetGNChanges(), self.reviewer)
def GetDEPSRollDesc(self, old_buildtools_commitish, new_buildtools_commitish):
    """Build the CL description for the Chromium-side DEPS roll."""
    # Extra trybots that exercise the GN builds affected by this roll.
    trybots = ('CQ_EXTRA_TRYBOTS=tryserver.chromium.mac:mac_chromium_gn_rel,'
               'mac_chromium_gn_dbg;'
               'tryserver.chromium.win:win8_chromium_gn_dbg,'
               'win_chromium_gn_x64_rel\n')
    template = ('Roll DEPS %s..%s\n\n'
                ' in order to roll GN %s..%s (r%s:%s)\n\n'
                '%s\n'
                'TBR=%s\n')
    body = template % (old_buildtools_commitish,
                       new_buildtools_commitish,
                       self.old_gn_commitish,
                       self.new_gn_commitish,
                       self.old_gn_version,
                       self.new_gn_version,
                       self.GetGNChanges(),
                       self.reviewer)
    return body + trybots
def GetGNChanges(self):
    """Return a one-line-per-commit git log of tools/gn between the two rolls."""
    # %%h/%%s survive the outer %-format as the literal git placeholders.
    cmd = "git log --pretty=' %%h %%s' %s..%s tools/gn" % (
        self.old_gn_commitish, self.new_gn_commitish)
    _, log, _ = self.Call(cmd)
    return log
def Call(self, cmd, cwd=None):
    """Run *cmd* through the shell, capturing stdout.

    Returns (returncode, stdout, stderr); stderr is always None because it
    is not piped.
    """
    working_dir = cwd or self.chromium_src_dir
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True,
                            cwd=working_dir)
    stdout, stderr = proc.communicate()
    return proc.returncode, stdout, stderr
# Script entry point: run the full roll and propagate its exit status.
if __name__ == '__main__':
    sys.exit(GNRoller().Roll())
| {
"content_hash": "73b1c0bf1f4ee2133afdbd63548a5583",
"timestamp": "",
"source": "github",
"line_count": 384,
"max_line_length": 80,
"avg_line_length": 30.84375,
"alnum_prop": 0.5972644376899696,
"repo_name": "lihui7115/ChromiumGStreamerBackend",
"id": "ab85886a0b67513c2f868117da7284c833ae36d8",
"size": "12007",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/gn/bin/roll_gn.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "37073"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9508834"
},
{
"name": "C++",
"bytes": "242598549"
},
{
"name": "CSS",
"bytes": "943747"
},
{
"name": "DM",
"bytes": "60"
},
{
"name": "Groff",
"bytes": "2494"
},
{
"name": "HTML",
"bytes": "27281878"
},
{
"name": "Java",
"bytes": "14561064"
},
{
"name": "JavaScript",
"bytes": "20540839"
},
{
"name": "Makefile",
"bytes": "70864"
},
{
"name": "Objective-C",
"bytes": "1745880"
},
{
"name": "Objective-C++",
"bytes": "10008668"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "178732"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "482954"
},
{
"name": "Python",
"bytes": "8626890"
},
{
"name": "Shell",
"bytes": "481888"
},
{
"name": "Standard ML",
"bytes": "5106"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
from tkinter import *
import time
import random
# Delay between animation frames, in seconds (~100 FPS main loop).
SLEEP_TIME = 0.01
# Paddle step per key press: [horizontal, vertical] pixels.
PADDLE_SPEED = [20, 10]
# Initial ball velocity: [horizontal, vertical] pixels per frame.
BALL_SPEED = [1, 3]
# Model for the Ball class
# canvas is the tkinter current canvas
# color is the color of the ball
# paddle_pos is the current position of the paddle
# speed [x, y] is the absolute speed of the ball
class Ball:
    def __init__(self, canvas, color, speed):
        # canvas: tkinter Canvas to draw on.
        # color: fill color of the ball.
        # speed: [dx, dy] applied every frame; the list is mutated in place
        # by move()/stop(), so the caller's BALL_SPEED list is shared.
        self.canvas = canvas
        self.color = color
        # 10x10 oval created at the origin, then moved to the start position.
        self.ball = canvas.create_oval(0, 0, 10, 10, fill=color)
        self.speed = speed
        canvas.move(self.ball, 250, 250)
    # paddle is the identifier of the paddle element
    # (0,1)---------
    # | |
    # | |
    # ---------(3,4)
    def move(self, paddle):
        """Advance the ball one frame; bounce off the walls and the paddle."""
        # coords() returns [x1, y1, x2, y2] for the bounding box.
        cur_pos = self.canvas.coords(self.ball)
        paddle_pos = self.canvas.coords(paddle)
        self.canvas.move(self.ball, self.speed[0], self.speed[1])
        # Wall bounces: force the sign of the velocity component rather than
        # negating it, so repeated triggers can't get the ball stuck.
        if cur_pos[1] <= 0:
            self.speed[1] = abs(self.speed[1])
        if cur_pos[3] >= self.canvas.winfo_height():
            self.speed[1] = -abs(self.speed[1])
        if cur_pos[0] <= 0:
            self.speed[0] = abs(self.speed[0])
        if cur_pos[2] >= self.canvas.winfo_width():
            self.speed[0] = -abs(self.speed[0])
        # check against the top surface of the paddle
        # (tested separately for the ball's right and left edges).
        if cur_pos[2] >= paddle_pos[0] and cur_pos[2] <= paddle_pos[2] and cur_pos[3] >= paddle_pos[1] and cur_pos[3] <= paddle_pos[3] and self.speed[1] > 0:
            self.speed[1] = -abs(self.speed[1])
        if cur_pos[0] >= paddle_pos[0] and cur_pos[0] <= paddle_pos[2] and cur_pos[3] >= paddle_pos[1] and cur_pos[3] <= paddle_pos[3] and self.speed[1] > 0:
            self.speed[1] = -abs(self.speed[1])
        # check against the bottom surface of the paddle
        if cur_pos[2] >= paddle_pos[0] and cur_pos[2] <= paddle_pos[2] and cur_pos[1] <= paddle_pos[3] and cur_pos[1] >= paddle_pos[1] and self.speed[1] < 0:
            self.speed[1] = abs(self.speed[1])
        if cur_pos[0] >= paddle_pos[0] and cur_pos[0] <= paddle_pos[2] and cur_pos[1] <= paddle_pos[3] and cur_pos[1] >= paddle_pos[1] and self.speed[1] < 0:
            self.speed[1] = abs(self.speed[1])
    def hit_bottom(self):
        """Return True once the ball's bottom edge reaches the canvas bottom (y=500)."""
        cur_pos = self.canvas.coords(self.ball)
        if (cur_pos[3] >= 500):
            return True
        else:
            return False
    def stop(self):
        """Freeze the ball in place (rebinds speed, detaching the shared list)."""
        self.speed = [0, 0]
class Paddle:
    """Player-controlled rectangle driven by the arrow keys."""

    def __init__(self, canvas, color, x_speed, y_speed):
        self.canvas = canvas
        self.color = color
        self.x_speed = x_speed
        self.y_speed = y_speed
        # 60x10 rectangle, placed near the bottom center of the canvas.
        self.paddle = self.canvas.create_rectangle(0, 0, 60, 10, fill=color)
        canvas.move(self.paddle, 250, 400)
        self.canvas.bind_all('<KeyPress>', self.move)

    def move(self, event):
        """Key handler: step the paddle, clamped to the canvas edges.

        Note: a non-arrow key falls through with the default (x_speed,
        y_speed) step, matching the original behavior.
        """
        pos = self.canvas.coords(self.paddle)
        dx, dy = self.x_speed, self.y_speed
        key = event.keysym
        if key == 'Left':
            dx, dy = -abs(self.x_speed), 0
            if pos[0] <= 0:
                dx = 0
        elif key == 'Right':
            dx, dy = abs(self.x_speed), 0
            if pos[2] >= self.canvas.winfo_width():
                dx = 0
        elif key == 'Up':
            dx, dy = 0, -abs(self.y_speed)
            if pos[1] <= 0:
                dy = 0
        elif key == 'Down':
            dx, dy = 0, abs(self.y_speed)
            if pos[3] >= self.canvas.winfo_height():
                dy = 0
        self.canvas.move(self.paddle, dx, dy)

    def stop(self):
        """Zero the paddle's step so further key presses do nothing."""
        self.x_speed = 0
        self.y_speed = 0
class HitBlock:
    """A 20x20 block whose top-left corner is at (x, y).

    HitBlock is an advanced feature under development: random blocks appear
    in the center of the canvas, the player protects them from the ball, and
    each hit darkens the block; a black block means the player loses.
    """
    def __init__(self, canvas, color, x, y):
        self.canvas = canvas
        self.color = color
        self.x = x
        self.y = y
        # BUG FIX: the previous code created the rectangle at (x, y) AND then
        # moved it by (x, y), which placed it at (2x, 2y) instead of the
        # documented top-left corner. Create it directly at (x, y).
        self.hit_block = self.canvas.create_rectangle(
            self.x, self.y, self.x + 20, self.y + 20, fill=self.color)
# Configure the main window: title, fixed size, normal stacking order.
def setup(tk):
    """Apply window-level settings to the Tk root."""
    tk.title("Bounce Ball Game")
    tk.resizable(0, 0)          # window cannot be resized in either direction
    tk.wm_attributes("-topmost", 0)
# Create hitblock(s) at random locations near the canvas center.
def create_hit_blocks(canvas, num_blocks):
    """Return a list of num_blocks white HitBlocks at random positions."""
    blocks = []
    for _ in range(num_blocks):
        corner_x = random.randint(100, 200)
        corner_y = random.randint(100, 200)
        blocks.append(HitBlock(canvas, "white", corner_x, corner_y))
    return blocks
# Main entry point: build the window, create the game objects, and run the
# animation loop. When the ball falls off the bottom the game objects are
# frozen and "GAME OVER!" is revealed, but the window keeps responding.
def main():
    # setup the tkinter root window
    tk = Tk()
    setup(tk)
    # setup the canvas and initiate the elements
    canvas = Canvas(tk, width=500, height=500, bd=0, highlightthickness=0)
    canvas.pack()
    tk.update()
    paddle = Paddle(canvas=canvas, color="blue", x_speed=PADDLE_SPEED[0], y_speed=PADDLE_SPEED[1])
    ball = Ball(canvas=canvas, color="red", speed=BALL_SPEED)
    game_over_msg = canvas.create_text(250, 200, text="GAME OVER!", state='hidden')
    #hit_blocks = create_hit_blocks(canvas, 3)
    game_over = False
    while True:
        tk.update()
        if not game_over:
            ball.move(paddle.paddle)
            if ball.hit_bottom():
                # BUG FIX: freeze everything and show the message exactly
                # once. The old loop re-entered this branch every frame,
                # sleeping one extra second per iteration forever.
                ball.stop()
                paddle.stop()
                canvas.itemconfig(game_over_msg, state='normal')
                game_over = True
        time.sleep(SLEEP_TIME)

main()
| {
"content_hash": "1b418b71d1f6d7c3b84a4db7c49b8b12",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 157,
"avg_line_length": 35.16564417177914,
"alnum_prop": 0.5767620376831821,
"repo_name": "VictaLab/victalab_cpsc",
"id": "7965ac03ab68c24f46031994471f3b357a5c06c5",
"size": "5793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "games/bouncing-ball-game/bounce-ball-game.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14736"
}
],
"symlink_target": ""
} |
import os
import subprocess
import re
import sys
import Locate
import DomainLookup
def Connections(connection_type): #Find Locations of users that are logged in
    """Return the remote endpoints of logged-in users.

    connection_type: 'ip' for dotted-quad addresses, 'domain' for host names.
    Prints ERROR and returns None for any other value.
    """
    cmd = ['pinky']   # preferred source of login info
    cmd2 = ['who']    # fallback if pinky is unavailable
    # Raw strings for the regexes: "\d"/"\S" in plain strings are invalid
    # escape sequences (DeprecationWarning, error in future Pythons).
    if (connection_type == 'ip'):
        c = re.compile(r"\d+\.\d+\.\d+\.\d+")
    elif (connection_type == 'domain'):
        c = re.compile(r"\S+\.\S+")
    else:
        print("ERROR")
        return
    # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt)
    # to OSError — the realistic failure is `pinky` not being installed.
    try:
        output = subprocess.Popen( cmd, stdout=subprocess.PIPE ).communicate()[0]
    except OSError:
        output = subprocess.Popen( cmd2, stdout=subprocess.PIPE ).communicate()[0]
    ips = c.findall(str(output))
    result = []
    for x in ips: # strip stray characters picked up from the repr of bytes
        result.append(x.replace('\\t', '').replace("\\n'", '').replace('(', '').replace(')', ''))
    return result
def LocalUsers():
    """Describe where currently logged-in users connect from.

    Resolves each connected domain to an IP, locates it, then locates every
    directly-connected IP; returns one multi-line string.
    """
    connected_ips = Connections('ip')
    domains = Connections('domain')
    resolved = [DomainLookup.Domain_to_IP(d) for d in domains]
    lines = []
    for name, addr in zip(domains, resolved):
        lines.append(name + ' : ' + addr)
        lines.append(Locate.Locate(addr))
    for addr in connected_ips:
        lines.append(addr)
        lines.append(Locate.Locate(addr))
    # Every entry ends with a newline in the original concatenation.
    return '\n'.join(lines) + '\n' if lines else ''
| {
"content_hash": "1a30943ba460fd13517b8ea3bcdd48f3",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 96,
"avg_line_length": 35.191489361702125,
"alnum_prop": 0.6088270858524788,
"repo_name": "NickTGraham/PythonPack",
"id": "e501af7cd07bbc2135c92f1c008f5def67472c75",
"size": "1676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Connections.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "27929"
},
{
"name": "Shell",
"bytes": "98"
}
],
"symlink_target": ""
} |
'''
This script is intended to evaluate dataset using SVM and 10-folds cross validation
'''
import collections
import csv
import datetime
import json
import os
import random
import re
import sys
import time as t
import nltk
import nltk.classify
from nltk.metrics import scores
from sklearn.svm import LinearSVC
from modules import cleaner, tokenizer
fold = 10
def tweet_features(tweet):
    """Bag-of-words features: each token of the cleaned tweet mapped to its
    (substring) occurrence count in the cleaned text."""
    cleaned = cleaner.clean(tweet)
    # str.count is a substring count, matching the original behavior.
    return {word: cleaned.count(word) for word in cleaned.split()}
def f1(precision, recall):
    """Harmonic mean (F1) of precision and recall.

    Returns 0.0 when both inputs are zero instead of raising
    ZeroDivisionError (happens when a fold has no true positives).
    """
    if precision + recall == 0:
        return 0.0
    return 2 * ((precision * recall) / (precision + recall))
# Load the labeled corpora; each CSV row is (tweet_text, label).
with open(os.path.join(os.path.dirname(__file__), 'tweets_corpus/clean/distinct_traffic_tweets.csv'), newline='\n') as csv_input:
    dataset = csv.reader(csv_input, delimiter=',', quotechar='"')
    traffic_tweets = [(line[0], line[1]) for line in dataset]
with open(os.path.join(os.path.dirname(__file__), 'tweets_corpus/clean/distinct_non_traffic_tweets.csv'), newline='\n') as csv_input:
    dataset = csv.reader(csv_input, delimiter=',', quotechar='"')
    non_traffic_tweets = [(line[0], line[1]) for line in dataset]
# random.shuffle(traffic_tweets)
# random.shuffle(non_traffic_tweets)
# if sys.argv[1] == "balance":
#     traffic_tweets = traffic_tweets[:min([len(traffic_tweets), len(non_traffic_tweets)])]
#     non_traffic_tweets = non_traffic_tweets[:min([len(traffic_tweets), len(non_traffic_tweets)])]
# Merge and shuffle once so the k folds are random slices of the mix.
labeled_tweets = (traffic_tweets + non_traffic_tweets)
random.shuffle(labeled_tweets)
print('Start analysis with total:', len(labeled_tweets), 'data')
print('Traffic tweets:', len(traffic_tweets),'data')
print('Non traffic tweets:', len(non_traffic_tweets),'data')
# Per-fold metric accumulators; averaged at the end.
times = []
true_positives = []
true_negatives = []
false_positives = []
false_negatives = []
accuracies = []
precisions = []
recalls = []
f_measures = []
for i in range(fold):
    # Fold i: test on the i-th slice, train on everything else.
    train_set = [(tweet_features(tweet), category) for (tweet, category) in labeled_tweets[0 : i * int(len(labeled_tweets) / fold)]] + \
        [(tweet_features(tweet), category) for (tweet, category) in labeled_tweets[(i + 1) * int(len(labeled_tweets) / fold) : len(labeled_tweets)]]
    test_set = [(tweet_features(tweet), category) for (tweet, category) in labeled_tweets[i * int(len(labeled_tweets) / fold) : (i + 1) * int(len(labeled_tweets) / fold)]]
    print('\nIteration', (i + 1))
    print('Training data:', len(train_set), 'data')
    print('Test data:', len(test_set), 'data')
    # SVM
    start_time = t.time()
    svm_classifier = nltk.classify.SklearnClassifier(LinearSVC(max_iter=10000)).train(train_set)
    time = round(t.time() - start_time, 2)
    accuracy = nltk.classify.accuracy(svm_classifier, test_set)
    true_positive = 0
    true_negative = 0
    false_positive = 0
    false_negative = 0
    # NOTE(review): this inner loop reuses `i`, shadowing the fold index.
    # Harmless today because `i` is not read again before the next fold
    # iteration rebinds it, but fragile.
    # NOTE(review): the naming below swaps the usual convention —
    # (label=traffic, observed=non_traffic) is counted as false_positive
    # though it is conventionally a false NEGATIVE (and vice versa), so the
    # printed precision/recall are swapped relative to the standard
    # definitions. F-measure is unaffected (F1 is symmetric). Confirm intent
    # before relying on the per-metric numbers.
    for i, (feature, label) in enumerate(test_set):
        observed = svm_classifier.classify(feature)
        if label == 'traffic' and observed == 'traffic':
            true_positive += 1
        if label == 'non_traffic' and observed == 'non_traffic':
            true_negative += 1
        if label == 'traffic' and observed == 'non_traffic':
            false_positive += 1
        if label == 'non_traffic' and observed == 'traffic':
            false_negative += 1
    precision = true_positive / (true_positive + false_positive)
    recall = true_positive / (true_positive + false_negative)
    f_measure = f1(precision, recall)
    times.append(time)
    true_positives.append(true_positive)
    true_negatives.append(true_negative)
    false_positives.append(false_positive)
    false_negatives.append(false_negative)
    accuracies.append(accuracy)
    precisions.append(precision)
    recalls.append(recall)
    f_measures.append(f_measure)
    print('SVM Classifier:')
    print('\t', 'Training time:', time)
    print('\t', 'True positive:', true_positive)
    print('\t', 'True negative:', true_negative)
    print('\t', 'False positive:', false_positive)
    print('\t', 'False negative:', false_negative)
    print('\t', 'Accuracy:', accuracy)
    print('\t', 'Precision:', precision)
    print('\t', 'Recall:', recall)
    print('\t', 'F-Measure:', f_measure)
# Cross-fold averages.
print('\nSVM Classifier:')
print('\tAverage training time:', sum(times) / len(times))
print('\tAverage true positive:', sum(true_positives) / len(true_positives))
print('\tAverage true negative:', sum(true_negatives) / len(true_negatives))
print('\tAverage false positives:', sum(false_positives) / len(false_positives))
print('\tAverage false negatives:', sum(false_negatives) / len(false_negatives))
print('\tAverage accuracy:', sum(accuracies) / len(accuracies))
print('\tAverage precision:', sum(precisions) / len(precisions))
print('\tAverage recall:', sum(recalls) / len(recalls))
print('\tAverage F-Measure:', sum(f_measures) / len(f_measures))
"content_hash": "0ec8a4fa2e37ed193b030c351fab47a5",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 171,
"avg_line_length": 36.93181818181818,
"alnum_prop": 0.6703589743589744,
"repo_name": "dwiajik/twit-macet-mining-v3",
"id": "b654009048b396901d66a46f8a6d221dd342babb",
"size": "4875",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evaluate_ten_folds.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63706"
}
],
"symlink_target": ""
} |
from ctypes import c_double
import nidaqmx
import numpy as np
from nidaqmx.constants import AcquisitionType, TerminalConfiguration
from nidaqmx.stream_readers import AnalogMultiChannelReader
from nidaqmx._task_modules.read_functions import _read_analog_f_64
from toon.input.device import BaseDevice
class ForceKeyboard(BaseDevice):
    """Two-channel force-sensing keyboard read through an NI DAQ."""
    # One sample = two analog force readings (one per configured channel).
    shape = (2,)
    # nidaqmx reads float64; expose samples to toon as C doubles.
    ctype = c_double
    def __init__(self, sampling_frequency=250, indices=[7, 8], **kwargs):
        # NOTE(review): mutable default for `indices` — appears safe because
        # it is never mutated, but confirm before changing.
        super(ForceKeyboard, self).__init__(**kwargs)
        self.sampling_frequency = sampling_frequency
        # Pre-allocated sample buffer, reused by every read() call.
        self._buffer = np.empty(self.shape, dtype=c_double)
        if len(indices) > 2:
            raise ValueError('Too many indices for ForceKeyboard.')
        self._indices = indices
    def enter(self):
        """Open the DAQ task, wire up the analog channels, and start sampling."""
        # assume first NI DAQ is the one we want
        self._device_name = nidaqmx.system.System.local().devices[0].name
        # Lookup table from logical key index to physical ai channel number.
        chans = [2, 9, 1, 8, 0, 10, 3, 11, 4, 12]
        sub_chans = [chans[i] for i in self._indices]
        channels = [self._device_name + ('/ai%i' % n) for n in sub_chans]
        channels = ','.join(channels)
        dev = nidaqmx.Task()
        dev.ai_channels.add_ai_voltage_chan(channels,
                                            terminal_config=TerminalConfiguration.RSE)
        dev.timing.cfg_samp_clk_timing(self.sampling_frequency,
                                       sample_mode=AcquisitionType.CONTINUOUS)
        self._reader = AnalogMultiChannelReader(dev.in_stream)
        dev.start()
        self._device = dev
    def read(self):
        """Read one sample; returns (timestamp, buffer) or None on failure.

        NOTE(review): the same numpy buffer is returned from every call and
        overwritten by the next read — callers must copy values they keep.
        """
        #self._reader.read_one_sample(self._buffer, timeout=0.1)
        try:
            # Private nidaqmx entry point — presumably 1 sample with a zero
            # timeout (non-blocking); confirm against nidaqmx internals.
            _read_analog_f_64(self._reader._handle, self._buffer, 1, 0)
        except Exception:
            return None
        # TODO: apply calibration?
        time = self.clock()
        return time, self._buffer
    def exit(self):
        """Stop and close the DAQ task."""
        self._device.stop()
        self._device.close()
# Manual smoke test: stream the device for four minutes, print each sample,
# and plot the inter-sample intervals at the end.
if __name__ == '__main__':
    import time
    from toon.input import MpDevice
    dev = MpDevice(ForceKeyboard())
    times = []
    with dev:
        start = time.time()
        while time.time() - start < 240:
            dat = dev.read()
            if dat is not None:
                # BUG FIX: the original unpacked into a variable named
                # `time`, shadowing the time module and crashing on
                # time.sleep() below after the first successful read.
                timestamp, data = dat
                print(data)
                times.append(timestamp)
            time.sleep(0.016)
    times = np.hstack(times)
    import matplotlib.pyplot as plt
    plt.plot(np.diff(times))
    plt.show()
| {
"content_hash": "2f16c93d41dec4f8d4982e396ef969b4",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 86,
"avg_line_length": 32.945945945945944,
"alnum_prop": 0.5918785890073831,
"repo_name": "aforren1/toon",
"id": "e6968e87edf671d541c3529fb138287a75f080c6",
"size": "2438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example_devices/force_keyboard.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "68773"
}
],
"symlink_target": ""
} |
import os
import sys
# Absolute path of this script and its directory; other paths hang off these.
g_cur_file = os.path.realpath(__file__)
g_cur_dir = os.path.dirname(g_cur_file)
# Xcode developer directory (Xcode + command line tools must be installed).
XCODE_DIR="/Applications/Xcode.app/Contents/Developer"
# Destination for the fat libcurl.a and the copied headers.
DST_DIR = os.path.join(g_cur_dir, "../prebuilt-with-ssl/iOS")
DST_DIR = os.path.realpath(DST_DIR)
# Sibling checkout of the curl sources.
CURL_DIR = os.path.join(g_cur_dir, "../curl")
CURL_DIR = os.path.realpath(CURL_DIR)
# Parallel lists: index i of each describes one build slice (see do_build_curl).
ARCHS = ["armv7", "armv7s", "arm64", "i386", "x86_64"]
HOSTS = ["armv7", "armv7s", "arm", "i386", "x86_64"]
PLATFORMS = ["iPhoneOS", "iPhoneOS", "iPhoneOS" , "iPhoneSimulator", "iPhoneSimulator"]
SDKS = ["iPhoneOS", "iPhoneOS", "iPhoneOS" , "iPhoneSimulator", "iPhoneSimulator"]
# Minimum iOS version passed to the compiler.
IPHONEOS_DEPLOYMENT_TARGET = "6"
def is_executable(fpath):
    """True when *fpath* is a regular file carrying the execute permission bit."""
    return os.access(fpath, os.X_OK) and os.path.isfile(fpath)
def check_env():
    """Verify Xcode is installed and curl's configure script exists.

    Runs curl's buildconf to generate configure when it is missing; exits
    the process on any failure.
    """
    saved_cwd = os.getcwd()
    if not os.path.isdir(XCODE_DIR):
        print ("You have to install Xcode and the command line tools first")
        exit(1)
    buildconf = os.path.realpath(os.path.join(CURL_DIR, "buildconf"))
    # print is_executable(buildconf)
    os.chdir(CURL_DIR)
    configure = os.path.join(CURL_DIR, "configure")
    if not is_executable(configure):
        # configure is produced by buildconf, which needs the autotools.
        print ("Curl needs external tools to be compiled")
        print ("Make sure you have autoconf, automake and libtool installed")
        if os.system(buildconf) != 0:
            print ("Error running the buildconf program")
            os.chdir(g_cur_dir)
            exit(1)
    os.chdir(saved_cwd)
def set_env():
    """Point CC at Xcode's clang and pin the iOS deployment target."""
    clang = os.path.join(XCODE_DIR, "Toolchains/XcodeDefault.xctoolchain/usr/bin/clang")
    os.environ["CC"] = clang
    os.environ["IPHONEOS_DEPLOYMENT_TARGET"] = "6"
# Build for all the architectures
def do_build_curl(arch, host, platform, sdk):
    """Configure and build libcurl for one architecture slice.

    arch/host/platform/sdk come from the parallel ARCHS/HOSTS/PLATFORMS/SDKS
    lists. The resulting static library lands in DST_DIR both as
    <arch>/libcurl.a and as libcurl-<arch>.a (consumed by
    merge_static_library). Exits the process if configure or make fails.
    """
    cwd = os.getcwd()
    sysroot = os.path.join(XCODE_DIR, "Platforms/%s.platform/Developer/SDKs/%s.sdk" % (platform, sdk))
    os.environ["CFLAGS"] = "-arch %s -pipe -Os -gdwarf-2 -isysroot %s -miphoneos-version-min=%s -fembed-bitcode" % (arch, sysroot, IPHONEOS_DEPLOYMENT_TARGET)
    os.environ["LDFLAGS"] = "-arch %s -isysroot %s" % (arch, sysroot)
    if platform == "iPhoneSimulator":
        os.environ["CPPFLAGS"] = "-D__IPHONE_OS_VERSION_MIN_REQUIRED=%s0000" % IPHONEOS_DEPLOYMENT_TARGET
    os.chdir(CURL_DIR)
    # CONSISTENCY FIX: use print() calls rather than Python-2-only print
    # statements, matching the rest of the file (identical output for a
    # single argument, and the file now parses under Python 3 as well).
    print("CC:%s" % os.environ["CC"])
    print("CFLAGS:%s" % os.environ["CFLAGS"])
    print("LDFLAGS:%s" % os.environ["LDFLAGS"])
    # print("CPPFLAGS:%s" % os.environ["CPPFLAGS"])
    print("IPHONEOS_DEPLOYMENT_TARGET:%s" % os.environ["IPHONEOS_DEPLOYMENT_TARGET"])
    cmd = "./configure --host=\"%s-apple-darwin\" --with-darwinssl --enable-static --disable-shared --enable-thread-resolver --disable-verbose --enable-ipv6 --enable-http" % host
    cmd += " --disable-ftp --disable-file --disable-ldap --disable-ldaps --disable-rtsp --disable-dict --disable-telnet --disable-tftp --disable-pop3 --disable-imap"
    cmd += " --disable-smb --disable-smtp --disable-gopher"
    return_code = os.system(cmd)
    if return_code != 0:
        print("Error running the cURL configure program")
        os.chdir(cwd)
        exit(1)
    cmd = "make -j4"
    return_code = os.system(cmd)
    if return_code != 0:
        print("Error running the make program")
        os.chdir(cwd)
        exit(1)
    cmd = "mkdir -p %s/%s" % (DST_DIR, arch)
    os.system(cmd)
    cmd = "cp %s/lib/.libs/libcurl.a %s/%s/" % (CURL_DIR, DST_DIR, arch)
    os.system(cmd)
    cmd = "cp %s/lib/.libs/libcurl.a %s/libcurl-%s.a" % (CURL_DIR, DST_DIR, arch)
    os.system(cmd)
    # Clean so the next arch starts from a pristine tree.
    cmd = "make clean"
    os.system(cmd)
    os.chdir(cwd)
def build_curl():
    """Build libcurl once for every architecture slice."""
    for arch, host, platform, sdk in zip(ARCHS, HOSTS, PLATFORMS, SDKS):
        do_build_curl(arch, host, platform, sdk)
# Build a single static lib with all the archs in it
# Build a single static lib with all the archs in it.
def merge_static_library():
    """Combine the per-arch libcurl-*.a files into one fat libcurl.a via lipo."""
    saved_cwd = os.getcwd()
    os.chdir(DST_DIR)
    os.system("lipo -create -output libcurl.a libcurl-*.a")
    os.system("rm libcurl-*.a")
    os.chdir(saved_cwd)
# Copy cURL headers
def copy_curl_headers():
src_dir = os.path.join(CURL_DIR, "include")
cmd = "cp -R %s %s" % (src_dir, DST_DIR)
os.system(cmd)
# Patch headers for 64-bit archs
def patch_headers():
    # Placeholder: patching the headers for 64-bit archs is not implemented;
    # the call site is kept so the pipeline's stages stay explicit.
    pass
if __name__ == "__main__":
print ("Building curl for ios ...")
print ("Current dir:%s" % g_cur_dir)
check_env()
set_env()
build_curl()
merge_static_library()
copy_curl_headers()
patch_headers() | {
"content_hash": "3f9e0709f5f68f42837a416b810eb745",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 175,
"avg_line_length": 30.100719424460433,
"alnum_prop": 0.6771032504780115,
"repo_name": "sundayliu/curl-android-ios",
"id": "a9ca04c602f61e1757ae094bb7e2f0f54df3592b",
"size": "4233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compile-scripts/build_ios.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "338595"
},
{
"name": "C++",
"bytes": "2988"
},
{
"name": "CMake",
"bytes": "14912"
},
{
"name": "Java",
"bytes": "951"
},
{
"name": "Makefile",
"bytes": "199690"
},
{
"name": "Objective-C",
"bytes": "5440"
},
{
"name": "Python",
"bytes": "4233"
},
{
"name": "Shell",
"bytes": "8183"
}
],
"symlink_target": ""
} |
from typing import Any, Dict, List, Set, Union, TYPE_CHECKING
from typing import cast as typecast
from ..common import EnvoyRoute
from ...cache import Cacheable
from ...ir.irhttpmappinggroup import IRHTTPMappingGroup
from ...ir.irbasemapping import IRBaseMapping
from .v2ratelimitaction import V2RateLimitAction
if TYPE_CHECKING:
from . import V2Config # pragma: no cover
# A V2Route that has been flattened to a plain dict.
DictifiedV2Route = Dict[str, Any]

def v2prettyroute(route: DictifiedV2Route) -> str:
    """Render a route dict as a short one-line human-readable summary.

    Shows the match kind (prefix / safe regex / unsafe regex), security and
    host constraints gleaned from the header matchers, and the target
    (cluster route or redirect).
    """
    match = route["match"]
    key = "PFX"
    value = match.get("prefix", None)
    if not value:
        key = "SRX"
        value = match.get("safe_regex", {}).get("regex", None)
    if not value:
        key = "URX"
        value = match.get("unsafe_regex", None)
    if not value:
        key = "???"
        value = "-none-"
    match_str = f"{key} {value}"
    xfp = None
    host = None
    for header in match.get("headers", {}):
        name = header.get("name", None)
        exact = header.get("exact_match", None)
        # BUG FIX: guard before lowercasing — the original called .lower()
        # on the raw get() result and raised AttributeError for a header
        # matcher without a "name".
        if not name or not exact:
            continue
        name = name.lower()
        if name == "x-forwarded-proto":
            xfp = exact == "https"
        elif name == ":authority":
            host = exact
    match_str += f" {'IN' if not xfp else ''}SECURE"
    if host:
        match_str += f" HOST {host}"
    target_str = "-none-"
    if route.get("route"):
        target_str = f"ROUTE {route['route']['cluster']}"
    elif route.get("redirect"):
        target_str = "REDIRECT"
    return f"<V2Route {match_str} -> {target_str}>"
def regex_matcher(config: 'V2Config', regex: str, key="regex", safe_key=None, re_type=None) -> Dict[str, Any]:
    """Build an Envoy regex-matcher fragment for *regex*.

    Uses the safe (RE2) engine unless the Ambassador module — or an explicit
    re_type argument — selects 'unsafe'.
    """
    # Fall back to the module-configured engine when the caller didn't pin one.
    if re_type is None:
        re_type = config.ir.ambassador_module.get('regex_type', 'safe').lower()
    config.ir.logger.debug(f"re_type {re_type}")
    # Only an explicit "unsafe" selects the legacy matcher.
    if re_type == 'unsafe':
        return {key: regex}
    # Safe (RE2) engine, honoring the configurable program-size cap.
    program_limit = int(config.ir.ambassador_module.get('regex_max_size', 200))
    matcher_key = safe_key or ("safe_" + key)
    return {
        matcher_key: {
            "google_re2": {
                "max_program_size": program_limit
            },
            "regex": regex
        }
    }
def hostglob_matches(glob: str, value: str) -> bool:
    """Match *value* against a limited hostname glob.

    Supported forms: "*" (anything), "prefix*", "*suffix", or a literal
    hostname compared for equality.
    """
    if glob == "*":
        return True                           # universal wildcard
    if glob[-1:] == "*":
        return value.startswith(glob[:-1])    # "prefix*"
    if glob[:1] == "*":
        return value.endswith(glob[1:])       # "*suffix"
    return glob == value                      # literal hostname
class V2Route(Cacheable):
def __init__(self, config: 'V2Config', group: IRHTTPMappingGroup, mapping: IRBaseMapping) -> None:
    """Translate one IR (group, mapping) pair into an Envoy v2 route dict.

    Populates 'match' (prefix/path/regex plus header and query-parameter
    matchers), per-filter overrides, header add/remove lists, and either a
    'redirect' (early return) or a full 'route' action.
    """
    super().__init__()
    # Stash SNI and precedence info where we can find it later.
    if group.get('sni'):
        self['_sni'] = {
            'hosts': group['tls_context']['hosts'],
            'secret_info': group['tls_context']['secret_info']
        }
    if group.get('precedence'):
        self['_precedence'] = group['precedence']
    envoy_route = EnvoyRoute(group).envoy_route
    # A mapping-level prefix/case_sensitive overrides the group's setting.
    mapping_prefix = mapping.get('prefix', None)
    route_prefix = mapping_prefix if mapping_prefix is not None else group.get('prefix')
    mapping_case_sensitive = mapping.get('case_sensitive', None)
    case_sensitive = mapping_case_sensitive if mapping_case_sensitive is not None else group.get('case_sensitive', True)
    # Weighted canary routing: 'weight' out of one hundred.
    runtime_fraction: Dict[str, Union[dict, str]] = {
        'default_value': {
            'numerator': mapping.get('weight', 100),
            'denominator': 'HUNDRED'
        }
    }
    if len(mapping) > 0:
        if not 'cluster' in mapping:
            config.ir.logger.error("%s: Mapping %s has no cluster? %s", mapping.rkey, route_prefix, mapping.as_json())
            self['_failed'] = True
        else:
            # Runtime key lets the traffic split be tweaked without a reload.
            runtime_fraction['runtime_key'] = f'routing.traffic_shift.{mapping.cluster.envoy_name}'
    match = {
        'case_sensitive': case_sensitive,
        'runtime_fraction': runtime_fraction
    }
    # Pick the match style that EnvoyRoute derived from the group.
    if envoy_route == 'prefix':
        match['prefix'] = route_prefix
    elif envoy_route == 'path':
        match['path'] = route_prefix
    else:
        # Cheat.
        if config.ir.edge_stack_allowed and (self.get('_precedence', 0) == -1000000):
            # Force the safe_regex engine.
            match.update({
                "safe_regex": {
                    "google_re2": {
                        "max_program_size": 200,
                    },
                    "regex": route_prefix
                }
            })
        else:
            match.update(regex_matcher(config, route_prefix))
    headers = self.generate_headers(config, group)
    if len(headers) > 0:
        match['headers'] = headers
    query_parameters = self.generate_query_parameters(config, group)
    if len(query_parameters) > 0:
        match['query_parameters'] = query_parameters
    self['match'] = match
    # `typed_per_filter_config` is used to pass typed configuration to Envoy filters
    typed_per_filter_config = {}
    if mapping.get('bypass_error_response_overrides', False):
        # Explicitly disable the response_map filter for this route.
        typed_per_filter_config['envoy.filters.http.response_map'] = {
            '@type': 'type.googleapis.com/envoy.extensions.filters.http.response_map.v3.ResponseMapPerRoute',
            'disabled': True,
        }
    else:
        # The error_response_overrides field is set on the Mapping as input config
        # via kwargs in irhttpmapping.py. Later, in setup(), we replace it with an
        # IRErrorResponse object, which itself returns None if setup failed. This
        # is a similar pattern to IRCors and IRRetrYPolicy.
        #
        # Therefore, if the field is present at this point, it means it's a valid
        # IRErrorResponse with a 'config' field, since setup must have succeded.
        error_response_overrides = mapping.get('error_response_overrides', None)
        if error_response_overrides:
            # The error reponse IR only has optional response map config to use.
            # On this particular code path, we're protected by both Mapping schema
            # and CRD validation so we're reasonable confident there is going to
            # be a valid config here. However the source of this config is theoretically
            # not guaranteed and we need to use the config() method safely, so check
            # first before using it.
            filter_config = error_response_overrides.config()
            if filter_config:
                # The error response IR itself guarantees that any resulting config() has
                # at least one mapper in 'mappers', so assert on that here.
                assert 'mappers' in filter_config
                assert len(filter_config['mappers']) > 0
                typed_per_filter_config['envoy.filters.http.response_map'] = {
                    '@type': 'type.googleapis.com/envoy.extensions.filters.http.response_map.v3.ResponseMapPerRoute',
                    # The ResponseMapPerRoute Envoy config is similar to the ResponseMap filter
                    # config, except that it is wrapped in another object with key 'response_map'.
                    'response_map': {
                        'mappers': filter_config['mappers']
                    }
                }
    if mapping.get('bypass_auth', False):
        # Skip the external auth filter entirely for this route.
        typed_per_filter_config['envoy.filters.http.ext_authz'] = {
            '@type': 'type.googleapis.com/envoy.config.filter.http.ext_authz.v2.ExtAuthzPerRoute',
            'disabled': True,
        }
    else:
        # Additional ext_auth configuration only makes sense when not bypassing auth.
        auth_context_extensions = mapping.get('auth_context_extensions', False)
        if auth_context_extensions:
            typed_per_filter_config['envoy.filters.http.ext_authz'] = {
                '@type': 'type.googleapis.com/envoy.config.filter.http.ext_authz.v2.ExtAuthzPerRoute',
                'check_settings': {'context_extensions': auth_context_extensions}
            }
    if len(typed_per_filter_config) > 0:
        self['typed_per_filter_config'] = typed_per_filter_config
    # Request/response header mutations configured on the group.
    request_headers_to_add = group.get('add_request_headers', None)
    if request_headers_to_add:
        self['request_headers_to_add'] = self.generate_headers_to_add(request_headers_to_add)
    response_headers_to_add = group.get('add_response_headers', None)
    if response_headers_to_add:
        self['response_headers_to_add'] = self.generate_headers_to_add(response_headers_to_add)
    request_headers_to_remove = group.get('remove_request_headers', None)
    if request_headers_to_remove:
        # Accept either a single header name or a list of them.
        if type(request_headers_to_remove) != list:
            request_headers_to_remove = [ request_headers_to_remove ]
        self['request_headers_to_remove'] = request_headers_to_remove
    response_headers_to_remove = group.get('remove_response_headers', None)
    if response_headers_to_remove:
        if type(response_headers_to_remove) != list:
            response_headers_to_remove = [ response_headers_to_remove ]
        self['response_headers_to_remove'] = response_headers_to_remove
    host_redirect = group.get('host_redirect', None)
    if host_redirect:
        # We have a host_redirect. Deal with it.
        self['redirect'] = {
            'host_redirect': host_redirect.service
        }
        path_redirect = host_redirect.get('path_redirect', None)
        prefix_redirect = host_redirect.get('prefix_redirect', None)
        regex_redirect = host_redirect.get('regex_redirect', None)
        response_code = host_redirect.get('redirect_response_code', None)
        # We enforce that only one of path_redirect or prefix_redirect is set in the IR.
        # But here, we just prefer path_redirect if that's set.
        if path_redirect:
            self['redirect']['path_redirect'] = path_redirect
        elif prefix_redirect:
            # In Envoy, it's called prefix_rewrite.
            self['redirect']['prefix_rewrite'] = prefix_redirect
        elif regex_redirect:
            # In Envoy, it's called regex_rewrite.
            self['redirect']['regex_rewrite'] = {
                'pattern': {
                    'google_re2': {},
                    'regex': regex_redirect.get('pattern', '')
                },
                'substitution': regex_redirect.get('substitution', '')
            }
        # In Ambassador, we express the redirect_reponse_code as the actual
        # HTTP response code for operator simplicity. In Envoy, those codes
        # are represented as an enum, so do the translation here.
        if response_code:
            if response_code == 301:
                enum_code = 0
            elif response_code == 302:
                enum_code = 1
            elif response_code == 303:
                enum_code = 2
            elif response_code == 307:
                enum_code = 3
            elif response_code == 308:
                enum_code = 4
            else:
                config.ir.post_error(
                    f"Unknown redirect_response_code={response_code}, must be one of [301, 302, 303,307, 308]. Using default redirect_response_code=301")
                enum_code = 0
            self['redirect']['response_code'] = enum_code
        # A redirect route never carries a 'route' action — we're done.
        return
    # Take the default `timeout_ms` value from the Ambassador module using `cluster_request_timeout_ms`.
    # If that isn't set, use 3000ms. The mapping below will override this if its own `timeout_ms` is set.
    default_timeout_ms = config.ir.ambassador_module.get('cluster_request_timeout_ms', 3000)
    route = {
        'priority': group.get('priority'),
        'timeout': "%0.3fs" % (mapping.get('timeout_ms', default_timeout_ms) / 1000.0),
        'cluster': mapping.cluster.envoy_name
    }
    idle_timeout_ms = mapping.get('idle_timeout_ms', None)
    if idle_timeout_ms is not None:
        route['idle_timeout'] = "%0.3fs" % (idle_timeout_ms / 1000.0)
    # A regex rewrite takes precedence over a plain prefix rewrite.
    regex_rewrite = self.generate_regex_rewrite(config, group)
    if len(regex_rewrite) > 0:
        route['regex_rewrite'] = regex_rewrite
    elif mapping.get('rewrite', None):
        route['prefix_rewrite'] = mapping['rewrite']
    if 'host_rewrite' in mapping:
        route['host_rewrite'] = mapping['host_rewrite']
    if 'auto_host_rewrite' in mapping:
        route['auto_host_rewrite'] = mapping['auto_host_rewrite']
    hash_policy = self.generate_hash_policy(group)
    if len(hash_policy) > 0:
        route['hash_policy'] = [ hash_policy ]
    # CORS: group-level config wins over the Ambassador module's default.
    cors = None
    if "cors" in group:
        cors = group.cors
    elif "cors" in config.ir.ambassador_module:
        cors = config.ir.ambassador_module.cors
    if cors:
        # Duplicate this IRCORS, then set its group ID correctly.
        cors = cors.dup()
        cors.set_id(group.group_id)
        route['cors'] = cors.as_dict()
    # Retry policy: same group-over-module precedence as CORS.
    retry_policy = None
    if "retry_policy" in group:
        retry_policy = group.retry_policy.as_dict()
    elif "retry_policy" in config.ir.ambassador_module:
        retry_policy = config.ir.ambassador_module.retry_policy.as_dict()
    if retry_policy:
        route['retry_policy'] = retry_policy
    # Is shadowing enabled?
    shadow = group.get("shadows", None)
    if shadow:
        # Only the first shadow entry is honored.
        shadow = shadow[0]
        weight = shadow.get('weight', 100)
        route['request_mirror_policy'] = {
            'cluster': shadow.cluster.envoy_name,
            'runtime_fraction': {
                'default_value': {
                    'numerator': weight,
                    'denominator': 'HUNDRED'
                }
            }
        }
    # Is RateLimit a thing?
    rlsvc = config.ir.ratelimit
    if rlsvc:
        # Yup. Build our labels into a set of RateLimitActions (remember that default
        # labels have already been handled, as has translating from v0 'rate_limits' to
        # v1 'labels').
        if "labels" in group:
            # The Envoy RateLimit filter only supports one domain, so grab the configured domain
            # from the RateLimitService and use that to look up the labels we should use.
            rate_limits = []
            for rl in group.labels.get(rlsvc.domain, []):
                action = V2RateLimitAction(config, rl)
                if action.valid:
                    rate_limits.append(action.to_dict())
            if rate_limits:
                route["rate_limits"] = rate_limits
    # Save upgrade configs.
    if group.get('allow_upgrade'):
        route["upgrade_configs"] = [ { 'upgrade_type': proto } for proto in group.get('allow_upgrade', []) ]
    self['route'] = route
def host_constraints(self, prune_unreachable_routes: bool) -> Set[str]:
    """Return a set of hostglobs that match (a superset of) all hostnames that this route can
    apply to.

    An empty set means that this route cannot possibly apply to any hostnames.

    This considers SNI information and (if prune_unreachable_routes) HeaderMatchers that
    `exact_match` on the `:authority` header. There are other things that could narrow the set
    down more, but that we don't consider (like regex matches on `:authority`), leading to it
    possibly returning a set that is too broad. That's OK for correctness, it just means that
    we'll emit an Envoy config that contains extra work for Envoy.
    """

    # Start by grabbing a list of all the SNI host globs for this route. If there aren't any,
    # default to "*".
    hostglobs = set(self.get('_sni', {}).get('hosts', ['*']))

    # If we're going to do any aggressive pruning here...
    if prune_unreachable_routes:
        # Note: We're *pruning*; the hostglobs set will only ever get *smaller*, it will never
        # grow. If it gets down to the empty set, then we can safely bail early.

        # Take all the HeaderMatchers...
        header_matchers = self.get("match", {}).get("headers", [])
        for header in header_matchers:
            # ... and look for ones that exact_match on :authority.
            if header.get("name") == ":authority" and "exact_match" in header:
                exact_match = header["exact_match"]

                if "*" in exact_match:
                    # A real :authority header will never contain a "*", so if this route has an
                    # exact_match looking for one, then this route is unreachable.
                    hostglobs = set()
                    break  # hostglobs is empty, no point in doing more work

                elif any(hostglob_matches(glob, exact_match) for glob in hostglobs):
                    # The exact_match that this route is looking for is matched by one or more
                    # of the hostglobs; so this route is reachable (so far). Set hostglobs to
                    # just match that route. Because we already checked if the exact_match
                    # contains a "*", we don't need to worry about it possibly being interpreted
                    # incorrectly as a glob.
                    hostglobs = set([exact_match])
                    # Don't "break" here--if somehow this route has multiple disagreeing
                    # HeaderMatchers on :authority, then it's unreachable and we want the next
                    # iteration of the loop to trigger the "else" clause and prune hostglobs
                    # down to the empty set.

                else:
                    # The exact_match that this route is looking for isn't matched by any of the
                    # hostglobs; so this route is unreachable.
                    hostglobs = set()
                    break  # hostglobs is empty, no point in doing more work

    return hostglobs
@classmethod
def get_route(cls, config: 'V2Config', cache_key: str,
              irgroup: IRHTTPMappingGroup, mapping: IRBaseMapping) -> 'V2Route':
    """Return the V2Route for cache_key, synthesizing and caching it on a miss.

    On a hit the cached object is returned directly; on a miss a fresh route is
    built, pinned to cache_key, added to the cache, and linked to its group.
    """
    cached = config.cache[cache_key]

    if cached is not None:
        # Cache hit. We know a priori that it's a V2Route, but assert it
        # before handing it back.
        assert isinstance(cached, V2Route)
        return cached

    # Cache miss: synthesize the route.
    fresh: 'V2Route' = V2Route(config, irgroup, mapping)
    # Cheat a bit and force the route's cache_key so it lands under the key
    # the caller looked up.
    fresh.cache_key = cache_key
    config.cache.add(fresh)
    config.cache.link(irgroup, fresh)
    return fresh
@classmethod
def generate(cls, config: 'V2Config') -> None:
    """Populate config.routes with one V2Route per mapping of every ordered
    HTTP mapping group (plus a synthetic route for host-redirect-only groups)."""
    config.routes = []

    for irgroup in config.ir.ordered_groups():
        # We only want HTTP mapping groups here.
        if not isinstance(irgroup, IRHTTPMappingGroup):
            continue

        if irgroup.get('host_redirect') is not None and len(irgroup.get('mappings', [])) == 0:
            # This is a host-redirect-only group, which is weird, but can happen.
            # Look it up (or synthesize it) under a dedicated cache key.
            #
            # Casting an empty dict to an IRBaseMapping may look weird, but in fact
            # IRBaseMapping is (ultimately) a subclass of dict, so it's the cleanest
            # way to pass in a completely empty IRBaseMapping to V2Route().
            # (We could also have written V2Route to allow the mapping to be Optional,
            # but that makes a lot of its constructor much uglier.)
            redirect_key = f"Route-{irgroup.group_id}-hostredirect"
            synthesized = cls.get_route(config, redirect_key, irgroup,
                                        typecast(IRBaseMapping, {}))
            config.routes.append(config.save_element('route', irgroup, synthesized))

        # Repeat for our real mappings, skipping any route that failed to build.
        for mapping in irgroup.mappings:
            mapping_key = f"Route-{irgroup.group_id}-{mapping.cache_key}"
            route = cls.get_route(config, mapping_key, irgroup, mapping)
            if not route.get('_failed', False):
                config.routes.append(config.save_element('route', irgroup, route))
@staticmethod
def generate_headers(config: 'V2Config', mapping_group: IRHTTPMappingGroup) -> List[dict]:
    """Translate the group's header match specs into Envoy HeaderMatcher dicts.

    Each spec becomes either a regex matcher (via the shared regex helper so the
    configured engine is honored) or an exact_match on the literal value.
    """
    matchers = []

    for spec in mapping_group.get('headers', []):
        matcher = {'name': spec.get('name')}
        if spec.get('regex'):
            matcher.update(regex_matcher(config, spec.get('value'), key='regex_match'))
        else:
            matcher['exact_match'] = spec.get('value')
        matchers.append(matcher)

    return matchers
@staticmethod
def generate_query_parameters(config: 'V2Config', mapping_group: IRHTTPMappingGroup) -> List[dict]:
query_parameters = []
group_query_parameters = mapping_group.get('query_parameters', [])
for group_query_parameter in group_query_parameters:
query_parameter = { 'name': group_query_parameter.get('name') }
if group_query_parameter.get('regex'):
query_parameter.update({
'string_match': regex_matcher(
config,
group_query_parameter.get('value'),
key='regex'
)
})
else:
value = group_query_parameter.get('value', None)
if value is not None:
query_parameter.update({
'string_match': {
'exact': group_query_parameter.get('value')
}
})
else:
query_parameter.update({
'present_match': True
})
query_parameters.append(query_parameter)
return query_parameters
@staticmethod
def generate_hash_policy(mapping_group: IRHTTPMappingGroup) -> dict:
    """Build a single Envoy hash_policy dict for the group's load balancer.

    Returns an empty dict when the group has no load balancer, or when its
    policy is not one of the hashing policies (ring_hash / maglev). Cookie
    takes precedence over header, which takes precedence over source_ip.
    """
    policy_out = {}
    balancer = mapping_group.get('load_balancer', None)

    if balancer is None or balancer.get('policy') not in ('ring_hash', 'maglev'):
        return policy_out

    cookie = balancer.get('cookie')
    header = balancer.get('header')
    source_ip = balancer.get('source_ip')

    if cookie is not None:
        cookie_out = {'name': cookie.get('name')}
        # path and ttl are optional; copy them only when present.
        for optional_key in ('path', 'ttl'):
            if optional_key in cookie:
                cookie_out[optional_key] = cookie[optional_key]
        policy_out['cookie'] = cookie_out
    elif header is not None:
        policy_out['header'] = {'header_name': header}
    elif source_ip is not None:
        policy_out['connection_properties'] = {'source_ip': source_ip}

    return policy_out
@staticmethod
def generate_headers_to_add(header_dict: dict) -> List[dict]:
headers = []
for k, v in header_dict.items():
append = True
if isinstance(v,dict):
if 'append' in v:
append = bool(v['append'])
headers.append({
'header': {
'key': k,
'value': v['value']
},
'append': append
})
else:
headers.append({
'header': {
'key': k,
'value': v
},
'append': append # Default append True, for backward compatability
})
return headers
@staticmethod
def generate_regex_rewrite(config: 'V2Config', mapping_group: IRHTTPMappingGroup) -> dict:
    """Build the Envoy regex_rewrite dict from the group's regex_rewrite
    settings; returns an empty dict when none are configured."""
    rewrite = {}
    settings = mapping_group.get('regex_rewrite', None)

    if settings is None:
        return rewrite

    pattern = settings.get('pattern', None)
    if pattern is not None:
        # regex_rewrite should never ever be unsafe, so force the safe engine.
        rewrite.update(regex_matcher(config, pattern, key='regex', safe_key='pattern', re_type='safe'))

    substitution = settings.get('substitution', None)
    if substitution is not None:
        rewrite["substitution"] = substitution

    return rewrite
| {
"content_hash": "9b369edbe70b87e905f74ed8c26cf964",
"timestamp": "",
"source": "github",
"line_count": 644,
"max_line_length": 161,
"avg_line_length": 40.92701863354037,
"alnum_prop": 0.5440679895283985,
"repo_name": "datawire/ambassador",
"id": "6f397d9053b225c926429f9119a9582845bbfd22",
"size": "26951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ambassador/envoy/v2/v2route.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "20990"
},
{
"name": "Go",
"bytes": "564752"
},
{
"name": "HTML",
"bytes": "25150"
},
{
"name": "JavaScript",
"bytes": "32368"
},
{
"name": "Makefile",
"bytes": "113905"
},
{
"name": "Python",
"bytes": "1158187"
},
{
"name": "Shell",
"bytes": "188832"
}
],
"symlink_target": ""
} |
import time
import kbhit
import os
import sys
import signal
import subprocess
"""
call menu.example() for an example
USAGE:
menu.unbuffer_stdin() for normal menu operation, menu.restore_stdin() on
exit or to get normal behavior back.
newMenu=menu.submenu(name,menu_options_list,menu_functions_list,menu_characters_list)
the menu will display the options from above and when an item from
characters is pressed, the corresponding function will be called.
Additionally, CLEAR_ON_REFRESH,PRINT_ON_REFRESH, USE_BANNER can be cleared
for extra flexibility
Finally, to make sure the menu is not blocking, IDLE_FUNCTION=0 can be set
to a function of the user's
choice and will be repeatedly called until a key is pressed. After every
return from the function, the menu will check for keypresses. A keypress
will not cause an early abort from the idle function
The menu will ignore keys that are not on the characters_list, but will
interrupt on SIGINT.
Also, the arrow keys, up, down, right, left, can be captured with A,B,C,D
respectively, in addition to the letter keys. It may be advisable to not
use those letters if this behavior is not desired.
Also, note that if the input passed in the menu_options_list is greater
than 48 characters, it will be truncated.
If you try to check for '*' then this will match any character that is not
in any other menu_characters_list element.
"""
################################################################################
def example():
    """Demonstrate menu usage: build a sub-menu and a main menu, run the main
    menu (blocking), then restore normal buffered stdin on exit."""
    unbuffer_stdin()
    # "*" is the wildcard key: matches any key not claimed by another entry.
    menu2=submenu("No banner on this menu, refreshes screen constantly",["line3: press 3","other: press any key"],[test3,0],["3","*"])
    menu2.USE_BANNER=0
    #leave defaults of clear and refresh
    MAIN=submenu("MAIN",["line1: press A","line2: press B","another menu: press c"],[test1,test2,menu2.display ],["A","b","c"])
    MAIN.CLEAR_ON_REFRESH=0
    MAIN.PRINT_ON_REFRESH=0
    #MAIN.USE_BANNER=0
    MAIN.display();
    restore_stdin()
    print "Have now exited and stdin restored."
def test1():
    # Sample callback wired to a menu entry in example().
    print "test1"
def test2():
    # Sample callback wired to a menu entry in example().
    print "test2"
def test3():
    # Sample callback wired to a menu entry in example().
    print "test3"
################################################################################
def signal_handler(signal,frame):
kbhit.restore_stdin()
print "Caught SIGINT "
print "Restoring stdin"
kbhit.restore_stdin()
exit(0)
def restore_stdin():
    # Re-enable canonical (line-buffered, echoing) terminal input.
    kbhit.restore_stdin()
def unbuffer_stdin():
    # Put the terminal into unbuffered mode so single keypresses are readable.
    kbhit.unbuffer_stdin()
def banner():
    """Print the 80-column QUADCAPE ASCII-art welcome banner."""
    print "*" * 80
    print " " * 35+"Welcome to"
    print "*" * 80
    print " " * 12 +" ___ _ _ _ ____ ____ _ ____ _____"
    print " " * 12 +" / _ \| | | | / \ | _ \ / ___| / \ | _ \| ____|"
    print " " * 12 +" | | | | | | |/ _ \ | | | | | / _ \ | |_) | _| "
    print " " * 12 +" | |_| | |_| / ___ \| |_| | |___ / ___ \| __/| |___ "
    print " " * 12 +" \__\_\\___/_/ \_\____/ \____/_/ \_\_| |_____|"
    print "*" * 80
    print " " * 32 + "Raising innovation"
    print "*" * 80
    return
def print_menu(title, lines):
    """Print a boxed menu: a centered title bar followed by one boxed row per
    entry in `lines`. Entries longer than 48 characters are truncated."""
    LINE_LENGTH=48
    title_len=len(title)
    # NOTE: relies on Python 2 integer division for the centering math.
    spaces=(10+LINE_LENGTH-title_len)/2
    # Odd-length titles need one extra trailing space to keep the box aligned.
    if (len(title) % 2)==0:
        offset=0
    else:
        offset=1
    print "\n"+" " *10 + "*"*60+"\n"+" "*10+"*"+" "*spaces+title+" "*(spaces+offset)+"*\n"+" " *10 + "*"*60+"\n"+" "*10+"*"+" "*58+"*\n",
    for line in lines:
        # Trailing comma suppresses the newline so the row's closing "*" lines up.
        print " " *10+"* ",
        if len(line)<LINE_LENGTH:
            print line,
        else:
            print line[0:LINE_LENGTH],
        print " "* (LINE_LENGTH-len(line))+" *"
    print " "*10+"*"+" "*58+"*\n"+" " *10 + "*"*60+"\n"
    return
class submenu:
    """Keypress-driven text menu.

    Class-level state: menu_array is the stack of menus that have been
    displayed (shared by all instances); depth is the current nesting level.
    """
    menu_array=[]
    depth=0
    def __init__(self,title_in,items_in,functions_in,keys_in):
        #############################################
        #User callable outside
        self.CLEAR_ON_REFRESH=1
        self.PRINT_ON_REFRESH=1
        self.USE_BANNER=1
        self.IDLE_FUNCTION=0
        self.ONCE_FUNCTION=0
        self.NO_RETURN_AFTER_KEY=0
        #############################################
        self.title=title_in
        self.items=items_in
        self.functions=functions_in
        self.keys=keys_in
        self.RETURN_KEY=0
        # Keys are matched case-insensitively; normalize them in place once.
        for x in range(0,len(self.keys)):
            self.keys[x]=self.keys[x].lower()
    def display(self):
        """Show this menu and block, dispatching keypresses to the configured
        functions, until a handled keypress tells the loop to return."""
        submenu.depth+=1
        # Collapse adjacent duplicates in the shared menu stack so re-entering
        # the same menu doesn't grow it unboundedly.
        index = 1
        while index < len(submenu.menu_array):
            if submenu.menu_array[index] == submenu.menu_array[index-1]:
                submenu.menu_array.pop(index)
                index -= 1
            index += 1
        submenu.menu_array.append(self)
        # NOTE(review): debug leftover — prints the raw menu stack on entry.
        print submenu.menu_array
        noresponse=1
        self.refresh(1)
        if self.ONCE_FUNCTION!=0:
            self.ONCE_FUNCTION()
        while noresponse>0:
            # IDLE_FUNCTION (if set) runs between keypress polls; keypresses
            # are only noticed after it returns.
            if self.IDLE_FUNCTION!=0:
                self.IDLE_FUNCTION()
            if kbhit.kbhit()>0:
                char=kbhit.getch().lower()
                for x in range(0,len(self.keys)):
                    #if we find a key then call the function
                    if char==self.keys[x]:
                        if self.functions[x]:
                            self.functions[x]()
                        noresponse=0 +self.NO_RETURN_AFTER_KEY-self.RETURN_KEY
                    #if the key is not found but we have a wild card, call the function
                    # NOTE(review): this elif fires on the first non-matching
                    # slot that holds '*', not only when NO key matches —
                    # confirm this is the intended wildcard behavior.
                    elif self.keys[x]=='*':
                        if self.functions[x]:
                            self.functions[x]()
                        noresponse=0 +self.NO_RETURN_AFTER_KEY-self.RETURN_KEY
                #now that keypress has been evaluated, refresh if proper flags are set
                self.refresh()
        if len(submenu.menu_array)>0:
            submenu.menu_array.pop()
        submenu.depth-=1
        self.RETURN_KEY=0
    def ret(self):
        # Request that display() return after the current keypress is handled.
        self.RETURN_KEY=1
    def refresh(self,force=0):
        """Redraw the menu. force=1 always redraws (clearing first); otherwise
        the CLEAR_ON_REFRESH / PRINT_ON_REFRESH / USE_BANNER flags apply."""
        if force==1:
            try:
                subprocess.call(["clear"])
            except OSError:
                # "clear" missing (e.g. Windows); fall back to "cls".
                subprocess.call(["cls"])
            finally:
                if self.USE_BANNER==1:
                    banner()
                print_menu(self.title,self.items)
                return
        if self.CLEAR_ON_REFRESH==1:
            try:
                subprocess.call(["clear"])
            except OSError:
                subprocess.call(["cls"])
        if self.PRINT_ON_REFRESH==1:
            if self.USE_BANNER==1:
                banner()
            print_menu(self.title,self.items)
        return
    # NOTE(review): back()/current() take no self — under Python 2 they cannot
    # be called on an instance or via submenu.back(); confirm intended usage.
    def back():
        # Re-display the menu one level up the stack.
        submenu.menu_array.pop(len(submenu.menu_array)-2).display()
    def current():
        # Re-display the menu on top of the stack.
        submenu.menu_array.pop(len(submenu.menu_array)-1).display()
# Install the SIGINT handler at import time so Ctrl-C always restores stdin.
signal.signal(signal.SIGINT, signal_handler)
#example()
| {
"content_hash": "4a3c1427b94587b238405b4186d61f5f",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 134,
"avg_line_length": 29.728643216080403,
"alnum_prop": 0.6064908722109533,
"repo_name": "srazojr/QUADCAPE",
"id": "44d23847fde143f5f2dfe0d7831c5ab304f3a8fb",
"size": "5926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/menu.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "755"
},
{
"name": "C",
"bytes": "5027230"
},
{
"name": "C++",
"bytes": "1049711"
},
{
"name": "Objective-C",
"bytes": "69532"
},
{
"name": "OpenEdge ABL",
"bytes": "2655"
},
{
"name": "Python",
"bytes": "100369"
},
{
"name": "Shell",
"bytes": "9401"
}
],
"symlink_target": ""
} |
"""
# Licensed to the Apache Software Foundation (ASF) under one *
# or more contributor license agreements. See the NOTICE file *
# distributed with this work for additional information *
# regarding copyright ownership. The ASF licenses this file *
# to you under the Apache License, Version 2.0 (the *
# "License"); you may not use this file except in compliance *
# with the License. You may obtain a copy of the License at *
# *
# http://www.apache.org/licenses/LICENSE-2.0 *
# *
# Unless required by applicable law or agreed to in writing, *
# software distributed under the License is distributed on an *
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
# KIND, either express or implied. See the License for the *
# specific language governing permissions and limitations *
# under the License.
"""
from __future__ import absolute_import
from ..msg.Field import *
from ..msg.ImportExportHelper import *
from ..msg.Message import *
from ..msg.StructValue import *
from ..msg.Type import *
from ..msg.ValueFactory import *
from ..util.DateSerializer import *
from ..util.ListSerializer import *
from ..util.MapSerializer import *
from ..util.SetSerializer import *
from .Validator_RuntimeException import *
from .Validator_long import *
class DefaultValueFactory(ValueFactory):
    """
    Default implementation of ValueFactory which provides some
    dynamic type and field support, as well as standard value
    conversions and import/export support.
    """

    # Names of the well-known Etch types and message fields.
    ETCH_RUNTIME_EXCEPTION_TYPE_NAME = "_Etch_RuntimeException"
    ETCH_LIST_TYPE_NAME = "_Etch_List"
    ETCH_MAP_TYPE_NAME = "_Etch_Map"
    ETCH_SET_TYPE_NAME = "_Etch_Set"
    ETCH_DATETIME_TYPE_NAME = "_Etch_Datetime"
    ETCH_AUTH_EXCEPTION_TYPE_NAME = "_Etch_AuthException"
    ETCH_EXCEPTION_MESSAGE_NAME = "_exception"
    MSG_FIELD_NAME = "msg"
    MESSAGE_ID_FIELD_NAME = "_messageId"
    IN_REPLY_TO_FIELD_NAME = "_inReplyTo"
    RESULT_FIELD_NAME = "result"

    # Fields
    # The msg field of the standard unchecked exception.
    _mf_msg = Field(MSG_FIELD_NAME)
    # The well-known _messageId field.
    _mf__messageId = Field(MESSAGE_ID_FIELD_NAME)
    # The well-known _inReplyTo field.
    _mf__inReplyTo = Field(IN_REPLY_TO_FIELD_NAME)
    # The well-known result field.
    _mf_result = Field(RESULT_FIELD_NAME)

    @staticmethod
    def init(typs, class2type):
        """
        Initializes the standard types and fields needed by all
        etch generated value factories.
        @param typs type map to populate
        @param class2type class-to-type map to populate
        """
        cls = DefaultValueFactory
        RuntimeExceptionSerialzier.init(typs[cls.ETCH_RUNTIME_EXCEPTION_TYPE_NAME], class2type)
        ListSerialzier.init(typs[cls.ETCH_LIST_TYPE_NAME], class2type)
        MapSerialzier.init(typs[cls.ETCH_MAP_TYPE_NAME], class2type)
        SetSerialzier.init(typs[cls.ETCH_SET_TYPE_NAME], class2type)
        DateSerialzier.init(typs[cls.ETCH_DATETIME_TYPE_NAME], class2type)
        AuthExceptionSerialzier.init(typs[cls.ETCH_AUTH_EXCEPTION_TYPE_NAME], class2type)

        # _mt__Etch_AuthException
        t = typs.get(cls.ETCH_EXCEPTION_MESSAGE_NAME)
        t.putValidator( cls._mf_result, Validator_RuntimeException.get())
        t.putValidator( cls._mf__messageId, Validator_long.get(0))
        t.putValidator( cls._mf__inReplyTo, Validator_long.get(0))

    def __init__(self, typs, class2type):
        """
        Constructs the DefaultValueFactory.
        @param typs type map used to resolve the well-known types
        @param class2type class-to-type map
        """
        cls = self.__class__
        self.__types = typs
        self.__class2type = class2type

        # Resolve and cache the well-known types once at construction time.
        self._mt__Etch_RuntimeException = typs.get(cls.ETCH_RUNTIME_EXCEPTION_TYPE_NAME)
        self._mt__Etch_List = typs.get(cls.ETCH_LIST_TYPE_NAME)
        self._mt__Etch_Map = typs.get(cls.ETCH_MAP_TYPE_NAME)
        self._mt__Etch_Set = typs.get(cls.ETCH_SET_TYPE_NAME)
        self._mt__Etch_Datetime = typs.get(cls.ETCH_DATETIME_TYPE_NAME)
        self._mt__Etch_AuthException = typs.get(cls.ETCH_AUTH_EXCEPTION_TYPE_NAME)
        self._mt__exception = typs.get(cls.ETCH_EXCEPTION_MESSAGE_NAME)

    def get_mt__Etch_RuntimeException(self):
        """Return the well-known _Etch_RuntimeException type."""
        # Bug fix: the original returned the bare name
        # `_mt__Etch_RuntimeException`, which raised NameError at call time;
        # the type is stored on the instance.
        return self._mt__Etch_RuntimeException
| {
"content_hash": "d6ddfcf5dc44c7ae884c0f1c5ac72054",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 95,
"avg_line_length": 38.56521739130435,
"alnum_prop": 0.649379932356257,
"repo_name": "OBIGOGIT/etch",
"id": "33de6ae9085f7ec1ac8ce80be8796f5c5d9f8c6a",
"size": "4435",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "binding-python/runtime/src/main/python/etch/binding/support/DefaultValueFactory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2513090"
},
{
"name": "C#",
"bytes": "1514713"
},
{
"name": "C++",
"bytes": "1109601"
},
{
"name": "CSS",
"bytes": "143"
},
{
"name": "Go",
"bytes": "158833"
},
{
"name": "Java",
"bytes": "2451144"
},
{
"name": "Perl",
"bytes": "290"
},
{
"name": "Python",
"bytes": "444086"
},
{
"name": "Shell",
"bytes": "62900"
},
{
"name": "VimL",
"bytes": "13679"
},
{
"name": "XSLT",
"bytes": "12890"
}
],
"symlink_target": ""
} |
import pytest
from coolcantonese.phonetic import (
NotationMarker,
fetch_symbols,
)
@pytest.fixture
def marker():
    """NotationMarker backed by the bundled phonetic data file."""
    return NotationMarker("coolcantonese/data/phonetic-data.txt")
# def get_noted_chars(in_str):
# return _default.get_noted_chars(in_str)
# def get_symbols(char):
# return _default.get_symbols(char)
# def get_chars(symbols):
# return _default.get_chars(symbols)
def test_get_noted_chars(marker):
    """Smoke test: get_noted_chars on a two-character string should not raise.

    NOTE(review): no assertion is made — this only prints the result.
    """
    # import sys
    # reload(sys)
    # sys.setdefaultencoding("utf-8")
    # print len(_default.char_map)
    in_str = u"屎窟"
    r = marker.get_noted_chars(in_str)
    print(r)
    # print("")
    # r = get_symbols(u"中")
    # print(r)
    # print
    # r = get_chars("zung1")
    # print(r)
def test_fetch_symbols():
    """Smoke test: fetch_symbols for a single character should not raise.

    NOTE(review): no assertion is made — this only prints each result.
    """
    noted_chars = fetch_symbols(u"度")
    for p in noted_chars:
        print(p)
| {
"content_hash": "d788b1290509b3b2b3aeaf874c81b020",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 65,
"avg_line_length": 18.608695652173914,
"alnum_prop": 0.6179906542056075,
"repo_name": "kk17/CoolCantonese",
"id": "a35510eb64287f61899025688602a09b965c1a60",
"size": "926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_phonetic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "529"
},
{
"name": "Python",
"bytes": "53868"
},
{
"name": "Shell",
"bytes": "369"
}
],
"symlink_target": ""
} |
"""
WSGI config for seed_control_interface project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# Must be set before get_wsgi_application() so Django can locate settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                      "seed_control_interface.settings")

application = get_wsgi_application()
# Wrap the WSGI app with WhiteNoise so static files are served by the app itself.
# NOTE(review): `whitenoise.django.DjangoWhiteNoise` was removed in WhiteNoise
# 4.x — confirm the pinned whitenoise version still provides this import.
application = DjangoWhiteNoise(application)
| {
"content_hash": "f79720f5edc3cf25ddb1e11e2121bfb0",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 78,
"avg_line_length": 28.105263157894736,
"alnum_prop": 0.7621722846441947,
"repo_name": "praekelt/seed-control-interface",
"id": "746dd79fd3edf518656614a99524dcdca3b6b69f",
"size": "534",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "seed_control_interface/wsgi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "487398"
},
{
"name": "Dockerfile",
"bytes": "323"
},
{
"name": "HTML",
"bytes": "89777"
},
{
"name": "JavaScript",
"bytes": "311756"
},
{
"name": "Python",
"bytes": "169576"
}
],
"symlink_target": ""
} |
""" Google Hangouts Relay """
import os
import logging
import asyncio
import threading
import hangups
from .relay import Relay
LOG = logging.getLogger(__name__)
class HangoutsRelay(Relay):
    """ Hangouts Relay implementation that handles relays to and from an mq service. """

    RELAY_ARG_WHITELIST = ['auth_token_path', 'relay_client_id', 'hangouts_conversation_id']

    def __init__(self, auth_token_path, relay_client_id, hangouts_conversation_id, mq_client):
        """
        :param auth_token_path: path to the hangups auth token file ('~' is expanded)
        :param relay_client_id: identifier for this relay client
        :param hangouts_conversation_id: the single Hangouts conversation to relay
        :param mq_client: pub/sub client used to relay messages in and out
        """
        self._auth_token_path = os.path.expanduser(auth_token_path)
        self._relay_client_id = relay_client_id
        self._hangouts_conversation_id = hangouts_conversation_id
        self._mq_client = mq_client
        # Bug fix: _client was only assigned inside _connect(), so calling
        # stop() or relay_in() before run() raised AttributeError instead of
        # hitting the intended guard clauses. Initialize it here.
        self._client = None
        super(HangoutsRelay, self).__init__()

    def get_relay_client_id(self):
        """ Return relay_client_id """
        return self._relay_client_id

    def _init_sub_client(self):
        """ Initialize the mq subscribing client """
        self._mq_client.subscribe(on_relay_out=self.relay_out)

    def run(self):
        """
        Called to run this module. This is blocking for the life of the application in order to keep clients alive
        """
        LOG.info("Running relay")
        self._init_sub_client()
        self._connect()

    def stop(self):
        """ Called to stop this module. Should do any relay clean up here. """
        LOG.info("Stopping relay")
        self._mq_client.unsubscribe()
        if self._client:
            self._client.disconnect()

    def _connect(self):
        """
        Contains logic for setting up hangups client (Hangouts unofficial API module). Runs for the duration of the app.
        """
        # This will run in a thread w/o an instantiated event loop
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        cookies = hangups.get_auth_stdin(self._auth_token_path)
        client = hangups.Client(cookies)
        client.on_connect.add_observer(self._on_connect)
        client.on_state_update.add_observer(self._on_state_update)
        self._client = client
        loop.run_until_complete(self._client.connect())
        # client.on_connect
        # client.on_disconnect
        # client.on_reconnect

    def _on_connect(self):
        """ Log client info on hangups client connect """
        self_info = hangups.hangouts_pb2.GetSelfInfoRequest(request_header=self._client.get_request_header(),)
        LOG.info("Hangouts self info: " + str(self_info))

    def _on_state_update(self, state_update):
        """
        This callback runs for any updates from Hangouts, from messages to hover events. Therefore we filter just for
        new messages.
        """
        LOG.debug("state_update: " + str(state_update))
        if state_update.HasField('conversation') and \
                HangoutsRelay._is_in_conversation(state_update, self._hangouts_conversation_id):
            segments = list(state_update.event_notification.event.chat_message.message_content.segment)
            self.relay_in({"message": "".join([x.text for x in segments])})

    @staticmethod
    def _is_not_duplicate(state_update):
        """
        With the Hangouts unofficial API, we need to dedupe messages from our sending to Hangouts, and legitimate
        messages from other parties. This checks for relevant necessary information to make sure a new message
        is not simply an earlier relay_out message.
        """
        return state_update.HasField('event_notification') and \
            state_update.event_notification.HasField('event') and \
            state_update.event_notification.event.HasField('self_event_state') and \
            state_update.event_notification.event.HasField('conversation_id') and \
            state_update.event_notification.event.conversation_id.HasField('id') and \
            state_update.event_notification.event.self_event_state.HasField('user_id') and \
            state_update.event_notification.event.self_event_state.user_id.HasField('chat_id') and \
            state_update.event_notification.event.HasField('sender_id') and \
            state_update.event_notification.event.sender_id.HasField('chat_id') and \
            str(state_update.event_notification.event.self_event_state.user_id.chat_id) != \
            str(state_update.event_notification.event.sender_id.chat_id)

    @staticmethod
    def _is_in_conversation(state_update, conversation_id):
        """ Only relay chats in the conversation_id configured for this relay """
        received_chat_id = str(state_update.event_notification.event.conversation_id.id)
        LOG.info("Received conversation id: %s", received_chat_id)
        return HangoutsRelay._is_not_duplicate(state_update) and received_chat_id == str(conversation_id)

    def relay_out(self, payload):
        """ Builds request to send as Hangouts message """
        LOG.info("relay_out for payload")
        request = hangups.hangouts_pb2.SendChatMessageRequest(
            request_header=self._client.get_request_header(),
            event_request_header=hangups.hangouts_pb2.EventRequestHeader(
                conversation_id=hangups.hangouts_pb2.ConversationId(
                    id=self._hangouts_conversation_id
                ),
                client_generated_id=self._client.get_client_generated_id(),
            ),
            message_content=hangups.hangouts_pb2.MessageContent(
                segment=[hangups.ChatMessageSegment(payload["message"]).serialize()],
            ),
        )
        # Sending must happen on its own event loop; run it in a worker thread.
        thread = threading.Thread(target=self._send_message, args=(request,))
        thread.start()

    def _send_message(self, request):
        """ hangups logic for sending a message """
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(self._client.send_chat_message(request))
        loop.close()

    def relay_in(self, payload):
        """ Publishing client that publishes and disconnects from mq service """
        LOG.info("relay_in for payload")
        if self._client:
            # Bug fix: the log previously said "mq client found", but the
            # guard actually checks the hangouts client.
            LOG.info("hangouts client connected, publishing")
            self._mq_client.publish(payload)
        else:
            raise Exception("Relay has not yet been run, call run()")
| {
"content_hash": "fd2b9d5a81a05a4346fe722aebd2e752",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 120,
"avg_line_length": 43.52447552447553,
"alnum_prop": 0.6446015424164524,
"repo_name": "nerdfarm/obs",
"id": "604740906b80ebd1acd2d6da9214ff0c5903d43c",
"size": "6273",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "obs/relay/hangouts_relay.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30258"
}
],
"symlink_target": ""
} |
import ConfigParser, Image, aztk_config
# Parse etc/display_sizes.cfg once at import time.
display_sizes_cfg = ConfigParser.ConfigParser()
display_sizes_cfg.read('%s/etc/display_sizes.cfg' % aztk_config.aztk_root)

# Map of size name -> settings dict, one entry per section of the config file.
display_sizes = {}
for size in display_sizes_cfg.sections():
    display_sizes[size] = {
        'width': display_sizes_cfg.getint(size, 'width'),
        'height': display_sizes_cfg.getint(size, 'height'),
        'fit_size': display_sizes_cfg.getboolean(size, 'fit_size'),
        'in_use': display_sizes_cfg.getboolean(size, 'in_use'),
        'quality': display_sizes_cfg.getint(size, 'quality'),
    }
## if display_sizes_cfg.get(size, 'method') == "NEAREST": display_sizes[size]['method'] = Image.NEAREST
## elif display_sizes_cfg.get(size, 'method') == "ANTIALIAS": display_sizes[size]['method'] = Image.ANTIALIAS
## else: raise ValueError, 'Invalid value for method in display_sizes.cfg: %s' % display_sizes_cfg.get(size, 'method')
def get_size(width, height, fit_size):
    """Return the name of the configured display size matching the given
    width, height and fit_size flag, or None if nothing matches."""
    target = (int(width), int(height), bool(fit_size))
    for name, info in display_sizes.items():
        candidate = (int(info['width']), int(info['height']), bool(info['fit_size']))
        if candidate == target:
            return name
| {
"content_hash": "e8ccc611da925c0da3badc46e13b2618",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 119,
"avg_line_length": 46.125,
"alnum_prop": 0.6856368563685636,
"repo_name": "kordless/zoto-server",
"id": "5b9580ced117c075ccde97c10c6d0c384742cc6c",
"size": "1107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aztk/lib/display_sizes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "1489011"
},
{
"name": "PHP",
"bytes": "15394"
},
{
"name": "Python",
"bytes": "905967"
},
{
"name": "Shell",
"bytes": "1052"
}
],
"symlink_target": ""
} |
def extractShainagtranslationsWordpressCom(item):
    '''
    Parser for 'shainagtranslations.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # No chapter/volume info, or a preview post: nothing to release.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    if item['tags'] == ['Uncategorized']:
        # (release-title prefix, canonical series name, translation type)
        titlemap = [
            ('The Lady’s Sickly Husband Ch.', 'The Lady\'s Sickly Husband', 'translated'),
            ('Rebirth of the Tyrant’s Pet Ch.', 'Rebirth of the Tyrant\'s Pet', 'translated'),
            ('The Frog Prince and the Witch Ch.', 'The Frog Prince and the Witch', 'translated'),
            ('MLVF Ch.', 'The Male Lead’s Villainess Fiancée', 'translated'),
            ('RotFK Ch. ', 'Return of the Female Knight', 'translated'),
            ('ATP Ch.', 'Avoid the Protagonist!', 'translated'),
            ('MLSW Ch.', 'The Male Lead’s Substitute Wife', 'translated'),
            ('IDATIAC Ch. ', 'I Died And Turned Into A Cat', 'translated'),
            ('TCVCF Ch.', 'The CEO’s Villainess Childhood Friend', 'translated'),
            ('TTID Ch.', 'Trapped in a Typical Idol Drama', 'translated'),
            ('HSAG Ch.', 'Heroine Saves A Gentleman', 'translated'),
            ('INYFL Ch.', 'I\'m Not Your Female Lead', 'translated'),
            ('DLVM Ch.', 'Daily Life of a Villain\'s Mother', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        # Lowercase the title once instead of per loop iteration.
        lowered_title = item['title'].lower()
        for release_prefix, series_name, tl_type in titlemap:
            if release_prefix.lower() in lowered_title:
                return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)

    return False
"content_hash": "dacdb1b0f24059815c093e5c57ddb583",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 105,
"avg_line_length": 60.5,
"alnum_prop": 0.5165289256198347,
"repo_name": "fake-name/ReadableWebProxy",
"id": "8a8c33054198375cf12701f70249c1debc5c08b7",
"size": "1947",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebMirror/management/rss_parser_funcs/feed_parse_extractShainagtranslationsWordpressCom.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "105811"
},
{
"name": "Dockerfile",
"bytes": "1178"
},
{
"name": "HTML",
"bytes": "119737"
},
{
"name": "JavaScript",
"bytes": "3006524"
},
{
"name": "Jupyter Notebook",
"bytes": "148075"
},
{
"name": "Mako",
"bytes": "1454"
},
{
"name": "Python",
"bytes": "5264346"
},
{
"name": "Shell",
"bytes": "1059"
}
],
"symlink_target": ""
} |
"""Settings for bull installation."""
from os.path import abspath, dirname, join
_cwd = dirname(abspath(__file__))
# Subject of the email sent after purchase
# MAIL_SUBJECT =
# Email address for the 'from' field of the generated email
# MAIL_FROM =
# Email server address
# MAIL_SERVER =
# Email server username
# MAIL_USERNAME =
# Email server password
# MAIL_PASSWORD =
# Email server port
# MAIL_PORT =
# Use SSL for email?
# MAIL_USE_SSL =
# Website address, for use in Stripe purchases and in email
# SITE_ADDRESS =
# Database URI for SQLAlchmey (Default: 'sqlite+pysqlite3:///sqlite3.db')
# SQLALCHEMY_DATABASE_URI = 'sqlite+pysqlite:///sqlite3.db'
# Stripe secret key to be used to process purchases
STRIPE_SECRET_KEY = 'foo'
# Stripe public key to be used to process purchases
STRIPE_PUBLIC_KEY = 'bar'
| {
"content_hash": "9cf5d1271f06a6fd64319fddc882353a",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 73,
"avg_line_length": 22.833333333333332,
"alnum_prop": 0.7214111922141119,
"repo_name": "abdelinho24/flask-shop",
"id": "fe9fa2dad302926efd43ba0762bb3eaab76673c7",
"size": "822",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [],
"symlink_target": ""
} |
"""The classes in this module represent objects sent and received from
the XMS REST API.
"""
from __future__ import absolute_import, division, print_function
class ReportType(object):
    """Known delivery report types.
    Any of these values is valid for
    :py:attr:`MtSmsBatch.delivery_report`.
    """
    NONE = 'none'
    SUMMARY = 'summary'
    FULL = 'full'
    PER_RECIPIENT = 'per_recipient'
class DeliveryStatus(object):
    """Known delivery statuses.
    The XMS API may introduce new statuses over time, so code should
    not assume this list is exhaustive.
    """
    QUEUED = "Queued"
    """The message sits in the REST API queue and will be dispatched at
    the account's rate."""
    DISPATCHED = "Dispatched"
    """The SMSC accepted the message for delivery."""
    ABORTED = "Aborted"
    """Delivery was aborted before the message reached the SMSC."""
    REJECTED = "Rejected"
    """The SMSC rejected the message."""
    DELIVERED = "Delivered"
    """The message reached its destination."""
    FAILED = "Failed"
    """Delivery of the message failed."""
    EXPIRED = "Expired"
    """The message expired before it could be delivered."""
    UNKNOWN = "Unknown"
    """Delivery state of the message cannot be determined."""
class DeliveryReportType(object):
    """Delivery report flavors that can be retrieved for a batch."""
    SUMMARY = "summary"
    """Request a summary batch delivery report.
    Contains only aggregated delivery statistics, without
    per-recipient results.
    """
    FULL = "full"
    """Request a full batch delivery report.
    Contains per-recipient delivery results; for batches with many
    destinations such reports may be very large.
    """
class Reset(object):
    """Marker type indicating that a field should be reset.
    Used when updating previously created XMS objects. Rather than
    instantiating this class directly, use the shared constant
    :const:`.RESET`.
    """
    def __init__(self):
        pass
RESET = Reset()
"""Object used to indicate that a XMS field should be reset to its
default value."""
class MtBatchSms(object):
    """Common base for the SMS batch create and result classes.
    .. attribute:: recipients
      One or more MSISDNs indicating the batch recipients.
      :type: set[str]
    .. attribute:: sender
      The batch sender, typically a short code or long number.
      :type: str
    .. attribute:: delivery_report
      The type of delivery report to use for this batch.
      :type: str
    .. attribute:: send_at
      The time at which this batch should be sent.
      :type: datetime
    .. attribute:: expire_at
      The time at which this batch should expire.
      :type: datetime
    .. attribute:: callback_url
      The URL to which callbacks should be sent.
      :type: str
    """
    def __init__(self):
        # Only recipients starts non-None; everything else is unset.
        self.recipients = set()
        self.sender = None
        self.delivery_report = None
        self.callback_url = None
        self.send_at = None
        self.expire_at = None
class MtBatchSmsCreate(MtBatchSms):
    """Describes parameters available during batch creation.
    We can create two kinds of batches, textual and binary, described
    in the child classes :py:class:`MtBatchTextSmsCreate` and
    :py:class:`MtBatchBinarySmsCreate`, respectively.
    .. attribute:: tags
      The initial set of tags to give the batch.
      :type: set[str]
    """
    def __init__(self):
        MtBatchSms.__init__(self)
        self.tags = set()
class MtBatchTextSmsCreate(MtBatchSmsCreate):
    """Class whose fields describe a text batch.
    .. attribute:: body
      The message body or template.
      :type: str
    .. attribute:: parameters
      The template parameters.
      This property is only relevant if the :py:attr:`body` property
      is a template. This is expected to be an associative array
      mapping parameter keys to associative arrays themselves mapping
      recipient numbers to substitution strings.
      More concretely we may have for the parameterized message
      "Hello, ${name}!" have::
        batch.parameters = {
            'name': {
                '123456789': 'Mary',
                '987654321': 'Joe',
                'default': 'valued customer'
            }
        }
      And the recipient with MSISDN "123456789" would then receive the
      message "Hello, Mary!".
      Note the use of "default" to indicate the substitution for
      recipients not explicitly given. For example, the recipient
      "555555555" would receive the message "Hello, valued customer!".
      :type: dict[str, dict[str, str]]
    """
    def __init__(self):
        MtBatchSmsCreate.__init__(self)
        self.body = None
        self.parameters = {}
class MtBatchBinarySmsCreate(MtBatchSmsCreate):
    """Parameters available when creating a binary SMS batch.
    .. attribute:: body
      The binary message payload.
      :type: bytes
    .. attribute:: udh
      The User Data Header of the payload.
      :type: bytes
    """
    def __init__(self):
        MtBatchSmsCreate.__init__(self)
        self.udh = None
        self.body = None
class MtBatchSmsUpdate(object):
    """Describes updates that can be performed on text and binary SMS
    batches.
    .. attribute:: recipient_insertions
      The message destinations to add to the batch. This should have
      zero or more MSISDNs.
      :type: set[str]
    .. attribute:: recipient_removals
      The message destinations to remove from the batch. This should
      have zero or more MSISDNs.
      :type: set[str]
    .. attribute:: sender
      The message originator as a long number or short code. If
      ``None`` then the current value is kept, if :const:`.RESET` then
      the value is reset to its XMS default, and if set to a string
      the sender is updated.
      :type: str or None or Reset
    .. attribute:: delivery_report
      Description of how to update the batch delivery report value. If
      ``None`` then the current value is kept, if :const:`.RESET` then
      the value is reset to its XMS default, and if set to a string
      the delivery report value is updated.
      See :class:`ReportType` for valid report types.
      :type: str or None or Reset
    .. attribute:: send_at
      Description of how to update the batch send at value. If
      ``None`` then the current value is kept, if :const:`.RESET` then
      the value is reset to its XMS default, and if set to a date time
      the send at value is updated.
      :type: datetime or None or Reset
    .. attribute:: expire_at
      Description of how to update the batch expire at value. If
      ``None`` then the current value is kept, if :const:`.RESET` then
      the value is reset to its XMS default, and if set to a date time
      the expire at value is updated.
      :type: datetime or None or Reset
    .. attribute:: callback_url
      Description of how to update the batch callback URL. If ``None``
      then the current value is kept, if :const:`.RESET` then the
      value is reset to its XMS default, and if set to a string the
      callback URL value is updated.
      :type: str or None or Reset
    """
    def __init__(self):
        # ``None`` throughout means "leave the current batch value
        # untouched"; use :const:`.RESET` to restore the XMS default.
        self.recipient_insertions = set()
        self.recipient_removals = set()
        self.sender = None
        self.delivery_report = None
        self.send_at = None
        self.expire_at = None
        self.callback_url = None
class MtBatchTextSmsUpdate(MtBatchSmsUpdate):
    """Update operations applicable to a text batch.
    .. attribute:: body
      The new batch message body. ``None`` keeps the current body.
      :type: str or None
    .. attribute:: parameters
      How to update the template parameters: ``None`` keeps the
      current value, :const:`.RESET` restores the XMS default, and a
      dictionary replaces the parameters.
      :type: dict or None or Reset
    """
    def __init__(self):
        MtBatchSmsUpdate.__init__(self)
        self.parameters = None
        self.body = None
class MtBatchBinarySmsUpdate(MtBatchSmsUpdate):
    """Update operations applicable to a binary SMS batch.
    .. attribute:: body
      The new binary batch body. ``None`` keeps the existing body.
      :type: bytes or None
    .. attribute:: udh
      The new binary User Data Header. ``None`` keeps the existing
      UDH.
      :type: bytes or None
    """
    def __init__(self):
        MtBatchSmsUpdate.__init__(self)
        self.udh = None
        self.body = None
class MtBatchSmsResult(MtBatchSms):
    """Fields shared by the text and binary batch result classes.
    .. attribute:: batch_id
      The unique batch identifier.
      :type: str
    .. attribute:: created_at
      Time when this batch was created.
      :type: datetime
    .. attribute:: modified_at
      Time when this batch was last modified.
      :type: datetime
    .. attribute:: canceled
      Whether this batch has been canceled.
      :type: bool
    """
    def __init__(self):
        MtBatchSms.__init__(self)
        self.canceled = None
        self.modified_at = None
        self.created_at = None
        self.batch_id = None
class MtBatchTextSmsResult(MtBatchSmsResult):
    """A textual batch as returned by the XMS endpoint.
    This differs from the batch creation definition by the addition
    of, for example, the batch identifier and the creation time.
    .. attribute:: body
      The message body or template. See
      :py:attr:`MtBatchTextSmsCreate.parameters`.
      :type: str
    .. attribute:: parameters
      The template parameters.
      :type: dict[str, dict[str, str]]
    """
    def __init__(self):
        MtBatchSmsResult.__init__(self)
        self.body = None
        self.parameters = None
class MtBatchBinarySmsResult(MtBatchSmsResult):
    """A binary SMS batch as returned by the XMS endpoint.
    .. attribute:: body
      The binary message payload.
      :type: bytes
    .. attribute:: udh
      The User Data Header of the payload.
      :type: bytes
    """
    def __init__(self):
        MtBatchSmsResult.__init__(self)
        self.udh = None
        self.body = None
class BatchDeliveryReport(object):
    """Delivery report for an entire batch.
    The report groups the batch messages into *buckets*, one per
    delivery status/code pair, exposed through :py:attr:`statuses`.
    .. attribute:: batch_id
      Identifier of the batch that this report covers.
      :type: str
    .. attribute:: total_message_count
      The total number of messages sent as part of this batch.
      :type: int
    .. attribute:: statuses
      The status buckets carrying aggregated delivery information for
      messages sharing a delivery status and delivery code.
      :type: list[BatchDeliveryReportStatus]
    """
    def __init__(self):
        self.statuses = []
        self.total_message_count = None
        self.batch_id = None
class BatchDeliveryReportStatus(object):
    """One aggregated statistics *bucket* of a batch delivery report.
    See :py:class:`BatchDeliveryReport`.
    .. attribute:: code
      The delivery status code for this recipient bucket.
      :type: int
    .. attribute:: status
      The delivery status for this recipient bucket.
      :type: str
    .. attribute:: count
      The number of recipients belonging to this bucket.
      :type: int
    .. attribute:: recipients
      The recipients having this status.
      Non-empty only when a `full` delivery report was requested.
      :type: set[str]
    """
    def __init__(self):
        self.recipients = set()
        self.count = None
        self.status = None
        self.code = None
class BatchRecipientDeliveryReport(object):
    """A delivery report for an individual batch recipient.
    .. attribute:: batch_id
      The batch identifier.
      :type: string
    .. attribute:: recipient
      The recipient address.
      :type: string
    .. attribute:: code
      The delivery code.
      :type: int
    .. attribute:: status
      The delivery status. See :class:`DeliveryStatus` for known
      values.
      :type: string
    .. attribute:: status_message
      The delivery status message. The status message is not always
      available and the attribute is set to *None* in those cases.
      :type: string or None
    .. attribute:: operator
      The recipient's mobile operator. If the operator is not known,
      then this is set to *None*.
      :type: string or None
    .. attribute:: status_at
      The time at delivery.
      :type: datetime
    .. attribute:: operator_status_at
      The time of delivery as reported by operator.
      :type: datetime or None
    """
    def __init__(self):
        self.batch_id = None
        self.recipient = None
        self.code = None
        self.status = None
        self.status_message = None
        self.operator = None
        self.status_at = None
        self.operator_status_at = None
class Error(object):
    """An error response as reported by XMS.
    :param str code: the error code
    :param str text: the human readable error text
    .. attribute:: code
      A code that can be used to programmatically recognize the error.
      :type: str
    .. attribute:: text
      Human readable description of the error.
      :type: str
    """
    def __init__(self, code, text):
        self.text = text
        self.code = code
class MtBatchDryRunResult(object):
    """A batch dry run report.
    .. attribute:: number_of_recipients
      The number of recipients that would receive the batch message.
      :type: int
    .. attribute:: number_of_messages
      The number of messages that will be sent.
      :type: int
    .. attribute:: per_recipient
      The per-recipient dry-run result.
      :type: list[DryRunPerRecipient]
    """
    def __init__(self):
        self.number_of_recipients = None
        self.number_of_messages = None
        self.per_recipient = []
class DryRunPerRecipient(object):
    """Per-recipient dry-run result.
    Objects of this class only occur within dry-run results. See
    :class:`MtBatchDryRunResult`.
    .. attribute:: recipient
      The recipient.
      :type: str
    .. attribute:: number_of_parts
      Number of message parts needed for the recipient.
      :type: int
    .. attribute:: body
      Message body sent to this recipient.
      :type: str
    .. attribute:: encoding
      Text encoding used for this recipient, one of
      :const:`ENCODING_TEXT` or :const:`ENCODING_UNICODE`.
      :type: str
    """
    ENCODING_TEXT = "text"
    """Constant indicating non-unicode encoding."""
    ENCODING_UNICODE = "unicode"
    """Constant indicating unicode encoding."""
    def __init__(self):
        self.encoding = None
        self.body = None
        self.number_of_parts = None
        self.recipient = None
class GroupAutoUpdate(object):
    """A description of automatic group updates.
    An automatic update is triggered by a mobile originated message to
    a given number containing special keywords.
    When the given recipient receives a mobile originated SMS
    containing keywords (first and/or second) matching the given
    ``add`` arguments then the sender MSISDN is added to the group.
    Similarly, if the MO is matching the given ``remove`` keyword
    arguments then the MSISDN is removed from the group.
    For example::
      GroupAutoUpdate(
          recipient='12345',
          add_first_word='add',
          remove_first_word='remove')
    would trigger based solely on the first keyword of the MO message.
    On the other hand::
      GroupAutoUpdate(
          recipient='12345',
          add_first_word='alert',
          add_second_word='add',
          remove_first_word='alert',
          remove_second_word='remove')
    would trigger only when both the first and second keyword are
    given in the MO message.
    :param str recipient: recipient that triggers this rule
    :param add_first_word: first ``add`` keyword, default is `None`.
    :type add_first_word: str or None
    :param add_second_word: second ``add`` keyword, default is `None`.
    :type add_second_word: str or None
    :param remove_first_word: first ``remove`` keyword, default is `None`.
    :type remove_first_word: str or None
    :param remove_second_word: second ``remove`` keyword, default is `None`.
    :type remove_second_word: str or None
    .. attribute:: recipient
      The recipient of the mobile originated message. A short code or
      long number.
      :type: str
    .. attribute:: add_word_pair
      A two-element tuple holding the first and second keyword that
      causes the MO sender to be added to the group.
      :type: tuple[str or None, str or None]
    .. attribute:: remove_word_pair
      A two-element tuple holding the first and second keyword that
      causes the MO sender to be removed from the group.
      :type: tuple[str or None, str or None]
    """
    def __init__(self,
                 recipient,
                 add_first_word=None,
                 add_second_word=None,
                 remove_first_word=None,
                 remove_second_word=None):
        # The keyword arguments are folded into the two pair attributes.
        self.recipient = recipient
        self.add_word_pair = (add_first_word, add_second_word)
        self.remove_word_pair = (remove_first_word, remove_second_word)
class GroupCreate(object):
    """Fields needed to create a recipient group.
    .. attribute:: name
      The group name.
      :type: str
    .. attribute:: members
      MSISDNs belonging to this group.
      :type: set[str]
    .. attribute:: child_groups
      Identifiers of groups that in turn belong to this group.
      :type: set[str]
    .. attribute:: auto_update
      The auto-update rule for this group, or ``None`` when no auto
      updating should be performed.
      :type: GroupAutoUpdate or None
    .. attribute:: tags
      The tags associated to this group.
      :type: set[str]
    """
    def __init__(self):
        self.tags = set()
        self.auto_update = None
        self.child_groups = set()
        self.members = set()
        self.name = None
class GroupResult(object):
    """The result of a group fetch operation.
    May be used either standalone or as an element of a paged result.
    .. attribute:: group_id
      The unique group identifier.
      :type: str
    .. attribute:: name
      The group name.
      :type: str
    .. attribute:: size
      The number of members of this group.
      :type: int
    .. attribute:: child_groups
      A set of groups that in turn belong to this group.
      :type: set[str]
    .. attribute:: auto_update
      The auto-update rule for this group, or ``None`` when no auto
      updating is performed.
      :type: GroupAutoUpdate or None
    .. attribute:: created_at
      The time at which this group was created.
      :type: datetime
    .. attribute:: modified_at
      The time when this group was last modified.
      :type: datetime
    """
    def __init__(self):
        self.modified_at = None
        self.created_at = None
        self.auto_update = None
        self.child_groups = set()
        self.size = None
        self.name = None
        self.group_id = None
class GroupUpdate(object):
    """Describes updates that can be performed on a group.
    .. attribute:: name
      Updates the group name.
      If ``None`` then the current value is kept, if :const:`.RESET`
      then the value is reset to its XMS default, and if set to a
      string the name is updated.
      :type: None or str or Reset
    .. attribute:: member_insertions
      The MSISDNs that should be added to this group.
      :type: set[str]
    .. attribute:: member_removals
      The MSISDNs that should be removed from this group.
      :type: set[str]
    .. attribute:: child_group_insertions
      The child groups that should be added to this group.
      :type: set[str]
    .. attribute:: child_group_removals
      The child groups that should be removed from this group.
      :type: set[str]
    .. attribute:: add_from_group
      Identifier of a group whose members should be added to this
      group.
      :type: str
    .. attribute:: remove_from_group
      Identifier of a group whose members should be removed from this
      group.
      :type: str
    .. attribute:: auto_update
      Describes how this group should be auto updated.
      If ``None`` then the current value is kept, if :const:`.RESET`
      then the value is reset to its XMS default, and if set to a
      ``GroupAutoUpdate`` object the value is updated.
      :type: None or GroupAutoUpdate or Reset
    """
    def __init__(self):
        # ``None`` means "keep the current value"; :const:`.RESET`
        # restores the XMS default where applicable.
        self.name = None
        self.member_insertions = set()
        self.member_removals = set()
        self.child_group_insertions = set()
        self.child_group_removals = set()
        self.add_from_group = None
        self.remove_from_group = None
        self.auto_update = None
class MoSms(object):
    """Common fields of mobile originated (MO) SMS messages.
    Shared by both the textual and binary MO classes.
    .. attribute:: message_id
      The message identifier.
      :type: str
    .. attribute:: recipient
      The message recipient. This is a short code or long number.
      :type: str
    .. attribute:: sender
      The message sender. This is an MSISDN.
      :type: str
    .. attribute:: operator
      The MCCMNC of the originating operator, if available.
      :type: str or None
    .. attribute:: sent_at
      The time when this message was sent, if available.
      :type: datetime or None
    .. attribute:: received_at
      The time when the messaging system received this message.
      :type: datetime
    """
    def __init__(self):
        self.received_at = None
        self.sent_at = None
        self.operator = None
        self.sender = None
        self.recipient = None
        self.message_id = None
class MoTextSms(MoSms):
    """A mobile originated SMS message with textual content.
    .. attribute:: body
      The message body.
      :type: str
    .. attribute:: keyword
      The message keyword, if available.
      :type: str or None
    """
    def __init__(self):
        MoSms.__init__(self)
        self.keyword = None
        self.body = None
class MoBinarySms(MoSms):
    """A mobile originated SMS message with binary content.
    .. attribute:: body
      The binary message payload.
      :type: bytes
    .. attribute:: udh
      The user data header.
      :type: bytes
    """
    def __init__(self):
        MoSms.__init__(self)
        self.udh = None
        self.body = None
class Page(object):
    """A page of elements.
    The element type depends on the type of page that has been
    retrieved. Typically it is one of :class:`MtSmsBatchResponse` or
    :class:`GroupResponse`.
    .. attribute:: page
      The page number, starting from zero.
      :type: int
    .. attribute:: size
      The number of elements on this page.
      :type: int
    .. attribute:: total_size
      The total number of elements across all fetched pages.
      :type: int
    .. attribute:: content
      The page elements.
      :type: list[obj]
    """
    def __init__(self):
        self.page = None
        self.size = None
        self.total_size = None
        self.content = None
    def __iter__(self):
        """Returns an iterator over the content of this page.
        For example, if the page is the result of a batch listing then
        this iterator will yield batch results.
        :returns: the page iterator
        :rtype: iterator
        """
        return iter(self.content)
class Pages(object):
    """A lazily fetched, paged result set.
    Individual pages can be fetched on demand, or every page can be
    visited by iterating over this object.
    :param worker: worker function that fetches pages
    """
    def __init__(self, worker):
        self._worker = worker
    def get(self, page):
        """Download one page of the result.
        :param int page: number of the page to fetch
        :return: a page
        :rtype: Page
        """
        return self._worker(page)
    def __iter__(self):
        """Return an iterator that walks across all pages."""
        return PagesIterator(self)
class PagesIterator(object):
    """An iterator over a paged result.
    Pages are fetched lazily, one at a time, and the iteration stops
    as soon as an empty page is encountered.
    :param Pages pages: the pages that we are iterating over
    """
    def __init__(self, pages):
        self._pages = pages
        self._cur_page = None
        self._position = 0
    def __iter__(self):
        # An iterator must itself be iterable so it can be used in a
        # ``for`` loop or passed to ``iter()``; previously this method
        # was missing, which made those uses raise TypeError.
        return self
    def next(self):
        # Python 2 compatibility alias for ``__next__``.
        return self.__next__()
    def __next__(self):
        """Fetch and return the next page.
        :raises StopIteration: when an empty page is reached
        """
        if not self._cur_page or self._cur_page.page != self._position:
            self._cur_page = self._pages.get(self._position)
        self._position += 1
        # If we fetched an empty page then the iteration is over.
        if self._cur_page.size <= 0:
            raise StopIteration
        else:
            return self._cur_page
| {
"content_hash": "511af85963602ee40170353b941dbdac",
"timestamp": "",
"source": "github",
"line_count": 1171,
"max_line_length": 77,
"avg_line_length": 22.35269000853971,
"alnum_prop": 0.6245272206303725,
"repo_name": "clxcommunications/sdk-xms-python",
"id": "664e8f442c5bef3ae28c9bffbd7e85d56df65d51",
"size": "26288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clx/xms/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "142564"
}
],
"symlink_target": ""
} |
import examples.connect
"""
Find a resource from the Compute service.
For a full guide see TODO(etoews):link to docs on developer.openstack.org
"""
def find_image(conn):
    """Look up the example image by name, print it and return it.
    :param conn: an established OpenStack connection
    :return: the matching image, or ``None`` if no match was found
    """
    print("Find Image:")
    found = conn.compute.find_image(examples.connect.IMAGE_NAME)
    print(found)
    return found
def find_flavor(conn):
    """Look up the example flavor by name, print it and return it.
    :param conn: an established OpenStack connection
    :return: the matching flavor, or ``None`` if no match was found
    """
    print("Find Flavor:")
    found = conn.compute.find_flavor(examples.connect.FLAVOR_NAME)
    print(found)
    return found
def find_keypair(conn):
    """Look up the example keypair by name, print it and return it.
    :param conn: an established OpenStack connection
    :return: the matching keypair, or ``None`` if no match was found
    """
    print("Find Keypair:")
    found = conn.compute.find_keypair(examples.connect.KEYPAIR_NAME)
    print(found)
    return found
| {
"content_hash": "3127078997303558d10d35c75e77caa9",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 73,
"avg_line_length": 16.864864864864863,
"alnum_prop": 0.6971153846153846,
"repo_name": "briancurtin/python-openstacksdk",
"id": "b009df922c9c3de54862b1fdd9b3d14dbaa982fd",
"size": "1170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/compute/find.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1956816"
},
{
"name": "Shell",
"bytes": "1865"
}
],
"symlink_target": ""
} |
import subprocess
from command import Command
from ..git_tools import git_base_command
class ListVersionsCommand(Command):
    """Command that reports every git tag as an available version."""
    def __init__(self, flow, cmd_name, params):
        Command.__init__(self, flow, cmd_name, params)
    def exec_impl(self):
        """Run ``git tag`` and store the tag list in the response."""
        tag_output = self.shell_helper(git_base_command() + ['tag'])
        self.response = {
            'success': True,
            'message': 'Found version list',
            'version_list': tag_output.split(),
        }
| {
"content_hash": "a9d7128e2104f672ed3550c29089ca11",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 69,
"avg_line_length": 29.77777777777778,
"alnum_prop": 0.5597014925373134,
"repo_name": "manylabs/flow",
"id": "6a016d0cd4311451f60abb862e5c2bdbbf9a4bd5",
"size": "536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flow/commands/list_versions_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "92403"
},
{
"name": "Shell",
"bytes": "146"
}
],
"symlink_target": ""
} |
'''
Utilities module for jumeg
'''
# Authors: Jurgen Dammers (j.dammers@fz-juelich.de)
# Praveen Sripad (pravsripad@gmail.com)
# Eberhard Eich (e.eich@fz-juelich.de) ()
#
# License: BSD (3-clause)
import sys
import os
import os.path as op
import fnmatch
import numpy as np
import scipy as sci
from sklearn.utils import check_random_state
import fnmatch
import mne
from mne.utils import logger
def get_files_from_list(fin):
    """Normalize a filename argument into a list of filenames.
    Accepts a single string, a list (returned unchanged), or any other
    iterable, and always yields a list.
    """
    if isinstance(fin, list):
        return fin
    if isinstance(fin, str):
        return [fin]
    return list(fin)
def retcode_error(command, subj):
    """Print a standard failure notice for a command run on a subject."""
    print('%s did not run successfully for subject %s.' % (command, subj))
    print('Please check the arguments, and rerun for subject.')
def get_jumeg_path():
    """Return the absolute directory path where jumeg is installed."""
    package_dir = os.path.dirname(__file__)
    return os.path.abspath(package_dir)
def check_jumeg_standards(fnames):
    '''
    Checks for file name extension and prints information on type of file.
    Recognized suffixes: -raw.fif, -meg.fif, -eeg.fif, -ica.fif,
    -evoked.fif, -epochs.fif, -empty.fif.
    fnames: str or list
        A single filename or a list/iterable of filenames.
    '''
    # Normalize the argument into a list of filenames.
    if isinstance(fnames, list):
        fname_list = fnames
    else:
        if isinstance(fnames, str):
            fname_list = list([fnames])
        else:
            fname_list = list(fnames)
    print(fname_list)
    # loop across all filenames
    for fname in fname_list:
        print(fname)
        if fname == '' or not fname.endswith('.fif'):
            print('Empty string or not a FIF format filename.')
        elif fname.endswith('-meg.fif') or fname.endswith('-eeg.fif'):
            print('Raw FIF file with only MEG or only EEG data.')
        elif fname.split('-')[-1] == 'raw.fif':
            print('Raw FIF file - Subject %s, Experiment %s, Data %s, Time %s, \
                Trial number %s.' \
                  % (fname.split('_')[0], fname.split('_')[1], fname.split('_')[2],
                     fname.split('_')[3], fname.split('_')[4]))
            # BUG FIX: str.strip('-raw.fif') removes *characters* from the
            # set {-, r, a, w, ., f, i} at both ends, mangling identifiers
            # such as 'fifa'; slice off the literal suffix instead.
            print('Processing identifier in the file %s.' \
                  % (fname[:-len('-raw.fif')].split('_')[-1]))
        elif fname.split('-')[-1] == 'ica.fif':
            print('FIF file storing ICA session.')
        elif fname.split('-')[-1] == 'evoked.fif':
            print('FIF file with averages.')
        elif fname.split('-')[-1] == 'epochs.fif':
            print('FIF file with epochs.')
        elif fname.split('-')[-1] == 'empty.fif':
            print('Empty room FIF file.')
        else:
            print('No known file info available. Filename does not follow conventions.')
        print('Please verify if the information is correct and make the appropriate changes!')
    return
def get_sytem_type(info):
    """
    Function to get type of the system used to record
    the processed MEG data.
    Returns one of 'magnesWH3600', 'CTF-275', 'ElektaNeuromagTriux'
    or None when the system cannot be identified.
    NOTE(review): the misspelled name ('sytem') is kept for backward
    compatibility with existing callers.
    """
    from mne.io.constants import FIFF
    chs = info.get('chs')
    # Collect the distinct coil and channel types present in the recording.
    coil_types = set([ch['coil_type'] for ch in chs])
    channel_types = set([ch['kind'] for ch in chs])
    # 4D/BTi systems are identified by their magnetometer coil type.
    has_4D_mag = FIFF.FIFFV_COIL_MAGNES_MAG in coil_types
    ctf_other_types = (FIFF.FIFFV_COIL_CTF_REF_MAG,
                       FIFF.FIFFV_COIL_CTF_REF_GRAD,
                       FIFF.FIFFV_COIL_CTF_OFFDIAG_REF_GRAD)
    elekta_types = (FIFF.FIFFV_COIL_VV_MAG_T3,
                    FIFF.FIFFV_COIL_VV_PLANAR_T1)
    # CTF: either the gradiometer coil directly, or MEG channels with
    # any of the CTF reference coil types.
    has_CTF_grad = (FIFF.FIFFV_COIL_CTF_GRAD in coil_types or
                    (FIFF.FIFFV_MEG_CH in channel_types and
                     any([k in ctf_other_types for k in coil_types])))
    # Elekta/Neuromag: VectorView magnetometer or planar gradiometer coils.
    has_Elekta_grad = (FIFF.FIFFV_COIL_VV_MAG_T3 in coil_types or
                       (FIFF.FIFFV_MEG_CH in channel_types and
                        any([k in elekta_types for k in coil_types])))
    if has_4D_mag:
        system_type = 'magnesWH3600'
    elif has_CTF_grad:
        system_type = 'CTF-275'
    elif has_Elekta_grad:
        system_type = 'ElektaNeuromagTriux'
    else:
        # ToDo: Expand method to also cope with other systems!
        print("System type not known!")
        system_type = None
    return system_type
def mark_bads_batch(subject_list, subjects_dir=None):
    '''
    Opens all raw files ending with -raw.fif in subjects
    directory for interactive marking of bad channels.
    Parameters
    ----------
    subject_list: List of subjects.
    subjects_dir: The subjects directory. If None, the default SUBJECTS_DIR
                  from environment will be considered.
    Output
    ------
    The raw files with bads marked are saved with _bcc (for bad channels checked)
    added to the file name.
    '''
    for subj in subject_list:
        print("For subject %s" % (subj))
        # Fall back to the SUBJECTS_DIR environment variable (resolved
        # once; the same directory is reused for later subjects).
        if not subjects_dir: subjects_dir = os.environ['SUBJECTS_DIR']
        dirname = subjects_dir + '/' + subj
        sub_file_list = os.listdir(dirname)
        for raw_fname in sub_file_list:
            # Skip files that were already checked (_bcc suffix).
            if raw_fname.endswith('_bcc-raw.fif'): continue
            if raw_fname.endswith('-raw.fif'):
                print("Raw calculations for file %s" % (dirname + '/' + raw_fname))
                raw = mne.io.Raw(dirname + '/' + raw_fname, preload=True)
                # block=True pauses here until the user closes the plot,
                # so raw.info['bads'] reflects the interactive selection.
                raw.plot(block=True)
                print('The bad channels marked are %s ' % (raw.info['bads']))
                save_fname = dirname + '/' + raw.filenames[0].split('/')[-1].split('-raw.fif')[0] + '_bcc-raw.fif'
                raw.save(save_fname)
    return
def rescale_artifact_to_signal(signal, artifact):
    '''
    Rescale an artifact trace (ECG/EOG) onto the signal's range for
    plotting purposes.
    For evoked data, pass signal.data.mean(axis=0) and
    artifact.data.mean(axis=0).
    '''
    # NOTE(review): the denominator uses max() + min(); a peak-to-peak
    # rescaling would normally use max() - min() -- confirm intent.
    scale = (signal.max() - signal.min()) / (artifact.max() + artifact.min())
    offset = signal.max()
    return (artifact * scale + offset) / 1e15
def check_read_raw(raw_name, preload=True):
    '''
    Return a Raw object for raw_name, reading it from disk if needed.
    raw_name: instance of mne.io.Raw | str
        Raw object (returned unchanged) or filename to be read.
    preload: bool
        All data loaded to memory. Defaults to True.
    '''
    if isinstance(raw_name, str):
        return mne.io.Raw(raw_name, preload=preload)
    if isinstance(raw_name, (mne.io.Raw, mne.io.bti.bti.RawBTi)):
        return raw_name
    raise RuntimeError('%s type not mne.io.Raw or string.' % raw_name)
def peak_counter(signal):
    '''Count local maxima in ``signal`` using scipy's argrelmax.
    Returns the shape tuple of the peak-index array, i.e. ``(n_peaks,)``.
    '''
    # Import the subpackage explicitly: ``import scipy as sci`` alone does
    # not guarantee ``sci.signal`` is loaded -- the old code only worked
    # because another import (mne) pulled in scipy.signal as a side effect.
    from scipy.signal import argrelmax
    return argrelmax(signal)[0].shape
def update_description(raw, comment):
    '''Append ``comment`` to the raw measurement's description field.'''
    existing = str(raw.info['description'])
    raw.info['description'] = existing + ' ; ' + comment
def chop_raw_data(raw, start_time=60.0, stop_time=360.0, save=True, return_chop=False):
    '''
    Extract a specified duration of raw data and optionally write it
    to a fif file. Five mins of data will be extracted by default.

    Parameters
    ----------
    raw: Raw object or raw file name as a string.
    start_time: Time to extract data from in seconds. Default is 60.0 seconds.
    stop_time: Time up to which data is to be extracted. Default is 360.0 seconds.
    save: bool, If True the raw file is written to disk. (default: True)
    return_chop: bool, Return the chopped raw object. (default: False)

    Returns
    -------
    crop : instance of Raw | None
        The cropped raw object when return_chop is True, otherwise None.
    '''
    if isinstance(raw, str):
        print('Raw file name provided, loading raw object...')
        raw = mne.io.Raw(raw, preload=True)
    # Check if data is longer than required chop duration.
    if (raw.n_times / (raw.info['sfreq'])) < (stop_time + start_time):
        # BUGFIX: '%' was previously applied to the return value of
        # logger.info() (None), raising TypeError instead of logging.
        logger.info("The data is not long enough for file %s." % (raw.filenames[0]))
        return
    # Obtain indexes for start and stop times.
    assert start_time < stop_time, "Start time is greater than stop time."
    crop = raw.copy().crop(tmin=start_time, tmax=stop_time)
    dur = int((stop_time - start_time) / 60)
    if save:
        crop.save(crop.filenames[0].split('-raw.fif')[0] + ',' + str(dur) + 'm-raw.fif')
    raw.close()
    if return_chop:
        return crop
    else:
        crop.close()
        return
#######################################################
# #
# to extract the indices of the R-peak from #
# ECG single channel data #
# #
#######################################################
def get_peak_ecg(ecg, sfreq=1017.25, flow=10, fhigh=20,
                 pct_thresh=95.0, default_peak2peak_min=0.5,
                 event_id=999):
    '''
    Detect R-peak sample indices in single-channel ECG data.

    Parameters
    ----------
    ecg : 1d array
        The raw ECG channel data.
    sfreq : float
        Sampling frequency in Hz.
    flow, fhigh : float
        Band-pass edges (Hz) applied to suppress drifts and noise.
    pct_thresh : float
        Percentile threshold on the TKEO signal used to accept peaks.
    default_peak2peak_min : float
        Minimum allowed distance between adjacent peaks in seconds.
    event_id : int
        Event code written into the returned event matrix.

    Returns
    -------
    ecg_events : 2d int array, shape (n_peaks, 3)
        MNE-style event matrix [sample, 0, event_id], or -1 on failure.
    '''
    # -------------------------------------------
    # import necessary modules
    # -------------------------------------------
    from mne.filter import filter_data
    from jumeg.jumeg_math import calc_tkeo
    from scipy.signal import argrelextrema as extrema
    # -------------------------------------------
    # filter ECG to get rid of noise and drifts
    # -------------------------------------------
    fecg = filter_data(ecg, sfreq, flow, fhigh,
                       n_jobs=1, method='fft')
    ecg_abs = np.abs(fecg)
    # -------------------------------------------
    # apply Teager Kaiser energie Operator (TKEO)
    # -------------------------------------------
    tk_ecg = calc_tkeo(fecg)
    # -------------------------------------------
    # find all peaks of abs(ECG)
    # since we don't know if the lead has a
    # positive or negative R-peak
    # -------------------------------------------
    ixpeak = extrema(tk_ecg, np.greater, axis=0)
    # -------------------------------------------
    # threshold for |R-peak|
    # -------------------------------------------
    peak_thresh_min = np.percentile(tk_ecg, pct_thresh, axis=0)
    ix = np.where(tk_ecg[ixpeak] > peak_thresh_min)[0]
    npeak = len(ix)
    if (npeak > 1):
        ixpeak = ixpeak[0][ix]
    else:
        return -1
    # -------------------------------------------
    # threshold for max Amplitude of R-peak
    # fixed to: median + 3*stddev
    # -------------------------------------------
    mag = fecg[ixpeak]
    mag_mean = np.median(mag)
    if (mag_mean > 0):
        nstd = 3
    else:
        nstd = -3
    peak_thresh_max = mag_mean + nstd * np.std(mag)
    ix = np.where(ecg_abs[ixpeak] < np.abs(peak_thresh_max))[0]
    npeak = len(ix)
    if (npeak > 1):
        ixpeak = ixpeak[ix]
    else:
        return -1
    # -------------------------------------------
    # => test if the R-peak is positive or negative
    # => we assume the R-peak is the largest peak !!
    #
    # ==> sometimes we have outliers and we should check
    #     the number of npos and nneg peaks -> which is larger? -> not done yet
    #     -> we assume at least 2 peaks -> maybe we should check the ratio
    # -------------------------------------------
    ixp = np.where(fecg[ixpeak] > 0)[0]
    npos = len(ixp)
    ixn = np.where(fecg[ixpeak] < 0)[0]
    # BUGFIX: was 'nneg = len(ixp)' — the count of *positive* peaks was used
    # for the negative-peak branch below.
    nneg = len(ixn)
    if (npos == 0 and nneg == 0):
        # BUGFIX: removed leftover interactive debugger (pdb.set_trace);
        # no peaks of either polarity means detection failed.
        return -1
    if (npos > 3):
        peakval_pos = np.abs(np.median(ecg[ixpeak[ixp]]))
    else:
        peakval_pos = 0
    if (nneg > 3):
        peakval_neg = np.abs(np.median(ecg[ixpeak[ixn]]))
    else:
        peakval_neg = 0
    if (peakval_pos > peakval_neg):
        ixpeak = ixpeak[ixp]
        ecg_pos = ecg
    else:
        ixpeak = ixpeak[ixn]
        ecg_pos = - ecg
    npeak = len(ixpeak)
    if (npeak < 1):
        return -1
    # -------------------------------------------
    # check if we have peaks too close together
    # -------------------------------------------
    peak_ecg = ixpeak/sfreq
    dur = (np.roll(peak_ecg, -1)-peak_ecg)
    ix = np.where(dur > default_peak2peak_min)[0]
    npeak = len(ix)
    if (npeak < 1):
        return -1
    ixpeak = np.append(ixpeak[0], ixpeak[ix])
    peak_ecg = ixpeak/sfreq
    dur = (peak_ecg-np.roll(peak_ecg, 1))
    ix = np.where(dur > default_peak2peak_min)[0]
    npeak = len(ix)
    if (npeak < 1):
        return -1
    ixpeak = np.unique(np.append(ixpeak, ixpeak[ix[npeak-1]]))
    npeak = len(ixpeak)
    # -------------------------------------------
    # search around each peak if we find
    # higher peaks in a range of 0.1 s
    # -------------------------------------------
    seg_length = np.ceil(0.1 * sfreq)
    for ipeak in range(0, npeak-1):
        idx = [int(np.max([ixpeak[ipeak] - seg_length, 0])),
               int(np.min([ixpeak[ipeak]+seg_length, len(ecg)]))]
        idx_want = np.argmax(ecg_pos[idx[0]:idx[1]])
        ixpeak[ipeak] = idx[0] + idx_want
    # -------------------------------------------
    # build MNE-style event matrix [sample, 0, event_id]
    # -------------------------------------------
    ecg_events = np.c_[ixpeak, np.zeros(npeak),
                       np.zeros(npeak)+event_id]
    return ecg_events.astype(int)
#######################################################
#
# make surrogates CTPS phase trials
#
#######################################################
def make_surrogates_ctps(phase_array, nrepeat=1000, mode='shuffle', n_jobs=4,
                         verbose=None):
    ''' calculate surrogates from an array of (phase) trials
        by means of shuffling the phase

    Parameters
    ----------
    phase_trial : 4d ndarray of dimension [nfreqs x ntrials x nchan x nsamples]

    Optional:
    nrepeat: number of surrogate repetitions to generate (default 1000)
    mode: 2 different modi are allowed.
        'mode=shuffle' will randomly shuffle the phase values. This is the default
        'mode=shift' will randomly shift the phase values
    n_jobs: number of cpu nodes to use
    verbose: verbose level (does not work yet)

    Returns
    -------
    pt : shuffled phase trials
        Kuiper pk values of the surrogates, shape [nfreq x nrepeat x nsources x nsamples].
    '''
    from joblib import Parallel, delayed
    from mne.parallel import parallel_func
    from mne.preprocessing.ctps_ import kuiper
    nfreq, ntrials, nsources, nsamples = phase_array.shape
    # Result buffer for the Kuiper pk statistic of every surrogate.
    pk = np.zeros((nfreq, nrepeat, nsources, nsamples), dtype='float32')
    # create surrogates: parallised over nrepeats
    parallel, my_kuiper, _ = parallel_func(kuiper, n_jobs, verbose=verbose)
    for ifreq in range(nfreq):
        for isource in range(nsources):
            # print ">>> working on frequency: ",bp[ifreq,:]," source: ",isource+1
            print(">>> working on frequency range: ",ifreq + 1," source: ",isource + 1)
            pt = phase_array[ifreq, :, isource, :]  # extract [ntrials, nsamp]
            # shuffle_data / shift_data are module-level helpers defined
            # elsewhere in this file; each call produces one surrogate.
            if(mode=='shuffle'):
                # shuffle phase values for all repetitions
                pt_s = Parallel(n_jobs=n_jobs, verbose=0)(delayed(shuffle_data)
                                (pt) for i in range(nrepeat))
            else:
                # shift all phase values for all repetitions
                pt_s = Parallel(n_jobs=n_jobs, verbose=0)(delayed(shift_data)
                                (pt) for i in range(nrepeat))
            # calculate Kuiper's statistics for each phase array
            out = parallel(my_kuiper(i) for i in pt_s)
            # store stat and pk in different arrays
            out = np.array(out, dtype='float32')
            # ks[ifreq,:,isource,:] = out[:,0,:]  # is actually not needed
            # keep only the pk statistic (index 1 of kuiper's output)
            pk[ifreq, :, isource, :] = out[:, 1, :]  # [nrepeat, pk_idx, nsamp]
    return pk
#######################################################
#
# calc stats on CTPS surrogates
#
#######################################################
def get_stats_surrogates_ctps(pksarr, verbose=False):
    ''' calculates some stats on the CTPS pk values obtain from surrogate tests.

    Parameters
    ----------
    pksarr : 4d ndarray of dimension [nfreq x nrepeat x nsources x nsamples]

    Optional:
    verbose: print some information on stdout

    Returns
    -------
    stats : stats info stored in a python dictionary
        Per-frequency min/max/mean/std plus global stats and the
        99 / 99.9 / 99.99 percentiles.
    '''
    import os
    import numpy as np
    nfreq, nrepeat, nsources, nsamples = pksarr.shape
    pks = np.reshape(pksarr, (nfreq, nrepeat * nsources * nsamples))  # [nsource * nrepeat, nbp]
    # stats for each frequency band
    pks_max = pks.max(axis=1)
    pks_min = pks.min(axis=1)
    pks_mean = pks.mean(axis=1)
    pks_std = pks.std(axis=1)
    # global stats
    pks_max_global = pks.max()
    pks_min_global = pks.min()
    pks_mean_global = pks.mean()
    pks_std_global = pks.std()
    pks_pct99_global = np.percentile(pksarr, 99)
    pks_pct999_global = np.percentile(pksarr, 99.9)
    pks_pct9999_global = np.percentile(pksarr, 99.99)
    # collect info and store into dictionary
    stats = {
        'path': os.getcwd(),
        'fname': 'CTPS surrogates',
        'nrepeat': nrepeat,
        'nfreq': nfreq,
        'nsources': nsources,
        'nsamples': nsamples,
        'pks_min': pks_min,
        'pks_max': pks_max,
        'pks_mean': pks_mean,
        'pks_std': pks_std,
        'pks_min_global': pks_min_global,
        'pks_max_global': pks_max_global,
        'pks_mean_global': pks_mean_global,
        'pks_std_global': pks_std_global,
        'pks_pct99_global': pks_pct99_global,
        'pks_pct999_global': pks_pct999_global,
        'pks_pct9999_global': pks_pct9999_global
        }
    # mean and std dev
    if (verbose):
        print('>>> Stats from CTPS surrogates <<<')
        for i in range(nfreq):
            #print ">>> filter raw data: %0.1f - %0.1f..." % (flow, fhigh)
            print('freq: ',i + 1, 'max/mean/std: ', pks_max[i], pks_mean[i], pks_std[i])
        print()
        print('overall stats:')
        # BUGFIX: these prints referenced nonexistent names (pks_global_max
        # etc.) and raised NameError; the variables are pks_max_global etc.
        print('max/mean/std: ', pks_max_global, pks_mean_global, pks_std_global)
        print('99th percentile: ', pks_pct99_global)
        print('99.90th percentile: ', pks_pct999_global)
        print('99.99th percentile: ', pks_pct9999_global)
    return stats
###########################################################
#
# These functions copied from NIPY (http://nipy.org/nitime)
#
###########################################################
def threshold_arr(cmat, threshold=0.0, threshold2=None):
    """Threshold values from the input array.

    Parameters
    ----------
    cmat : array
    threshold : float, optional.
        First threshold.
    threshold2 : float, optional.
        Second threshold.

    Returns
    -------
    indices, values: a tuple with ndim+1 entries — the index arrays of the
    selected elements followed by the selected values themselves.

    With one threshold, values strictly above it are kept; with two
    thresholds, values outside the open interval (threshold, threshold2)
    are kept.
    """
    # Pick the band of values to *exclude*; everything outside is returned.
    if threshold2 is None:
        lo, hi = -np.inf, threshold
    else:
        lo, hi = threshold, threshold2
    keep = np.where((cmat < lo) | (cmat > hi))
    return keep + (cmat[keep],)
def thresholded_arr(arr, threshold=0.0, threshold2=None, fill_val=np.nan):
    """Threshold values from the input matrix and return a new matrix.

    Parameters
    ----------
    arr : array
    threshold : float
        First threshold.
    threshold2 : float, optional.
        Second threshold.
    fill_val : scalar
        Value written where the threshold test fails (default NaN).

    Returns
    -------
    An array shaped like the input, with the values outside the threshold
    replaced with fill_val.
    """
    out = np.empty_like(arr)
    out.fill(fill_val)
    # threshold_arr returns (idx0, idx1, ..., values); split indices/values.
    *idx, vals = threshold_arr(arr, threshold, threshold2)
    out[tuple(idx)] = vals
    return out
def rescale_arr(arr, amin, amax):
    """Rescale an array to a new range.

    Return a new array whose range of values is (amin,amax).

    Parameters
    ----------
    arr : array-like
    amin : float
        new minimum value
    amax : float
        new maximum value

    Examples
    --------
    >>> a = np.arange(5)
    >>> rescale_arr(a,3,6)
    array([ 3.  ,  3.75,  4.5 ,  5.25,  6.  ])
    """
    lo, hi = arr.min(), arr.max()
    scale = float(amax - amin) / (hi - lo)
    shift = amin - scale * lo
    # Clip guards against roundoff pushing values just outside [amin, amax].
    return np.clip(scale * arr + shift, amin, amax)
def mask_indices(n, mask_func, k=0):
    """Return the indices to access (n,n) arrays, given a masking function.

    mask_func must behave like numpy.triu/tril: called as mask_func(a, k) on
    a square array it returns an array with zeros in the masked-out places.
    This function returns the indices where the non-zero values would be.

    Parameters
    ----------
    n : int
        The returned indices will be valid to access arrays of shape (n,n).
    mask_func : callable
        A function whose api is similar to that of numpy.tri{u,l}.
    k : scalar
        Optional offset passed through to mask_func (e.g. diagonal offset
        for triu/tril).

    Returns
    -------
    indices : an n-tuple of index arrays.
        The indices where mask_func(ones((n,n)), k) is non-zero.

    Examples
    --------
    >>> iu = mask_indices(3, np.triu)
    >>> a = np.arange(9).reshape(3, 3)
    >>> a[iu]
    array([0, 1, 2, 4, 5, 8])
    """
    ones = np.ones((n, n), int)
    return np.where(mask_func(ones, k) != 0)
def triu_indices(n, k=0):
    """Return the indices for the upper-triangle of an (n,n) array.

    Parameters
    ----------
    n : int
        Sets the size of the arrays for which the returned indices will be valid.
    k : int, optional
        Diagonal offset (see triu() for details).

    Examples
    --------
    Compute two different sets of indices to access 4x4 arrays, one for the
    upper triangular part starting at the main diagonal, and one starting two
    diagonals further right:

    >>> iu1 = triu_indices(4)
    >>> iu2 = triu_indices(4,2)

    Here is how they can be used with a sample array:
    >>> a = np.array([[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]])
    >>> a
    array([[ 1,  2,  3,  4],
           [ 5,  6,  7,  8],
           [ 9, 10, 11, 12],
           [13, 14, 15, 16]])

    Both for indexing:
    >>> a[iu1]
    array([ 1,  2,  3,  4,  6,  7,  8, 11, 12, 16])

    And for assigning values:
    >>> a[iu1] = -1
    >>> a
    array([[-1, -1, -1, -1],
           [ 5, -1, -1, -1],
           [ 9, 10, -1, -1],
           [13, 14, 15, -1]])

    These cover almost the whole array (two diagonals right of the main one):
    >>> a[iu2] = -10
    >>> a
    array([[ -1,  -1, -10, -10],
           [  5,  -1,  -1, -10],
           [  9,  10,  -1,  -1],
           [ 13,  14,  15,  -1]])

    See also
    --------
    - tril_indices : similar function, for lower-triangular.
    - mask_indices : generic function accepting an arbitrary mask function.
    """
    # Delegates to the generic mask helper with numpy's triu as the mask.
    return mask_indices(n, np.triu, k)
def create_dummy_raw(data, ch_types, sfreq, ch_names, save=False,
                     raw_fname='output.fif'):
    '''
    Quickly build an mne.io.Raw object around the data provided.
    Inspired from https://gist.github.com/dengemann/e9b45f2ff3e3380907d3

    Parameters
    ----------
    data: ndarray, shape (n_channels, n_times)
    ch_types: list eg. ['misc'], ['eeg'] or ['meg']
    sfreq: float
        Sampling frequency.
    ch_names: list
        List of channel names.
    save : bool
        If True, the raw object will be saved as a fif. file.
    raw_fname : str
        If save is True, the name of the saved fif file.

    Returns
    -------
    raw : Instance of mne.io.Raw

    Example
    -------
    rng = np.random.RandomState(42)
    data = rng.random_sample((248, 2000))
    sfreq = 1e3
    ch_types = ['misc'] * 248
    ch_names = ['MISC {:03d}'.format(i + 1) for i in range(len(ch_types))]
    raw = create_dummy_raw(data, ch_types, sfreq, ch_names)
    '''
    meta = mne.create_info(ch_names=ch_names, sfreq=sfreq, ch_types=ch_types)
    raw = mne.io.RawArray(data, meta)
    if save:
        raw.save(raw_fname)
    return raw
def create_dummy_epochs(data, events, ch_types, sfreq, ch_names, save=False,
                        epochs_fname='output-epo.fif'):
    '''
    Quickly build an mne.Epochs object around the data provided.
    Inspired from https://gist.github.com/dengemann/e9b45f2ff3e3380907d3

    Parameters
    ----------
    data: ndarray, shape (n_channels, n_times)
    events: ndarray (n_events, 3)
        As returned by mne.find_events
    ch_types: list eg. ['misc'], ['eeg'] or ['meg']
    sfreq: float
        Sampling frequency.
    ch_names: list
        List of channel names.
    save : bool
        If True, the epochs object will be saved as a fif. file.
    epochs_fname : str
        If save is True, the name of the saved fif file.

    Returns
    -------
    epochs : Instance of mne.Epochs

    Example
    -------
    rng = np.random.RandomState(42)
    data = rng.random_sample((248, 2000))
    sfreq = 1e3
    ch_types = ['misc'] * 248
    ch_names = ['MISC {:03d}'.format(i + 1) for i in range(len(ch_types))]
    # make event with - event id 42, 10 events of duration 100 s each, 0 stim signal
    events = np.array((np.arange(0, 1000, 100), np.zeros((10)), np.array([42] * 10))).T
    epochs = create_dummy_epochs(data, events, ch_types, sfreq, ch_names)
    '''
    meta = mne.create_info(ch_names=ch_names, sfreq=sfreq, ch_types=ch_types)
    epochs = mne.EpochsArray(data, meta, events)
    if save:
        epochs.save(epochs_fname)
    return epochs
def put_pngs_into_html(regexp, html_out='output.html'):
    '''Lists all files in directory that matches pattern regexp
    and puts it into an html file with filename included.

    regexp : str
        String of dir path like '/home/kalka/*.png'
    html_out : str
        Output file name
    '''
    import glob
    files = glob.glob(regexp)
    html_string = ''
    for fname in files:
        # One caption + image tag per matched file.
        my_string = '<body><p>%s</p></body>' % (fname) + '\n' + '<img src=%s>' % (fname) + '\n'
        html_string += my_string
    message = """<html>
    <head></head>
    %s
    </html>""" % (html_string)
    # FIX: use a context manager so the handle is closed even if write fails.
    with open(html_out, 'w') as f:
        f.write(message)
def crop_images(regexp, crop_dims=(150, 150, 1450, 700), extension='crop'):
    '''Crop every image whose path matches pattern regexp and save the
    result next to the original with `extension` appended to the name.

    regexp : str
        String of dir path like '/home/kalka/*.png'
    crop_dims : box tuple
        Dimensions to crop image (using PIL)
        (left, upper, right, lower) pixel values
    extension : str
        Output file name will be appended with extension.
    '''
    import glob
    try:
        from PIL import Image
    except ImportError:
        raise RuntimeError('For this method to work the PIL library is'
                           ' required.')
    files = glob.glob(regexp)
    for fname in files:
        orig = Image.open(fname)
        out_fname = op.splitext(fname)[0] + ',' + extension +\
            op.splitext(fname)[1]
        # BUGFIX: previously cropped with the hard-coded box
        # (150, 150, 1450, 700), silently ignoring the crop_dims argument.
        cropim = orig.crop(crop_dims)
        print('Saving cropped image at %s' % out_fname)
        cropim.save(out_fname, fname.split('.')[1])
def check_env_variables(env_variable=None, key=None):
    '''Check the most important environment variables as
    (keys) - SUBJECTS_DIR, MNE_ROOT and FREESURFER_HOME.

    e.g. subjects_dir = check_env_variable(subjects_dir, key='SUBJECTS_DIR')
    A provided env_variable takes priority over the environment; otherwise
    the environment value for `key` is used. If neither exists, or the
    resulting path is not a directory, the process exits with a message.
    '''
    valid_key = key is not None and isinstance(key, str)
    if not valid_key:
        print('Please provide the key. Currently '
              'SUBJECTS_DIR, MNE_ROOT and FREESURFER_HOME as strings are allowed.')
        sys.exit()
    if env_variable:
        # Explicit value given: export it so downstream tools see it too.
        os.environ[key] = env_variable
    elif env_variable is None and key in os.environ:
        env_variable = os.environ[key]
    else:
        print('Please set the %s' % (key))
        sys.exit()
    if not os.path.isdir(env_variable):
        print('Path %s is not a valid directory. Please check.' % (env_variable))
        sys.exit()
    return env_variable
def convert_annot2labels(annot_fname, subject='fsaverage', subjects_dir=None,
                         freesurfer_home=None):
    '''
    Convert an annotation to labels for a single subject for both hemispheres.
    The labels are written to '$SUBJECTS_DIR/$SUBJECT/label'.

    Parameters
    ----------
    annot_fname: str
        The name of the annotation (or parcellation).
    subject: str
        Subject name. Default is the fresurfer fsaverage.
    subjects_dir: str
        The subjects directory, if not provided, then the
        environment value is used.
    freesurfer_home: str
        The freeesurfer home path, if not provided, the
        environment value is used.

    Reference
    ---------
    https://surfer.nmr.mgh.harvard.edu/fswiki/mri_annotation2label
    '''
    from subprocess import call
    subjects_dir = check_env_variables(subjects_dir, key='SUBJECTS_DIR')
    freesurfer_home = check_env_variables(freesurfer_home, key='FREESURFER_HOME')
    # os.path.join(..., '') yields a trailing separator on freesurfer_bin.
    freesurfer_bin = os.path.join(freesurfer_home, 'bin', '')
    outdir = os.path.join(subjects_dir, subject, 'label')
    print('Convert annotation %s to labels' % (annot_fname))
    for hemi in ['lh', 'rh']:
        # CONSISTENCY FIX: dropped the extra '/' before the binary name —
        # freesurfer_bin already ends with a separator (matches
        # convert_label2label below).
        retcode = call([freesurfer_bin + 'mri_annotation2label', '--subject', subject, '--hemi', hemi,
                        '--annotation', annot_fname, '--outdir', outdir])
        if retcode != 0:
            retcode_error('mri_annotation2label')
            continue
def convert_label2label(annot_fname, subjects_list, srcsubject='fsaverage',
                        subjects_dir=None, freesurfer_home=None):
    '''
    Python wrapper for Freesurfer mri_label2label function.
    Converts all labels in annot_fname from source subject to target subject
    given the subjects directory. Both hemispheres are considered.
    The registration method used it surface.

    Parameters
    ----------
    annot_fname: str
        The name of the annotation (or parcellation).
    subjects_list: list or str
        Subject names to which the labels have to be transformed to (the target subjects).
        Can be provided as a list or a string.
    srcsubject: str
        The name of the source subject to be used. The source subject should
        contain the labels in the correct folders already. Default - fsaverage.
    subjects_dir: str
        The subjects directory, if not provided, then the
        environment value is used.
    freesurfer_home: str
        The freeesurfer home path, if not provided, the
        environment value is used.

    Reference:
    https://surfer.nmr.mgh.harvard.edu/fswiki/mri_label2label
    '''
    if subjects_list:
        subjects_list = get_files_from_list(subjects_list)
    else:
        raise RuntimeError('No subjects are specified.')
    subjects_dir = check_env_variables(subjects_dir, key='SUBJECTS_DIR')
    freesurfer_home = check_env_variables(freesurfer_home, key='FREESURFER_HOME')
    freesurfer_bin = os.path.join(freesurfer_home, 'bin', '')
    # obtain the names of labels in parcellation
    from mne.label import read_labels_from_annot
    labels = read_labels_from_annot(srcsubject, parc=annot_fname)
    # BUGFIX: compare strings with '==', not 'is' (identity comparison with
    # a literal is implementation-dependent and a SyntaxWarning on py3.8+).
    lnames = [l.name.rsplit('-')[0] if l.hemi == 'lh' else '' for l in labels]
    lnames = [_f for _f in lnames if _f]  # remove empty strings
    # convert the labels from source subject to target subject
    from subprocess import call
    for subj in subjects_list:
        # the target subject is subj provided
        print('Converting labels from %s to %s' % (srcsubject, subj))
        for label in lnames:
            for hemi in ['lh', 'rh']:
                srclabel = os.path.join(subjects_dir, srcsubject, 'label', hemi + '.' + label + '.label')
                trglabel = os.path.join(subjects_dir, subj, 'label', hemi + '.' + label + '.label')
                retcode = call([freesurfer_bin + 'mri_label2label', '--srclabel', srclabel, '--srcsubject', srcsubject,
                                '--trglabel', trglabel, '--trgsubject', subj, '--regmethod', 'surface', '--hemi', hemi])
                if retcode != 0:
                    retcode_error('mri_label2label')
                    continue
    print('Labels for %d subjects have been transformed from source %s' %(len(subjects_list), srcsubject))
def get_cmap(N, cmap='hot'):
    '''Returns a function that maps each index in 0, 1, ... N-1 to a distinct
    RGB color. Can be used to generate N unique colors from a colormap.

    Usage:
        my_colours = get_cmap(3)
        for i in range(3):
            # print the RGB value of each of the colours
            print(my_colours(i))
    '''
    import matplotlib.cm as cmx
    import matplotlib.colors as colors
    norm = colors.Normalize(vmin=0, vmax=N - 1)
    mappable = cmx.ScalarMappable(norm=norm, cmap=cmap)
    # Closure over the ScalarMappable: index -> RGBA tuple.
    return lambda index: mappable.to_rgba(index)
def subtract_overlapping_vertices(label, labels):
    '''
    Remove from `label` all vertices it shares with any other label in
    `labels` (same hemisphere only). Returns the reduced label, or None
    when no vertices remain.

    label : instance of mne.Label
    labels : list of labels
    '''
    for other in labels:
        if other is label or other.hemi != label.hemi:
            continue
        if np.intersect1d(other.vertices, label.vertices).size > 0:
            label = label - other
    if label.vertices.size > 0:
        return label
    print('Label has no vertices left ')
    return None
def apply_percentile_threshold(in_data, percentile):
    ''' Zero out (in place) all values at or below the given percentile
    and return the modified array. '''
    cutoff = np.percentile(in_data, percentile)
    in_data[in_data <= cutoff] = 0.
    return in_data
def channel_indices_from_list(fulllist, findlist, excllist=None):
    """Get indices of matching channel names from list

    Parameters
    ----------
    fulllist: list of channel names
    findlist: list of (regexp) names to find
              regexp are resolved using mne.pick_channels_regexp()
    excllist: list of channel names to exclude,
              e.g., raw.info.get('bads')

    Returns
    -------
    chnpick: array with indices
    """
    chnpick = []
    for ir in range(len(findlist)):
        # NOTE(review): translate() with an empty table is a no-op; the
        # original (py2) code likely stripped characters here — confirm intent.
        if findlist[ir].translate(str.maketrans('', '')).isalnum():
            try:
                chnpicktmp = ([fulllist.index(findlist[ir])])
                chnpick = np.array(np.concatenate((chnpick, chnpicktmp), axis=0),
                                   dtype=int)
            except ValueError:
                # BUGFIX: narrowed the former bare 'except:' — list.index()
                # raises ValueError when the name is missing.
                print(">>>>> Channel '%s' not found." % findlist[ir])
        else:
            # Non-alphanumeric entries are treated as regular expressions.
            chnpicktmp = (mne.pick_channels_regexp(fulllist, findlist[ir]))
            if len(chnpicktmp) == 0:
                print(">>>>> '%s' does not match any channel name." % findlist[ir])
            else:
                chnpick = np.array(np.concatenate((chnpick, chnpicktmp), axis=0),
                                   dtype=int)
    if len(chnpick) > 1:
        # Remove duplicates
        chnpick = np.sort(np.array(list(set(np.sort(chnpick)))))
    if excllist is not None and len(excllist) > 0:
        exclinds = [fulllist.index(excllist[ie]) for ie in range(len(excllist))]
        chnpick = list(np.setdiff1d(chnpick, exclinds))
    return chnpick
def time_shuffle_slices(fname_raw, shufflechans=None, tmin=None, tmax=None):
    """Permute time slices for specified channels.

    Parameters
    ----------
    fname_raw : (list of) rawfile names

    shufflechans : list of string
              List of channels to shuffle.
              If empty use the meg, ref_meg, and eeg channels.
              shufflechans may contain regexp, which are resolved
              using mne.pick_channels_regexp().
              All other channels are copied.

    tmin : lower latency bound for shuffle region [start of trace]
    tmax : upper latency bound for shuffle region [ end  of trace]
           Slice shuffling can be restricted to one region in the file,
           the remaining parts will contain plain copies.

    Outputfile
    ----------
    <wawa>,tperm-raw.fif for input <wawa>-raw.fif

    Returns
    -------
    TBD

    Bugs
    ----
    - it's the user's responsibility to keep track of shuffled chans
    - needs to load the entire data set for operation

    TODO
    ----
    Return raw object and indices of time shuffled channels.
    """
    from math import floor, ceil
    from mne.io.pick import pick_types, channel_indices_by_type

    fnraw = get_files_from_list(fname_raw)

    # loop across all filenames
    for fname in fnraw:
        if not op.isfile(fname):
            print('Exiting. File not present ', fname)
            sys.exit()
        raw = mne.io.Raw(fname, preload=True)
        # time window selection
        # slices are shuffled in [tmin,tmax], but the entire data set gets copied.
        if tmin is None:
            tmin = 0.
        if tmax is None:
            tmax = (raw.last_samp - raw.first_samp) / raw.info['sfreq']
        itmin = int(floor(tmin * raw.info['sfreq']))
        itmax = int(ceil(tmax * raw.info['sfreq']))
        if itmax-itmin < 1:
            raise ValueError("Time-window for slice shuffling empty/too short")
        print(">>> Set time-range to [%7.3f, %7.3f]" % (tmin, tmax))

        # pick the channels whose slices will be permuted; all others copied
        if shufflechans is None or len(shufflechans) == 0:
            shflpick = mne.pick_types(raw.info, meg=True, ref_meg=True,
                                      eeg=True, eog=False, stim=False)
        else:
            shflpick = channel_indices_from_list(raw.info['ch_names'][:],
                                                 shufflechans)

        nshfl = len(shflpick)
        if nshfl == 0:
            raise ValueError("No channel selected for slice shuffling")

        totbytype = ''
        shflbytype = ''
        # NOTE(review): this rebinding shadows the imported helper of the
        # same name for the rest of the loop body.
        channel_indices_by_type = mne.io.pick.channel_indices_by_type(raw.info)
        # build per-channel-type summary strings (total vs. shuffled counts)
        for k in list(channel_indices_by_type.keys()):
            tot4key = len(channel_indices_by_type[k][:])
            if tot4key>0:
                totbytype = totbytype + "%s:" % k + \
                    "%c%dd " % ('%', int(ceil(np.log10(tot4key+1)))) % tot4key
                shflbytype = shflbytype + "%s:" % k + \
                    "%c%dd " % ('%', int(ceil(np.log10(tot4key+1)))) % \
                    len(np.intersect1d(shflpick, channel_indices_by_type[k][:]))
        print(">>> %3d channels in file:  %s" % (len(raw.info['chs']), totbytype))
        print(">>> %3d channels shuffled: %s" % (len(shflpick), shflbytype))

        print("Calc shuffle-array...")
        numslice = raw._data.shape[1]
        # identity index map, permuted only inside [itmin, itmax)
        lselbuf = np.arange(numslice)
        lselbuf[itmin:itmax] = itmin + np.random.permutation(itmax-itmin)

        print("Shuffling slices for selected channels:")
        data, times = raw[:, 0:numslice]
        # work on entire data stream
        for isl in range(raw._data.shape[1]):
            slice = np.take(raw._data, [lselbuf[isl]], axis=1)
            data[shflpick, isl] = slice[shflpick].flatten()

        # copy data to raw._data
        for isl in range(raw._data.shape[1]):
            raw._data[:, isl] = data[:, isl]

        shflname = os.path.join(os.path.dirname(fname),
                                os.path.basename(fname).split('-')[0]) + ',tperm-raw.fif'
        print("Saving '%s'..." % shflname)
        raw.save(shflname, overwrite=True)
    return
def rescale_data(data, times, baseline, mode='mean', copy=True, verbose=None):
    """Rescale aka baseline correct data.

    Parameters
    ----------
    data : array
        It can be of any shape. The only constraint is that the last
        dimension should be time.
    times : 1D array
        Time instants is seconds.
    baseline : tuple or list of length 2, ndarray or None
        The time interval to apply rescaling / baseline correction.
        If None do not apply it. If baseline is ``(bmin, bmax)``
        the interval is between ``bmin`` (s) and ``bmax`` (s).
        If ``bmin is None`` the beginning of the data is used
        and if ``bmax is None`` then ``bmax`` is set to the end of the
        interval. If baseline is ``(None, None)`` the entire time
        interval is used.
        If baseline is an array, then the given array will
        be used for computing the baseline correction i.e. the mean will be
        computed from the array provided. The array has to be the same length
        as the time dimension of the data. (Use case: if different prestim baseline
        needs to be applied on evoked signals around the response)
        If baseline is None, no correction is applied.
    mode : None | 'ratio' | 'zscore' | 'mean' | 'percent' | 'logratio' | 'zlogratio' # noqa
        Do baseline correction with ratio (power is divided by mean
        power during baseline) or zscore (power is divided by standard
        deviation of power during baseline after subtracting the mean,
        power = [power - mean(power_baseline)] / std(power_baseline)), mean
        simply subtracts the mean power, percent is the same as applying ratio
        then mean, logratio is the same as mean but then rendered in log-scale,
        zlogratio is the same as zscore but data is rendered in log-scale
        first.
        If None no baseline correction is applied.
    copy : bool
        Whether to return a new instance or modify in place.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    data_scaled: array
        Array of same shape as data after rescaling.

    Note
    ----
    Function taken from mne.baseline.rescale in mne-python.
    (https://github.com/mne-tools/mne-python)
    """
    data = data.copy() if copy else data
    from mne.baseline import _log_rescale
    _log_rescale(baseline, mode)

    if baseline is None:
        return data

    if isinstance(baseline, np.ndarray):
        if times.size == baseline.size:
            # use baseline array as data
            use_array = baseline
        else:
            raise ValueError('Size of times and baseline should be the same')
    else:
        # Convert (bmin, bmax) in seconds to index bounds [imin, imax).
        bmin, bmax = baseline
        if bmin is None:
            imin = 0
        else:
            imin = np.where(times >= bmin)[0]
            if len(imin) == 0:
                raise ValueError('bmin is too large (%s), it exceeds the largest '
                                 'time value' % (bmin,))
            imin = int(imin[0])
        if bmax is None:
            imax = len(times)
        else:
            imax = np.where(times <= bmax)[0]
            if len(imax) == 0:
                raise ValueError('bmax is too small (%s), it is smaller than the '
                                 'smallest time value' % (bmax,))
            imax = int(imax[-1]) + 1
        if imin >= imax:
            raise ValueError('Bad rescaling slice (%s:%s) from time values %s, %s'
                             % (imin, imax, bmin, bmax))
        use_array = data[..., imin:imax]
    # avoid potential "empty slice" warning
    if data.shape[-1] > 0:
        mean = np.mean(use_array, axis=-1)[..., None]
    else:
        mean = 0  # otherwise we get an ugly nan
    # mode values are mutually exclusive, so the plain 'if's below behave
    # like an elif chain.
    if mode == 'mean':
        data -= mean
    if mode == 'logratio':
        data /= mean
        data = np.log10(data)  # a value of 1 means 10 times bigger
    if mode == 'ratio':
        data /= mean
    elif mode == 'zscore':
        std = np.std(use_array, axis=-1)[..., None]
        data -= mean
        data /= std
    elif mode == 'percent':
        data -= mean
        data /= mean
    elif mode == 'zlogratio':
        data /= mean
        data = np.log10(data)
        std = np.std(use_array, axis=-1)[..., None]
        data /= std

    return data
def rank_estimation(data):
    '''
    Estimate the rank of the data using several different rank estimators.

    Parameters
    ----------
    data : ndarray, shape (n_channels, n_timeslices)
        The data matrix whose rank is to be estimated.

    Returns
    -------
    (rank_all, median_rank) : tuple
        rank_all is an array with the estimates from MIBS, BIC, GAP, AIC,
        MDL and the 95% / 99% explained-variance criteria (in that order);
        median_rank is the median over all seven estimates.
    '''
    from jumeg.decompose.ica import whitening
    from jumeg.decompose.dimension_selection import mibs, gap, aic, mdl, bic
    nchan, ntsl = data.shape
    # PCA (via whitening) yields the sorted eigenvalue spectrum
    data_w, pca = whitening(data.T)
    evals = pca.explained_variance_
    cum_ratio = pca.explained_variance_ratio_.cumsum()
    # collect all estimates: MIBS, BIC, GAP, AIC, MDL, pct95, pct99
    estimates = [
        mibs(evals, ntsl),                       # MIBS
        bic(evals, ntsl),                        # BIC
        gap(evals),                              # GAP
        aic(evals),                              # AIC
        mdl(evals),                              # MDL
        np.where(cum_ratio <= 0.95)[0].size,     # 95% explained variance
        np.where(cum_ratio <= 0.99)[0].size,     # 99% explained variance
    ]
    rank_all = np.array(estimates)
    return (rank_all, np.median(rank_all))
def clip_eog2(eog, clip_to_value):
    '''
    Clip the EOG channel at the given clip_to_value; all peaks beyond that
    value are pruned.

    A negative clip_to_value clips from below (floor), a positive value
    clips from above (ceiling).

    Note: this may be used when peak detection for artefact removal fails due
    to abnormally high peaks in the EOG channel.

    Can be applied to a raw file using the below code:
        # apply the above function to one channel (here 276) of the raw object
        raw.apply_function(clip_eog2, clip_to_value=clip_to_value, picks=[276],
                           dtype=None, n_jobs=2)
        # save the raw file
        raw.save(raw.info['filename'].split('-raw.fif')[0] + ',eogclip-raw.fif',
                 overwrite=False)

    Parameters
    ----------
    eog : ndarray
        The EOG channel data.
    clip_to_value : float
        Non-zero clipping threshold (sign selects floor vs. ceiling).

    Returns
    -------
    eog_clipped : ndarray
        The clipped EOG data.

    Raises
    ------
    ValueError
        If clip_to_value is zero (direction of clipping would be ambiguous).
    '''
    if clip_to_value < 0:
        # negative threshold: prune everything below it (keep the maximum)
        eog_clipped = np.clip(eog, clip_to_value, np.max(eog))
    elif clip_to_value > 0:
        # positive threshold: prune everything above it (keep the minimum)
        eog_clipped = np.clip(eog, np.min(eog), clip_to_value)
    else:
        # BUG FIX: the original printed a warning and then crashed with
        # UnboundLocalError on the return; raise a clear error instead.
        raise ValueError('Zero clip_to_value is ambiguous !! Please check again.')
    return eog_clipped
def loadingBar(count, total, task_part=None):
    """ Provides user with a loadingbar line. See following:
    041/400 [==                  ] Subtask 793
    count/total [==              ] 'task_part'
    Parameters
    ----------
    count : str, float or int
        Current task count. Easy to access throught 'enumerate()'
    total : str, float or int
        Maximal number of all tasks
    task_part : String | Optional
        If the task is divided in subtask and you want to keep track of
        your functions progress in detail pass your subtask in string format.
    Example
    -------
    array = np.linspace(1, 1000, 400)
    for p, i in enumerate(array):
        loadingBar(count=p, total=array.shape[0],
                   task_part='Subtask')
    Returns
    -------
    stdout : Rewriteable String Output
        Generates a String Output for every of the progress steps
    """
    if task_part is None:
        task_part = ''
    percent = float(count + 1) / float(total) * 100
    size = 2

    def _bar_line(label):
        # Render one in-place progress line (leading '\r', no newline):
        # the original duplicated this expression for the normal and the
        # '[done]' write; keep it in one place so the two stay in sync.
        filled = int(percent / 10)
        return ("\r "
                + str(int(count + 1)).rjust(3, '0')
                + "/" + str(int(total)).rjust(3, '0')
                + ' [' + '=' * filled * size
                + ' ' * (10 - filled) * size
                + '] %30s' % (label,))

    sys.stdout.write(_bar_line(task_part))
    if count + 1 == total:
        # final step: redraw with '[done]' and terminate the line
        sys.stdout.write(_bar_line('[done]') + '\n')
    # flush so partial (newline-free) updates appear on line-buffered streams
    sys.stdout.flush()
    return
def find_files(rootdir='.', pattern='*', recursive=False):
    '''
    Search rootdir and return a sorted list of file paths whose basenames
    match the shell-style pattern; descend into subdirectories only when
    recursive is True.
    '''
    matches = []
    for dirpath, subdirs, fnames in os.walk(rootdir):
        if not recursive:
            # emptying the dir list in place stops os.walk from descending
            subdirs[:] = []
        matches.extend(os.path.join(dirpath, name)
                       for name in fnmatch.filter(fnames, pattern))
    return sorted(matches)
def find_directories(rootdir='.', pattern='*'):
    '''
    Return a sorted list of the immediate subdirectory names of rootdir
    that match the shell-style pattern (no recursion).

    Parameters
    ----------
    rootdir : str
        Directory to search (default: current directory).
    pattern : str
        fnmatch-style pattern the directory names must match.

    Returns
    -------
    dirlist : list of str
        Matching directory names (not full paths), sorted alphabetically.
    '''
    # os.path.join replaces the original manual '/'-concatenation, and
    # the truthy isdir() result is used directly instead of '== True'
    subdirs = sorted(entry for entry in os.listdir(rootdir)
                     if os.path.isdir(os.path.join(rootdir, entry)))
    # keep only the names matching the pattern
    return fnmatch.filter(subdirs, pattern)
| {
"content_hash": "75e07c7ed275b3a00fbe0f766a7532cc",
"timestamp": "",
"source": "github",
"line_count": 1483,
"max_line_length": 119,
"avg_line_length": 33.74713418745785,
"alnum_prop": 0.5692848722201131,
"repo_name": "pravsripad/jumeg",
"id": "af120a050e53a6d77e68873839440d78c9c48448",
"size": "50047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master_dev",
"path": "jumeg/jumeg_utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2309512"
}
],
"symlink_target": ""
} |
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
class magicdictlist(dict):
    """dict subclass whose missing keys transparently become empty lists."""
    def __getitem__(self, key):
        """Return the list stored under *key*, creating an empty one on first access."""
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            fresh = list()
            dict.__setitem__(self, key, fresh)
            return fresh
    def dedupe(self):
        """Return a new magicdictlist whose lists have duplicates removed."""
        deduped = magicdictlist()
        for key in self.keys():
            deduped[key] = list(set(dict.__getitem__(self, key)))
        return deduped
def retriable_session(total=3, backoff_factor=0.3, status_forcelist=(500, 502, 504)):
    """Prepare a requests.Session that has automatic retry enabled."""
    session = requests.Session()
    retry_policy = Retry(
        total=total,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    # one shared adapter handles both schemes so they share a pool manager
    adapter = HTTPAdapter(max_retries=retry_policy)
    for scheme in ('http://', 'https://'):
        session.mount(scheme, adapter)
    return session
| {
"content_hash": "44124c62e9c4313040635733423d71e3",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 85,
"avg_line_length": 32.529411764705884,
"alnum_prop": 0.64376130198915,
"repo_name": "infinitewarp/trekipsum",
"id": "4d29467ab5be8bdf561837aa8bebdb4e46c98681",
"size": "1106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trekipsum/scrape/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3274"
},
{
"name": "Python",
"bytes": "78044"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.