sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def set_rainbow(self, duration):
"""Turn the bulb on and create a rainbow."""
for i in range(0, 359):
self.set_color_hsv(i, 100, 100)
time.sleep(duration/359) | Turn the bulb on and create a rainbow. | entailment |
def set_sunrise(self, duration):
"""Turn the bulb on and create a sunrise."""
self.set_transition_time(duration/100)
for i in range(0, duration):
try:
data = "action=on&color=3;{}".format(i)
request = requests.post(
'{}/{}/{}'.format(self.resource, URI, self._mac),
data=data, timeout=self.timeout)
if request.status_code == 200:
self.data['on'] = True
except requests.exceptions.ConnectionError:
raise exceptions.MyStromConnectionError()
time.sleep(duration/100) | Turn the bulb on and create a sunrise. | entailment |
def set_flashing(self, duration, hsv1, hsv2):
"""Turn the bulb on, flashing with two colors."""
self.set_transition_time(100)
for step in range(0, int(duration/2)):
self.set_color_hsv(hsv1[0], hsv1[1], hsv1[2])
time.sleep(1)
self.set_color_hsv(hsv2[0], hsv2[1], hsv2[2])
time.sleep(1) | Turn the bulb on, flashing with two colors. | entailment |
def set_off(self):
"""Turn the bulb off."""
try:
request = requests.post(
'{}/{}/{}/'.format(self.resource, URI, self._mac),
data={'action': 'off'}, timeout=self.timeout)
if request.status_code == 200:
pass
except requests.exceptions.ConnectionError:
raise exceptions.MyStromConnectionError() | Turn the bulb off. | entailment |
def translate_chromosome(self, chromosome):
"""
Translate all the genes in a chromosome.
chromosome: a ``chromosomes.Chromosome`` instance to translate
return: a list of translation products for the genes in the chromosome
"""
assert isinstance(chromosome, Chromosome)
return [self.translate_gene(g) for g in chromosome] | Translate all the genes in a chromosome.
chromosome: a ``chromosomes.Chromosome`` instance to translate
return: a list of translation products for the genes in the chromosome | entailment |
def translate_gene(self, gene):
"""
Translate a gene with binary DNA into a base-10 floating point real number.
Parses the DNA in this manner:
1. The first bit determines the sign of the integer portion of the result (0=positive, 1=negative)
2. The next ``significand_length`` bits of the DNA are converted into a base-10 integer,
and given a positive/negative sign based on step (1).
3. The next bit determines the sign of the exponent portion of the result (0=positive, 1=negative)
4. The remaining bits in the DNA are converted into a base-10 integer, and given a
positive/negative sign based on step (3).
5. The result of step (2) is multiplied by 10 raised to the power of the result of step (4).
Example: let DNA="001111", significand_length=3
1. "0" indicates a positive sign for the integer portion
2. "011" is converted into the base-10 integer 3, its sign stays positive due to step (1)
3. "1" indicates a negative sign for the exponent portion
4. The remaining "1" bit is converted into the base-10 integer 1 and becomes -1 due to step (3)
5. The final result becomes: 3 * 10^-1 = 0.3
"""
if self.signed:
sign = 1 if gene.dna[0] == '0' else -1
base_start_idx = 1
else:
sign = 1
base_start_idx = 0
base = sign * int(gene.dna[base_start_idx:base_start_idx + self.significand_length], base=2)
exponent_sign = 1 if gene.dna[1 + self.significand_length] == '0' else -1
exponent = exponent_sign * int(gene.dna[self.significand_length + 2:], base=2)
return float(base * 10 ** exponent) | Translate a gene with binary DNA into a base-10 floating point real number.
Parses the DNA in this manner:
1. The first bit determines the sign of the integer portion of the result (0=positive, 1=negative)
2. The next ``significand_length`` bits of the DNA are converted into a base-10 integer,
and given a positive/negative sign based on step (1).
3. The next bit determines the sign of the exponent portion of the result (0=positive, 1=negative)
4. The remaining bits in the DNA are converted into a base-10 integer, and given a
positive/negative sign based on step (3).
5. The result of step (2) is multiplied by 10 raised to the power of the result of step (4).
Example: let DNA="001111", significand_length=3
1. "0" indicates a positive sign for the integer portion
2. "011" is converted into the base-10 integer 3, its sign stays positive due to step (1)
3. "1" indicates a negative sign for the exponent portion
4. The remaining "1" bit is converted into the base-10 integer 1 and becomes -1 due to step (3)
5. The final result becomes: 3 * 10^-1 = 0.3 | entailment |
def load_clients(self, path=None, apis=[]):
"""Generate client libraries for the given apis, without starting an
api server"""
if not path:
raise Exception("Missing path to api swagger files")
if type(apis) is not list:
raise Exception("'apis' should be a list of api names")
if len(apis) == 0:
raise Exception("'apis' is an empty list - Expected at least one api name")
for api_name in apis:
api_path = os.path.join(path, '%s.yaml' % api_name)
if not os.path.isfile(api_path):
raise Exception("Cannot find swagger specification at %s" % api_path)
log.info("Loading api %s from %s" % (api_name, api_path))
ApiPool.add(
api_name,
yaml_path=api_path,
timeout=self.timeout,
error_callback=self.error_callback,
formats=self.formats,
do_persist=False,
local=False,
)
return self | Generate client libraries for the given apis, without starting an
api server | entailment |
def load_apis(self, path, ignore=[], include_crash_api=False):
"""Load all swagger files found at the given path, except those whose
names are in the 'ignore' list"""
if not path:
raise Exception("Missing path to api swagger files")
if type(ignore) is not list:
raise Exception("'ignore' should be a list of api names")
# Always ignore pym-config.yaml
ignore.append('pym-config')
# Find all swagger apis under 'path'
apis = {}
log.debug("Searching path %s" % path)
for root, dirs, files in os.walk(path):
for f in files:
if f.endswith('.yaml'):
api_name = f.replace('.yaml', '')
if api_name in ignore:
log.info("Ignoring api %s" % api_name)
continue
apis[api_name] = os.path.join(path, f)
log.debug("Found api %s in %s" % (api_name, f))
# And add pymacaron's default ping and crash apis
for name in ['ping', 'crash']:
yaml_path = pkg_resources.resource_filename(__name__, 'pymacaron/%s.yaml' % name)
if not os.path.isfile(yaml_path):
yaml_path = os.path.join(os.path.dirname(sys.modules[__name__].__file__), '%s.yaml' % name)
apis[name] = yaml_path
if not include_crash_api:
del apis['crash']
# Save found apis
self.path_apis = path
self.apis = apis
return self | Load all swagger files found at the given path, except those whose
names are in the 'ignore' list | entailment |
def publish_apis(self, path='doc'):
"""Publish all loaded apis on under the uri /<path>/<api-name>, by
redirecting to http://petstore.swagger.io/
"""
assert path
if not self.apis:
raise Exception("You must call .load_apis() before .publish_apis()")
# Infer the live host url from pym-config.yaml
proto = 'http'
if hasattr(get_config(), 'aws_cert_arn'):
proto = 'https'
live_host = "%s://%s" % (proto, get_config().live_host)
# Allow cross-origin calls
CORS(self.app, resources={r"/%s/*" % path: {"origins": "*"}})
# Add routes to serve api specs and redirect to petstore ui for each one
for api_name, api_path in self.apis.items():
api_filename = os.path.basename(api_path)
log.info("Publishing api %s at /%s/%s" % (api_name, path, api_name))
def redirect_to_petstore(live_host, api_filename):
def f():
url = 'http://petstore.swagger.io/?url=%s/%s/%s' % (live_host, path, api_filename)
log.info("Redirecting to %s" % url)
return redirect(url, code=302)
return f
def serve_api_spec(api_path):
def f():
with open(api_path, 'r') as f:
spec = f.read()
log.info("Serving %s" % api_path)
return Response(spec, mimetype='text/plain')
return f
self.app.add_url_rule('/%s/%s' % (path, api_name), str(uuid4()), redirect_to_petstore(live_host, api_filename))
self.app.add_url_rule('/%s/%s' % (path, api_filename), str(uuid4()), serve_api_spec(api_path))
return self | Publish all loaded apis on under the uri /<path>/<api-name>, by
redirecting to http://petstore.swagger.io/ | entailment |
def start(self, serve=[]):
"""Load all apis, either as local apis served by the flask app, or as
remote apis to be called from whithin the app's endpoints, then start
the app server"""
# Check arguments
if type(serve) is str:
serve = [serve]
elif type(serve) is list:
pass
else:
raise Exception("'serve' should be an api name or a list of api names")
if len(serve) == 0:
raise Exception("You must specify at least one api to serve")
for api_name in serve:
if api_name not in self.apis:
raise Exception("Can't find %s.yaml (swagger file) in the api directory %s" % (api_name, self.path_apis))
app = self.app
app.secret_key = os.urandom(24)
# Initialize JWT config
conf = get_config()
if hasattr(conf, 'jwt_secret'):
log.info("Set JWT parameters to issuer=%s audience=%s secret=%s***" % (
conf.jwt_issuer,
conf.jwt_audience,
conf.jwt_secret[0:8],
))
# Always serve the ping api
serve.append('ping')
# Let's compress returned data when possible
compress = Compress()
compress.init_app(app)
# All apis that are not served locally are not persistent
not_persistent = []
for api_name in self.apis.keys():
if api_name in serve:
pass
else:
not_persistent.append(api_name)
# Now load those apis into the ApiPool
for api_name, api_path in self.apis.items():
host = None
port = None
if api_name in serve:
# We are serving this api locally: override the host:port specified in the swagger spec
host = self.host
port = self.port
do_persist = True if api_name not in not_persistent else False
local = True if api_name in serve else False
log.info("Loading api %s from %s (persist: %s)" % (api_name, api_path, do_persist))
ApiPool.add(
api_name,
yaml_path=api_path,
timeout=self.timeout,
error_callback=self.error_callback,
formats=self.formats,
do_persist=do_persist,
host=host,
port=port,
local=local,
)
ApiPool.merge()
# Now spawn flask routes for all endpoints
for api_name in self.apis.keys():
if api_name in serve:
log.info("Spawning api %s" % api_name)
api = getattr(ApiPool, api_name)
# Spawn api and wrap every endpoint in a crash handler that
# catches replies and reports errors
api.spawn_api(app, decorator=generate_crash_handler_decorator(self.error_decorator))
log.debug("Argv is [%s]" % ' '.join(sys.argv))
if 'celery' in sys.argv[0].lower():
# This code is loading in a celery server - Don't start the actual flask app.
log.info("Running in a Celery worker - Not starting the Flask app")
return
# Initialize monitoring, if any is defined
monitor_init(app=app, config=conf)
if os.path.basename(sys.argv[0]) == 'gunicorn':
# Gunicorn takes care of spawning workers
log.info("Running in Gunicorn - Not starting the Flask app")
return
# Debug mode is the default when not running via gunicorn
app.debug = self.debug
app.run(host='0.0.0.0', port=self.port) | Load all apis, either as local apis served by the flask app, or as
remote apis to be called from whithin the app's endpoints, then start
the app server | entailment |
def add_error(name=None, code=None, status=None):
"""Create a new Exception class"""
if not name or not status or not code:
raise Exception("Can't create Exception class %s: you must set both name, status and code" % name)
myexception = type(name, (PyMacaronException, ), {"code": code, "status": status})
globals()[name] = myexception
if code in code_to_class:
raise Exception("ERROR! Exception %s is already defined." % code)
code_to_class[code] = myexception
return myexception | Create a new Exception class | entailment |
def responsify(error):
"""Take an Error model and return it as a Flask response"""
assert str(type(error).__name__) == 'Error'
if error.error in code_to_class:
e = code_to_class[error.error](error.error_description)
if error.error_id:
e.error_id = error.error_id
if error.user_message:
e.user_message = error.user_message
return e.http_reply()
elif isinstance(error, PyMacaronException):
return error.http_reply()
else:
return PyMacaronException("Caught un-mapped error: %s" % error).http_reply() | Take an Error model and return it as a Flask response | entailment |
def is_error(o):
"""True if o is an instance of a swagger Error model or a flask Response of
an error model"""
if hasattr(o, 'error') and hasattr(o, 'error_description') and hasattr(o, 'status'):
return True
return False | True if o is an instance of a swagger Error model or a flask Response of
an error model | entailment |
def format_error(e):
"""Take an exception caught within pymacaron_core and turn it into a
bravado-core Error instance
"""
if isinstance(e, PyMacaronException):
return e.to_model()
if isinstance(e, PyMacaronCoreException) and e.__class__.__name__ == 'ValidationError':
return ValidationError(str(e)).to_model()
# Turn this exception into a PyMacaron Error model
return UnhandledServerError(str(e)).to_model() | Take an exception caught within pymacaron_core and turn it into a
bravado-core Error instance | entailment |
def raise_error(e):
"""Take a bravado-core Error model and raise it as an exception"""
code = e.error
if code in code_to_class:
raise code_to_class[code](e.error_description)
else:
raise InternalServerError(e.error_description) | Take a bravado-core Error model and raise it as an exception | entailment |
def http_reply(self):
"""Return a Flask reply object describing this error"""
data = {
'status': self.status,
'error': self.code.upper(),
'error_description': str(self)
}
if self.error_caught:
data['error_caught'] = pformat(self.error_caught)
if self.error_id:
data['error_id'] = self.error_id
if self.user_message:
data['user_message'] = self.user_message
r = jsonify(data)
r.status_code = self.status
if str(self.status) != "200":
log.warn("ERROR: caught error %s %s [%s]" % (self.status, self.code, str(self)))
return r | Return a Flask reply object describing this error | entailment |
def to_model(self):
"""Return a bravado-core Error instance"""
e = ApiPool().current_server_api.model.Error(
status=self.status,
error=self.code.upper(),
error_description=str(self),
)
if self.error_id:
e.error_id = self.error_id
if self.user_message:
e.user_message = self.user_message
if self.error_caught:
e.error_caught = pformat(self.error_caught)
return e | Return a bravado-core Error instance | entailment |
def compute_fitness_cdf(chromosomes, ga):
"""
Return a list of fitness-weighted cumulative probabilities for a set of chromosomes.
chromosomes: chromosomes to use for fitness-based calculations
ga: ``algorithms.BaseGeneticAlgorithm`` used to obtain fitness values using its ``eval_fitness`` method
return: list of fitness-weighted cumulative probabilities in [0, 1]
"""
ga.sort(chromosomes)
fitness = [ga.eval_fitness(c) for c in chromosomes]
min_fit = min(fitness)
fit_range = max(fitness) - min_fit
if fit_range == 0:
# all chromosomes have equal chance of being chosen
n = len(chromosomes)
return [i / n for i in range(1, n + 1)]
return [(fit - min_fit) / fit_range for fit in fitness] | Return a list of fitness-weighted cumulative probabilities for a set of chromosomes.
chromosomes: chromosomes to use for fitness-based calculations
ga: ``algorithms.BaseGeneticAlgorithm`` used to obtain fitness values using its ``eval_fitness`` method
return: list of fitness-weighted cumulative probabilities in [0, 1] | entailment |
def weighted_choice(seq, cdf):
"""
Select a random element from a sequence, given cumulative probabilities of selection.
See ``compute_fitness_cdf`` function for obtaining cumulative probabilities.
seq: sequence to select from
cdf: sequence with 1 cumulative probability value in [0, 1] for each element in ``seq``
return: randomly selected element
"""
assert len(seq) == len(cdf)
rand = random.random()
for i, e in enumerate(seq):
cp = cdf[i]
assert 0 <= cp <= 1
if rand < cp:
return e | Select a random element from a sequence, given cumulative probabilities of selection.
See ``compute_fitness_cdf`` function for obtaining cumulative probabilities.
seq: sequence to select from
cdf: sequence with 1 cumulative probability value in [0, 1] for each element in ``seq``
return: randomly selected element | entailment |
def _list(self, path, dim_key=None, **kwargs):
"""Get a list of metrics."""
url_str = self.base_url + path
if dim_key and dim_key in kwargs:
dim_str = self.get_dimensions_url_string(kwargs[dim_key])
kwargs[dim_key] = dim_str
if kwargs:
url_str += '?%s' % parse.urlencode(kwargs, True)
body = self.client.list(
path=url_str
)
return self._parse_body(body) | Get a list of metrics. | entailment |
def mapstr_to_list(mapstr):
""" Convert an ASCII map string with rows to a list of strings, 1 string per row. """
maplist = []
with StringIO(mapstr) as infile:
for row in infile:
maplist.append(row.strip())
return maplist | Convert an ASCII map string with rows to a list of strings, 1 string per row. | entailment |
def sprinkler_reaches_cell(x, y, sx, sy, r):
"""
Return whether a cell is within the radius of the sprinkler.
x: column index of cell
y: row index of cell
sx: column index of sprinkler
sy: row index of sprinkler
r: sprinkler radius
"""
dx = sx - x
dy = sy - y
return math.sqrt(dx ** 2 + dy ** 2) <= r | Return whether a cell is within the radius of the sprinkler.
x: column index of cell
y: row index of cell
sx: column index of sprinkler
sy: row index of sprinkler
r: sprinkler radius | entailment |
def eval_fitness(self, chromosome):
"""
Return the number of plants reached by the sprinkler.
Returns a large penalty for sprinkler locations outside the map.
"""
# convert DNA to represented sprinkler coordinates
sx, sy = self.translator.translate_chromosome(chromosome)
# check for invalid points
penalty = 0
if sx >= self.w:
penalty += self.w * self.h
if sy >= self.h:
penalty += self.w * self.h
if penalty > 0:
self.fitness_cache[chromosome.dna] = -penalty
return -penalty
# calculate number of crop cells watered by sprinkler
crops_watered = 0
# check bounding box around sprinkler for crop cells
# circle guaranteed to be within square with side length 2*r around sprinkler
row_start_idx = max(0, sy - self.r)
row_end_idx = min(sy + self.r + 1, self.h)
col_start_idx = max(0, sx - self.r)
col_end_idx = min(sx + self.r + 1, self.w)
for y, row in enumerate(self.maplist[row_start_idx:row_end_idx], row_start_idx):
for x, cell in enumerate(row[col_start_idx:col_end_idx], col_start_idx):
if cell == 'x' and sprinkler_reaches_cell(x, y, sx, sy, self.r):
crops_watered += 1
# reduce score by 1 if sprinkler placed on a crop cell
if self.maplist[sy][sx] == 'x':
crops_watered -= 1
self.fitness_cache[chromosome.dna] = crops_watered
return crops_watered | Return the number of plants reached by the sprinkler.
Returns a large penalty for sprinkler locations outside the map. | entailment |
def map_sprinkler(self, sx, sy, watered_crop='^', watered_field='_', dry_field=' ', dry_crop='x'):
"""
Return a version of the ASCII map showing reached crop cells.
"""
# convert strings (rows) to lists of characters for easier map editing
maplist = [list(s) for s in self.maplist]
for y, row in enumerate(maplist):
for x, cell in enumerate(row):
if sprinkler_reaches_cell(x, y, sx, sy, self.r):
if cell == 'x':
cell = watered_crop
else:
cell = watered_field
else:
cell = dry_crop if cell == 'x' else dry_field
maplist[y][x] = cell
maplist[sy][sx] = 'O' # sprinkler
return '\n'.join([''.join(row) for row in maplist]) | Return a version of the ASCII map showing reached crop cells. | entailment |
def index(request, template_name="index.html"):
"""\
The index view, which basically just displays a button and increments
a counter.
"""
if request.GET.get('ic-request'):
counter, created = Counter.objects.get_or_create(pk=1)
counter.value += 1
counter.save()
else:
counter, created = Counter.objects.get_or_create(pk=1)
print(counter.value)
context = dict(
value=counter.value,
)
return render(request, template_name, context=context) | \
The index view, which basically just displays a button and increments
a counter. | entailment |
def get_fitness(self, chromosome):
""" Get the fitness score for a chromosome, using the cached value if available. """
fitness = self.fitness_cache.get(chromosome.dna)
if fitness is None:
fitness = self.eval_fitness(chromosome)
self.fitness_cache[chromosome.dna] = fitness
return fitness | Get the fitness score for a chromosome, using the cached value if available. | entailment |
def compete(self, chromosomes):
"""
Simulate competition/survival of the fittest.
The fitness of each chromosome is used to calculate a survival probability
based on how it compares to the overall fitness range of the run and the
fitness range of the current generation. The ``abs_fit_weight`` and
``rel_fit_weight`` attributes determine the degree to which overall and
current fitness ranges affect the final survival probability.
return: list of surviving chromosomes
"""
# update overall fitness for this run
self.sort(chromosomes)
min_fit = self.get_fitness(chromosomes[0])
max_fit = self.get_fitness(chromosomes[-1])
if min_fit < self.min_fit_ever:
self.min_fit_ever = min_fit
if max_fit > self.max_fit_ever:
self.max_fit_ever = max_fit
overall_fit_range = self.max_fit_ever - self.min_fit_ever # "absolute" fitness range
current_fit_range = max_fit - min_fit # "relative" fitness range
# choose survivors based on relative fitness within overall fitness range
survivors = []
for chromosome in chromosomes:
fit = self.get_fitness(chromosome)
p_survival_absolute = (fit - self.min_fit_ever) / overall_fit_range if overall_fit_range != 0 else 1
p_survival_relative = (fit - min_fit) / current_fit_range if current_fit_range != 0 else 1
# compute weighted average survival probability
# a portion accounts for absolute overall fitness for all chromosomes ever encountered (environment-driven)
# the other portion accounts for relative fitness within the current population (competition-driven)
p_survival = p_survival_absolute * self.abs_fit_weight + p_survival_relative * self.rel_fit_weight
if random.random() < p_survival:
survivors.append(chromosome)
if not survivors:
# rarely, nothing survives -- allow everyone to live
return chromosomes
return survivors | Simulate competition/survival of the fittest.
The fitness of each chromosome is used to calculate a survival probability
based on how it compares to the overall fitness range of the run and the
fitness range of the current generation. The ``abs_fit_weight`` and
``rel_fit_weight`` attributes determine the degree to which overall and
current fitness ranges affect the final survival probability.
return: list of surviving chromosomes | entailment |
def reproduce(self, survivors, p_crossover, two_point_crossover=False, target_size=None):
"""
Reproduces the population from a pool of surviving chromosomes
until a target population size is met. Offspring are created
by selecting a survivor. Survivors with higher fitness have a
greater chance to be selected for reproduction.
Genetic crossover events may occur for each offspring created.
Crossover mates are randomly selected from the pool of survivors.
Crossover points are randomly selected from the length of the crossed chromosomes.
If crossover does not occur, an offspring is an exact copy of the selected survivor.
Crossover only affects the DNA of the offspring, not the survivors/parents.
survivors: pool of parent chromosomes to reproduce from
p_crossover: probability in [0, 1] that a crossover event will
occur for each offspring
two_point_crossover (default=False): whether 2-point crossover is used; default is 1-point
target_size (default=original population size): target population size
return: list of survivors plus any offspring
"""
assert 0 <= p_crossover <= 1
if not target_size:
target_size = self.orig_pop_size
num_survivors = len(survivors)
# compute reproduction cumulative probabilities
# weakest member gets p=0 but can be crossed-over with
cdf = compute_fitness_cdf(survivors, self)
offspring = []
while num_survivors + len(offspring) < target_size:
# pick a survivor to reproduce
c1 = weighted_choice(survivors, cdf).copy()
# crossover
if random.random() < p_crossover:
# randomly pick a crossover mate from survivors
# same chromosome can be c1 and c2
c2 = random.choice(survivors).copy()
point1 = random.randrange(0, c1.length)
point2 = random.randrange(point1 + 1, c1.length + 1) if two_point_crossover else None
c1.crossover(c2, point1, point2)
offspring.append(c1)
return survivors + offspring | Reproduces the population from a pool of surviving chromosomes
until a target population size is met. Offspring are created
by selecting a survivor. Survivors with higher fitness have a
greater chance to be selected for reproduction.
Genetic crossover events may occur for each offspring created.
Crossover mates are randomly selected from the pool of survivors.
Crossover points are randomly selected from the length of the crossed chromosomes.
If crossover does not occur, an offspring is an exact copy of the selected survivor.
Crossover only affects the DNA of the offspring, not the survivors/parents.
survivors: pool of parent chromosomes to reproduce from
p_crossover: probability in [0, 1] that a crossover event will
occur for each offspring
two_point_crossover (default=False): whether 2-point crossover is used; default is 1-point
target_size (default=original population size): target population size
return: list of survivors plus any offspring | entailment |
def mutate(self, chromosomes, p_mutate):
"""
Call every chromosome's ``mutate`` method.
p_mutate: probability of mutation in [0, 1]
"""
assert 0 <= p_mutate <= 1
for chromosome in chromosomes:
chromosome.mutate(p_mutate) | Call every chromosome's ``mutate`` method.
p_mutate: probability of mutation in [0, 1] | entailment |
def run(self, generations, p_mutate, p_crossover, elitist=True, two_point_crossover=False,
refresh_after=None, quit_after=None):
"""
Run a standard genetic algorithm simulation for a set number
of generations (iterations), each consisting of the following
ordered steps:
1. competition/survival of the fittest (``compete`` method)
2. reproduction (``reproduce`` method)
3. mutation (``mutate`` method)
4. check if the new population's fittest is fitter than the overall fittest
4a. if not and the ``elitist`` option is active, replace the weakest solution
with the overall fittest
generations: how many generations to run
p_mutate: probability of mutation in [0, 1]
p_crossover: probability in [0, 1] that a crossover event will occur for each offspring
elitist (default=True): option to replace the weakest solution with the
strongest if a new one is not found each generation
two_point_crossover (default=False): whether 2-point crossover is used
refresh_after: number of generations since the last upset after which to randomly generate a new population
quit_after: number of generations since the last upset after which to stop the run, possibly before reaching
``generations`` iterations
return: the overall fittest solution (chromosome)
"""
start_time = time.time()
assert 0 <= p_mutate <= 1
assert 0 <= p_crossover <= 1
# these values guaranteed to be replaced in first generation
self.min_fit_ever = 1e999999999
self.max_fit_ever = -1e999999999
self.generation_fittest.clear()
self.generation_fittest_fit.clear()
self.overall_fittest_fit.clear()
self.new_fittest_generations.clear()
overall_fittest = self.get_fittest()
overall_fittest_fit = self.get_fitness(overall_fittest)
gens_since_upset = 0
for gen in range(1, generations + 1):
survivors = self.compete(self.chromosomes)
self.chromosomes = self.reproduce(survivors, p_crossover, two_point_crossover=two_point_crossover)
self.mutate(self.chromosomes, p_mutate)
# check for new fittest
gen_fittest = self.get_fittest().copy()
gen_fittest_fit = self.get_fitness(gen_fittest)
if gen_fittest_fit > overall_fittest_fit:
overall_fittest = gen_fittest
overall_fittest_fit = gen_fittest_fit
self.new_fittest_generations.append(gen)
gens_since_upset = 0
else:
gens_since_upset += 1
if elitist:
# no new fittest found, replace least fit with overall fittest
self.sort(self.chromosomes)
self.chromosomes[0].dna = overall_fittest.dna
if quit_after and gens_since_upset >= quit_after:
print("quitting on generation", gen, "after", quit_after, "generations with no upset")
break
if refresh_after and gens_since_upset >= refresh_after:
# been a very long time since a new best solution -- mix things up
print("refreshing on generation", gen)
self.mutate(self.chromosomes, 0.5)
gens_since_upset = 0
self.generation_fittest[gen] = gen_fittest
self.generation_fittest_fit[gen] = gen_fittest_fit
self.overall_fittest_fit[gen] = overall_fittest_fit
if self.should_terminate(overall_fittest):
break
self.fitness_cache.clear()
self.run_time_s = time.time() - start_time
return overall_fittest | Run a standard genetic algorithm simulation for a set number
of generations (iterations), each consisting of the following
ordered steps:
1. competition/survival of the fittest (``compete`` method)
2. reproduction (``reproduce`` method)
3. mutation (``mutate`` method)
4. check if the new population's fittest is fitter than the overall fittest
4a. if not and the ``elitist`` option is active, replace the weakest solution
with the overall fittest
generations: how many generations to run
p_mutate: probability of mutation in [0, 1]
p_crossover: probability in [0, 1] that a crossover event will occur for each offspring
elitist (default=True): option to replace the weakest solution with the
strongest if a new one is not found each generation
two_point_crossover (default=False): whether 2-point crossover is used
refresh_after: number of generations since the last upset after which to randomly generate a new population
quit_after: number of generations since the last upset after which to stop the run, possibly before reaching
``generations`` iterations
return: the overall fittest solution (chromosome) | entailment |
def create_random(cls, gene_length, n=1, gene_class=BinaryGene):
"""
Create 1 or more chromosomes with randomly generated DNA.
gene_length: int (or sequence of ints) describing gene DNA length
n: number of chromosomes to create (default=1); returns a list if n>1, else a single chromosome
gene_class: subclass of ``ga.chromosomes.BaseGene`` to use for genes
return: new chromosome
"""
assert issubclass(gene_class, BaseGene)
chromosomes = []
# when gene_length is scalar, convert to a list to keep subsequent code simple
if not hasattr(gene_length, '__iter__'):
gene_length = [gene_length]
for _ in range(n):
genes = [gene_class.create_random(length) for length in gene_length]
chromosomes.append(cls(genes))
if n == 1:
return chromosomes[0]
else:
return chromosomes | Create 1 or more chromosomes with randomly generated DNA.
gene_length: int (or sequence of ints) describing gene DNA length
n: number of chromosomes to create (default=1); returns a list if n>1, else a single chromosome
gene_class: subclass of ``ga.chromosomes.BaseGene`` to use for genes
return: new chromosome | entailment |
def dna(self, dna):
"""
Replace this chromosome's DNA with new DNA of equal length,
assigning the new DNA to the chromosome's genes sequentially.
For example, if a chromosome contains these genes...
1. 100100
2. 011011
...and the new DNA is 111111000000, the genes become:
1. 111111
2. 000000
"""
assert self.length == len(dna)
i = 0
for gene in self.genes:
gene.dna = dna[i:i + gene.length]
i += gene.length | Replace this chromosome's DNA with new DNA of equal length,
assigning the new DNA to the chromosome's genes sequentially.
For example, if a chromosome contains these genes...
1. 100100
2. 011011
...and the new DNA is 111111000000, the genes become:
1. 111111
2. 000000 | entailment |
def crossover(self, chromosome, point1, point2=None):
"""
Exchange DNA with another chromosome of equal length at one or two common points.
For example, consider chromosomes:
1. 11110000
2. 00001111
If the crossover point is 4, the exchange results in a new DNA arrangement:
1. 11111111
2. 00000000
If 2 points are used--3 and 6--this happens:
1. 11001100
2. 00110011
chromosome: other ``Chromosome`` to exchange DNA with
point1: zero-based index used for the first (and possibly only) crossover point
point2: zero-based index used for the second (optional) crossover point; must be > point1
"""
assert self.length == chromosome.length
if point2 is None:
new_dna = self.dna[:point1] + chromosome.dna[point1:]
other_new_dna = chromosome.dna[:point1] + self.dna[point1:]
self.dna = new_dna
chromosome.dna = other_new_dna
else:
assert point2 > point1
self_substr = self.dna[point1:point2 + 1]
other_substr = chromosome.dna[point1:point2 + 1]
self.dna = self.dna[:point1] + other_substr + self.dna[point2 + 1:]
chromosome.dna = chromosome.dna[:point1] + self_substr + chromosome.dna[point2 + 1:] | Exchange DNA with another chromosome of equal length at one or two common points.
For example, consider chromosomes:
1. 11110000
2. 00001111
If the crossover point is 4, the exchange results in a new DNA arrangement:
1. 11111111
2. 00000000
If 2 points are used--3 and 6--this happens:
1. 11001100
2. 00110011
chromosome: other ``Chromosome`` to exchange DNA with
point1: zero-based index used for the first (and possibly only) crossover point
point2: zero-based index used for the second (optional) crossover point; must be > point1 | entailment |
def mutate(self, p_mutate):
"""
Check all genes in this chromosome for mutation.
p_mutate: probability for mutation to occur
"""
assert 0 <= p_mutate <= 1
for gene in self.genes:
gene.mutate(p_mutate) | Check all genes in this chromosome for mutation.
p_mutate: probability for mutation to occur | entailment |
def copy(self):
""" Return a new instance of this chromosome by copying its genes. """
genes = [g.copy() for g in self.genes]
return type(self)(genes) | Return a new instance of this chromosome by copying its genes. | entailment |
def check_genes(self):
""" Assert that every DNA choice is represented by exactly one gene. """
gene_dna_set = set([g.dna for g in self.genes])
assert gene_dna_set == self.dna_choices_set | Assert that every DNA choice is represented by exactly one gene. | entailment |
def report_error(title=None, data={}, caught=None, is_fatal=False):
"""Format a crash report and send it somewhere relevant. There are two
types of crashes: fatal crashes (backend errors) or non-fatal ones (just
reporting a glitch, but the api call did not fail)"""
# Don't report errors if NO_ERROR_REPORTING set to 1 (set by run_acceptance_tests)
if os.environ.get('DO_REPORT_ERROR', None):
# Force error reporting
pass
elif os.environ.get('NO_ERROR_REPORTING', '') == '1':
log.info("NO_ERROR_REPORTING is set: not reporting error!")
return
elif 'is_ec2_instance' in data:
if not data['is_ec2_instance']:
# Not running on amazon: no reporting
log.info("DATA[is_ec2_instance] is False: not reporting error!")
return
elif not is_ec2_instance():
log.info("Not running on an EC2 instance: not reporting error!")
return
# Fill error report with tons of usefull data
if 'user' not in data:
populate_error_report(data)
# Add the message
data['title'] = title
data['is_fatal_error'] = is_fatal
# Add the error caught, if any:
if caught:
data['error_caught'] = "%s" % caught
# Add a trace - Formatting traceback may raise a UnicodeDecodeError...
data['stack'] = []
try:
data['stack'] = [l for l in traceback.format_stack()]
except Exception:
data['stack'] = 'Skipped trace - contained non-ascii chars'
# inspect may raise a UnicodeDecodeError...
fname = ''
try:
fname = inspect.stack()[1][3]
except Exception as e:
fname = 'unknown-method'
# Format the error's title
status, code = 'unknown_status', 'unknown_error_code'
if 'response' in data:
status = data['response'].get('status', status)
code = data['response'].get('error_code', code)
title_details = "%s %s %s" % (ApiPool().current_server_name, status, code)
else:
title_details = "%s %s()" % (ApiPool().current_server_name, fname)
if is_fatal:
title_details = 'FATAL ERROR %s' % title_details
else:
title_details = 'NON-FATAL ERROR %s' % title_details
if title:
title = "%s: %s" % (title_details, title)
else:
title = title_details
global error_reporter
log.info("Reporting crash...")
try:
error_reporter(title, json.dumps(data, sort_keys=True, indent=4))
except Exception as e:
# Don't block on replying to api caller
log.error("Failed to send email report: %s" % str(e)) | Format a crash report and send it somewhere relevant. There are two
types of crashes: fatal crashes (backend errors) or non-fatal ones (just
reporting a glitch, but the api call did not fail) | entailment |
def populate_error_report(data):
"""Add generic stats to the error report"""
# Did pymacaron_core set a call_id and call_path?
call_id, call_path = '', ''
if hasattr(stack.top, 'call_id'):
call_id = stack.top.call_id
if hasattr(stack.top, 'call_path'):
call_path = stack.top.call_path
# Unique ID associated to all responses associated to a given
# call to apis, across all micro-services
data['call_id'] = call_id
data['call_path'] = call_path
# Are we in aws?
data['is_ec2_instance'] = is_ec2_instance()
# If user is authenticated, get her id
user_data = {
'id': '',
'is_auth': 0,
'ip': '',
}
if stack.top:
# We are in a request context
user_data['ip'] = request.remote_addr
if 'X-Forwarded-For' in request.headers:
user_data['forwarded_ip'] = request.headers.get('X-Forwarded-For', '')
if 'User-Agent' in request.headers:
user_data['user_agent'] = request.headers.get('User-Agent', '')
if hasattr(stack.top, 'current_user'):
user_data['is_auth'] = 1
user_data['id'] = stack.top.current_user.get('sub', '')
for k in ('name', 'email', 'is_expert', 'is_admin', 'is_support', 'is_tester', 'language'):
v = stack.top.current_user.get(k, None)
if v:
user_data[k] = v
data['user'] = user_data
# Is the current code running as a server?
if ApiPool().current_server_api:
# Server info
server = request.base_url
server = server.replace('http://', '')
server = server.replace('https://', '')
server = server.split('/')[0]
parts = server.split(':')
fqdn = parts[0]
port = parts[1] if len(parts) == 2 else ''
data['server'] = {
'fqdn': fqdn,
'port': port,
'api_name': ApiPool().current_server_name,
'api_version': ApiPool().current_server_api.get_version(),
}
# Endpoint data
data['endpoint'] = {
'id': "%s %s %s" % (ApiPool().current_server_name, request.method, request.path),
'url': request.url,
'base_url': request.base_url,
'path': request.path,
'method': request.method
} | Add generic stats to the error report | entailment |
def generate_crash_handler_decorator(error_decorator=None):
"""Return the crash_handler to pass to pymacaron_core, with optional error decoration"""
def crash_handler(f):
"""Return a decorator that reports failed api calls via the error_reporter,
for use on every server endpoint"""
@wraps(f)
def wrapper(*args, **kwargs):
"""Generate a report of this api call, and if the call failed or was too slow,
forward this report via the error_reporter"""
data = {}
t0 = timenow()
exception_string = ''
# Call endpoint and log execution time
try:
res = f(*args, **kwargs)
except Exception as e:
# An unhandled exception occured!
exception_string = str(e)
exc_type, exc_value, exc_traceback = sys.exc_info()
trace = traceback.format_exception(exc_type, exc_value, exc_traceback, 30)
data['trace'] = trace
# If it is a PyMacaronException, just call its http_reply()
if hasattr(e, 'http_reply'):
res = e.http_reply()
else:
# Otherwise, forge a Response
e = UnhandledServerError(exception_string)
log.error("UNHANDLED EXCEPTION: %s" % '\n'.join(trace))
res = e.http_reply()
t1 = timenow()
# Is the response an Error instance?
response_type = type(res).__name__
status_code = 200
is_an_error = 0
error = ''
error_description = ''
error_user_message = ''
error_id = ''
if isinstance(res, Response):
# Got a flask.Response object
res_data = None
status_code = str(res.status_code)
if str(status_code) == '200':
# It could be any valid json response, but it could also be an Error model
# that pymacaron_core handled as a status 200 because it does not know of
# pymacaron Errors
if res.content_type == 'application/json':
s = str(res.data)
if '"error":' in s and '"error_description":' in s and '"status":' in s:
# This looks like an error, let's decode it
res_data = res.get_data()
else:
# Assuming it is a PyMacaronException.http_reply()
res_data = res.get_data()
if res_data:
if type(res_data) is bytes:
res_data = res_data.decode("utf-8")
is_json = True
try:
j = json.loads(res_data)
except ValueError as e:
# This was a plain html response. Fake an error
is_json = False
j = {'error': res_data, 'status': status_code}
# Make sure that the response gets the same status as the PyMacaron Error it contained
status_code = j['status']
res.status_code = int(status_code)
# Patch Response to contain a unique id
if is_json:
if 'error_id' not in j:
# If the error is forwarded by multiple micro-services, we
# want the error_id to be set only on the original error
error_id = str(uuid.uuid4())
j['error_id'] = error_id
res.set_data(json.dumps(j))
if error_decorator:
# Apply error_decorator, if any defined
res.set_data(json.dumps(error_decorator(j)))
# And extract data from this error
error = j.get('error', 'NO_ERROR_IN_JSON')
error_description = j.get('error_description', res_data)
if error_description == '':
error_description = res_data
if not exception_string:
exception_string = error_description
error_user_message = j.get('user_message', '')
is_an_error = 1
request_args = []
if len(args):
request_args.append(args)
if kwargs:
request_args.append(kwargs)
data.update({
# Set only on the original error, not on forwarded ones, not on
# success responses
'error_id': error_id,
# Call results
'time': {
'start': t0.isoformat(),
'end': t1.isoformat(),
'microsecs': (t1.timestamp() - t0.timestamp()) * 1000000,
},
# Response details
'response': {
'type': response_type,
'status': str(status_code),
'is_error': is_an_error,
'error_code': error,
'error_description': error_description,
'user_message': error_user_message,
},
# Request details
'request': {
'params': pformat(request_args),
},
})
populate_error_report(data)
# inspect may raise a UnicodeDecodeError...
fname = function_name(f)
#
# Should we report this call?
#
# If it is an internal errors, report it
if data['response']['status'] and int(data['response']['status']) >= 500:
report_error(
title="%s(): %s" % (fname, exception_string),
data=data,
is_fatal=True
)
log.info("")
log.info(" <= Done!")
log.info("")
return res
return wrapper
return crash_handler | Return the crash_handler to pass to pymacaron_core, with optional error decoration | entailment |
def create(self, **kwargs):
"""Create a metric."""
url_str = self.base_url
if 'tenant_id' in kwargs:
url_str = url_str + '?tenant_id=%s' % kwargs['tenant_id']
del kwargs['tenant_id']
data = kwargs['jsonbody'] if 'jsonbody' in kwargs else kwargs
body = self.client.create(url=url_str, json=data)
return body | Create a metric. | entailment |
def get_json_results(self, response):
'''
Parses the request result and returns the JSON object. Handles all errors.
'''
try:
# return the proper JSON object, or error code if request didn't go through.
self.most_recent_json = response.json()
json_results = response.json()
if response.status_code in [401, 403]: #401 is invalid key, 403 is out of monthly quota.
raise PyMsCognitiveWebSearchException("CODE {code}: {message}".format(code=response.status_code,message=json_results["message"]) )
elif response.status_code in [429]: #429 means try again in x seconds.
message = json_results['message']
try:
# extract time out seconds from response
timeout = int(re.search('in (.+?) seconds', message).group(1)) + 1
print ("CODE 429, sleeping for {timeout} seconds").format(timeout=str(timeout))
time.sleep(timeout)
except (AttributeError, ValueError) as e:
if not self.silent_fail:
raise PyMsCognitiveWebSearchException("CODE 429. Failed to auto-sleep: {message}".format(code=response.status_code,message=json_results["message"]) )
else:
print ("CODE 429. Failed to auto-sleep: {message}. Trying again in 5 seconds.".format(code=response.status_code,message=json_results["message"]))
time.sleep(5)
except ValueError as vE:
if not self.silent_fail:
raise PyMsCognitiveWebSearchException("Request returned with code %s, error msg: %s" % (r.status_code, r.text))
else:
print ("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (r.status_code, r.text))
time.sleep(5)
return json_results | Parses the request result and returns the JSON object. Handles all errors. | entailment |
def search_all(self, quota=50, format='json'):
'''
Returns a single list containing up to 'limit' Result objects
Will keep requesting until quota is met
Will also truncate extra results to return exactly the given quota
'''
quota_left = quota
results = []
while quota_left > 0:
more_results = self._search(quota_left, format)
if not more_results:
break
results += more_results
quota_left = quota_left - len(more_results)
time.sleep(1)
results = results[0:quota]
return results | Returns a single list containing up to 'limit' Result objects
Will keep requesting until quota is met
Will also truncate extra results to return exactly the given quota | entailment |
def format_parameters(params):
'''Reformat parameters into dict of format expected by the API.'''
if not params:
return {}
# expect multiple invocations of --parameters but fall back
# to ; delimited if only one --parameters is specified
if len(params) == 1:
if params[0].find(';') != -1: # found
params = params[0].split(';')
else:
params = params[0].split(',')
parameters = {}
for p in params:
try:
(n, v) = p.split('=', 1)
except ValueError:
msg = '%s(%s). %s.' % ('Malformed parameter', p,
'Use the key=value format')
raise exc.CommandError(msg)
if n not in parameters:
parameters[n] = v
else:
if not isinstance(parameters[n], list):
parameters[n] = [parameters[n]]
parameters[n].append(v)
return parameters | Reformat parameters into dict of format expected by the API. | entailment |
def delete(self, **kwargs):
"""Delete a specific alarm."""
url_str = self.base_url + '/%s' % kwargs['alarm_id']
resp = self.client.delete(url_str)
return resp | Delete a specific alarm. | entailment |
def history(self, **kwargs):
"""History of a specific alarm."""
url_str = self.base_url + '/%s/state-history' % kwargs['alarm_id']
del kwargs['alarm_id']
if kwargs:
url_str = url_str + '?%s' % parse.urlencode(kwargs, True)
resp = self.client.list(url_str)
return resp['elements'] if type(resp) is dict else resp | History of a specific alarm. | entailment |
def history_list(self, **kwargs):
"""History list of alarm state."""
url_str = self.base_url + '/state-history/'
if 'dimensions' in kwargs:
dimstr = self.get_dimensions_url_string(kwargs['dimensions'])
kwargs['dimensions'] = dimstr
if kwargs:
url_str = url_str + '?%s' % parse.urlencode(kwargs, True)
resp = self.client.list(url_str)
return resp['elements'] if type(resp) is dict else resp | History list of alarm state. | entailment |
def do_version():
"""Return version details of the running server api"""
v = ApiPool.ping.model.Version(
name=ApiPool().current_server_name,
version=ApiPool().current_server_api.get_version(),
container=get_container_version(),
)
log.info("/version: " + pprint.pformat(v))
return v | Return version details of the running server api | entailment |
def do_metric_create(mc, args):
'''Create metric.'''
fields = {}
fields['name'] = args.name
if args.dimensions:
fields['dimensions'] = utils.format_parameters(args.dimensions)
fields['timestamp'] = args.time
fields['value'] = args.value
if args.value_meta:
fields['value_meta'] = utils.format_parameters(args.value_meta)
if args.project_id:
fields['tenant_id'] = args.project_id
try:
mc.metrics.create(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print('Successfully created metric') | Create metric. | entailment |
def do_metric_create_raw(mc, args):
'''Create metric from raw json body.'''
try:
mc.metrics.create(**args.jsonbody)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print('Successfully created metric') | Create metric from raw json body. | entailment |
def do_metric_name_list(mc, args):
'''List names of metrics.'''
fields = {}
if args.dimensions:
fields['dimensions'] = utils.format_dimensions_query(args.dimensions)
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.tenant_id:
fields['tenant_id'] = args.tenant_id
try:
metric_names = mc.metrics.list_names(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(metric_names))
return
if isinstance(metric_names, list):
utils.print_list(metric_names, ['Name'], formatters={'Name': lambda x: x['name']}) | List names of metrics. | entailment |
def do_metric_list(mc, args):
'''List metrics for this tenant.'''
fields = {}
if args.name:
fields['name'] = args.name
if args.dimensions:
fields['dimensions'] = utils.format_dimensions_query(args.dimensions)
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.starttime:
_translate_starttime(args)
fields['start_time'] = args.starttime
if args.endtime:
fields['end_time'] = args.endtime
if args.tenant_id:
fields['tenant_id'] = args.tenant_id
try:
metric = mc.metrics.list(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(metric))
return
cols = ['name', 'dimensions']
formatters = {
'name': lambda x: x['name'],
'dimensions': lambda x: utils.format_dict(x['dimensions']),
}
if isinstance(metric, list):
# print the list
utils.print_list(metric, cols, formatters=formatters)
else:
# add the dictionary to a list, so print_list works
metric_list = list()
metric_list.append(metric)
utils.print_list(
metric_list,
cols,
formatters=formatters) | List metrics for this tenant. | entailment |
def do_dimension_name_list(mc, args):
'''List names of metric dimensions.'''
fields = {}
if args.metric_name:
fields['metric_name'] = args.metric_name
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.tenant_id:
fields['tenant_id'] = args.tenant_id
try:
dimension_names = mc.metrics.list_dimension_names(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
if args.json:
print(utils.json_formatter(dimension_names))
return
if isinstance(dimension_names, list):
utils.print_list(dimension_names, ['Dimension Names'], formatters={
'Dimension Names': lambda x: x['dimension_name']}) | List names of metric dimensions. | entailment |
def do_dimension_value_list(mc, args):
'''List names of metric dimensions.'''
fields = {}
fields['dimension_name'] = args.dimension_name
if args.metric_name:
fields['metric_name'] = args.metric_name
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.tenant_id:
fields['tenant_id'] = args.tenant_id
try:
dimension_values = mc.metrics.list_dimension_values(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
if args.json:
print(utils.json_formatter(dimension_values))
return
if isinstance(dimension_values, list):
utils.print_list(dimension_values, ['Dimension Values'], formatters={
'Dimension Values': lambda x: x['dimension_value']}) | List names of metric dimensions. | entailment |
def do_metric_statistics(mc, args):
'''List measurement statistics for the specified metric.'''
statistic_types = ['AVG', 'MIN', 'MAX', 'COUNT', 'SUM']
statlist = args.statistics.split(',')
for stat in statlist:
if stat.upper() not in statistic_types:
errmsg = ('Invalid type, not one of [' +
', '.join(statistic_types) + ']')
raise osc_exc.CommandError(errmsg)
fields = {}
fields['name'] = args.name
if args.dimensions:
fields['dimensions'] = utils.format_dimensions_query(args.dimensions)
_translate_starttime(args)
fields['start_time'] = args.starttime
if args.endtime:
fields['end_time'] = args.endtime
if args.period:
fields['period'] = args.period
fields['statistics'] = args.statistics
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.merge_metrics:
fields['merge_metrics'] = args.merge_metrics
if args.group_by:
fields['group_by'] = args.group_by
if args.tenant_id:
fields['tenant_id'] = args.tenant_id
try:
metric = mc.metrics.list_statistics(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(metric))
return
cols = ['name', 'dimensions']
# add dynamic column names
if metric:
column_names = metric[0]['columns']
for name in column_names:
cols.append(name)
else:
# when empty set, print_list needs a col
cols.append('timestamp')
formatters = {
'name': lambda x: x['name'],
'dimensions': lambda x: utils.format_dict(x['dimensions']),
'timestamp': lambda x:
format_statistic_timestamp(x['statistics'], x['columns'],
'timestamp'),
'avg': lambda x:
format_statistic_value(x['statistics'], x['columns'], 'avg'),
'min': lambda x:
format_statistic_value(x['statistics'], x['columns'], 'min'),
'max': lambda x:
format_statistic_value(x['statistics'], x['columns'], 'max'),
'count': lambda x:
format_statistic_value(x['statistics'], x['columns'], 'count'),
'sum': lambda x:
format_statistic_value(x['statistics'], x['columns'], 'sum'),
}
if isinstance(metric, list):
# print the list
utils.print_list(metric, cols, formatters=formatters)
else:
# add the dictionary to a list, so print_list works
metric_list = list()
metric_list.append(metric)
utils.print_list(
metric_list,
cols,
formatters=formatters) | List measurement statistics for the specified metric. | entailment |
def do_notification_show(mc, args):
'''Describe the notification.'''
fields = {}
fields['notification_id'] = args.id
try:
notification = mc.notifications.get(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(notification))
return
formatters = {
'name': utils.json_formatter,
'id': utils.json_formatter,
'type': utils.json_formatter,
'address': utils.json_formatter,
'period': utils.json_formatter,
'links': utils.format_dictlist,
}
utils.print_dict(notification, formatters=formatters) | Describe the notification. | entailment |
def do_notification_list(mc, args):
'''List notifications for this tenant.'''
fields = {}
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.sort_by:
sort_by = args.sort_by.split(',')
for field in sort_by:
field_values = field.lower().split()
if len(field_values) > 2:
print("Invalid sort_by value {}".format(field))
if field_values[0] not in allowed_notification_sort_by:
print("Sort-by field name {} is not in [{}]".format(field_values[0],
allowed_notification_sort_by))
return
if len(field_values) > 1 and field_values[1] not in ['asc', 'desc']:
print("Invalid value {}, must be asc or desc".format(field_values[1]))
fields['sort_by'] = args.sort_by
try:
notification = mc.notifications.list(**fields)
except osc_exc.ClientException as he:
raise osc_exc.CommandError(
'ClientException code=%s message=%s' %
(he.code, he.message))
else:
if args.json:
print(utils.json_formatter(notification))
return
cols = ['name', 'id', 'type', 'address', 'period']
formatters = {
'name': lambda x: x['name'],
'id': lambda x: x['id'],
'type': lambda x: x['type'],
'address': lambda x: x['address'],
'period': lambda x: x['period'],
}
if isinstance(notification, list):
utils.print_list(
notification,
cols,
formatters=formatters)
else:
notif_list = list()
notif_list.append(notification)
utils.print_list(notif_list, cols, formatters=formatters) | List notifications for this tenant. | entailment |
def do_notification_delete(mc, args):
'''Delete notification.'''
fields = {}
fields['notification_id'] = args.id
try:
mc.notifications.delete(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print('Successfully deleted notification') | Delete notification. | entailment |
def do_notification_update(mc, args):
'''Update notification.'''
fields = {}
fields['notification_id'] = args.id
fields['name'] = args.name
fields['type'] = args.type
fields['address'] = args.address
if not _validate_notification_period(args.period, args.type.upper()):
return
fields['period'] = args.period
try:
notification = mc.notifications.update(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print(jsonutils.dumps(notification, indent=2)) | Update notification. | entailment |
def do_alarm_definition_create(mc, args):
'''Create an alarm definition.'''
fields = {}
fields['name'] = args.name
if args.description:
fields['description'] = args.description
fields['expression'] = args.expression
if args.alarm_actions:
fields['alarm_actions'] = args.alarm_actions
if args.ok_actions:
fields['ok_actions'] = args.ok_actions
if args.undetermined_actions:
fields['undetermined_actions'] = args.undetermined_actions
if args.severity:
if not _validate_severity(args.severity):
return
fields['severity'] = args.severity
if args.match_by:
fields['match_by'] = args.match_by.split(',')
try:
alarm = mc.alarm_definitions.create(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print(jsonutils.dumps(alarm, indent=2)) | Create an alarm definition. | entailment |
def do_alarm_definition_list(mc, args):
'''List alarm definitions for this tenant.'''
fields = {}
if args.name:
fields['name'] = args.name
if args.dimensions:
fields['dimensions'] = utils.format_dimensions_query(args.dimensions)
if args.severity:
if not _validate_severity(args.severity):
return
fields['severity'] = args.severity
if args.sort_by:
sort_by = args.sort_by.split(',')
for field in sort_by:
field_values = field.split()
if len(field_values) > 2:
print("Invalid sort_by value {}".format(field))
if field_values[0] not in allowed_definition_sort_by:
print("Sort-by field name {} is not in [{}]".format(field_values[0],
allowed_definition_sort_by))
return
if len(field_values) > 1 and field_values[1] not in ['asc', 'desc']:
print("Invalid value {}, must be asc or desc".format(field_values[1]))
fields['sort_by'] = args.sort_by
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
try:
alarm = mc.alarm_definitions.list(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(alarm))
return
cols = ['name', 'id', 'expression', 'match_by', 'actions_enabled']
formatters = {
'name': lambda x: x['name'],
'id': lambda x: x['id'],
'expression': lambda x: x['expression'],
'match_by': lambda x: utils.format_list(x['match_by']),
'actions_enabled': lambda x: x['actions_enabled'],
}
if isinstance(alarm, list):
# print the list
utils.print_list(alarm, cols, formatters=formatters)
else:
# add the dictionary to a list, so print_list works
alarm_list = list()
alarm_list.append(alarm)
utils.print_list(alarm_list, cols, formatters=formatters) | List alarm definitions for this tenant. | entailment |
def do_alarm_definition_delete(mc, args):
'''Delete the alarm definition.'''
fields = {}
fields['alarm_id'] = args.id
try:
mc.alarm_definitions.delete(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print('Successfully deleted alarm definition') | Delete the alarm definition. | entailment |
def do_alarm_definition_update(mc, args):
'''Update the alarm definition.'''
fields = {}
fields['alarm_id'] = args.id
fields['name'] = args.name
fields['description'] = args.description
fields['expression'] = args.expression
fields['alarm_actions'] = _arg_split_patch_update(args.alarm_actions)
fields['ok_actions'] = _arg_split_patch_update(args.ok_actions)
fields['undetermined_actions'] = _arg_split_patch_update(args.undetermined_actions)
if args.actions_enabled not in enabled_types:
errmsg = ('Invalid value, not one of [' +
', '.join(enabled_types) + ']')
print(errmsg)
return
fields['actions_enabled'] = args.actions_enabled in ['true', 'True']
fields['match_by'] = _arg_split_patch_update(args.match_by)
if not _validate_severity(args.severity):
return
fields['severity'] = args.severity
try:
alarm = mc.alarm_definitions.update(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print(jsonutils.dumps(alarm, indent=2)) | Update the alarm definition. | entailment |
def do_alarm_definition_patch(mc, args):
'''Patch the alarm definition.'''
fields = {}
fields['alarm_id'] = args.id
if args.name:
fields['name'] = args.name
if args.description:
fields['description'] = args.description
if args.expression:
fields['expression'] = args.expression
if args.alarm_actions:
fields['alarm_actions'] = _arg_split_patch_update(args.alarm_actions, patch=True)
if args.ok_actions:
fields['ok_actions'] = _arg_split_patch_update(args.ok_actions, patch=True)
if args.undetermined_actions:
fields['undetermined_actions'] = _arg_split_patch_update(args.undetermined_actions,
patch=True)
if args.actions_enabled:
if args.actions_enabled not in enabled_types:
errmsg = ('Invalid value, not one of [' +
', '.join(enabled_types) + ']')
print(errmsg)
return
fields['actions_enabled'] = args.actions_enabled in ['true', 'True']
if args.severity:
if not _validate_severity(args.severity):
return
fields['severity'] = args.severity
try:
alarm = mc.alarm_definitions.patch(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print(jsonutils.dumps(alarm, indent=2)) | Patch the alarm definition. | entailment |
def do_alarm_list(mc, args):
'''List alarms for this tenant.'''
fields = {}
if args.alarm_definition_id:
fields['alarm_definition_id'] = args.alarm_definition_id
if args.metric_name:
fields['metric_name'] = args.metric_name
if args.metric_dimensions:
fields['metric_dimensions'] = utils.format_dimensions_query(args.metric_dimensions)
if args.state:
if args.state.upper() not in state_types:
errmsg = ('Invalid state, not one of [' +
', '.join(state_types) + ']')
print(errmsg)
return
fields['state'] = args.state
if args.severity:
if not _validate_severity(args.severity):
return
fields['severity'] = args.severity
if args.state_updated_start_time:
fields['state_updated_start_time'] = args.state_updated_start_time
if args.lifecycle_state:
fields['lifecycle_state'] = args.lifecycle_state
if args.link:
fields['link'] = args.link
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
if args.sort_by:
sort_by = args.sort_by.split(',')
for field in sort_by:
field_values = field.lower().split()
if len(field_values) > 2:
print("Invalid sort_by value {}".format(field))
if field_values[0] not in allowed_alarm_sort_by:
print("Sort-by field name {} is not in [{}]".format(field_values[0],
allowed_alarm_sort_by))
return
if len(field_values) > 1 and field_values[1] not in ['asc', 'desc']:
print("Invalid value {}, must be asc or desc".format(field_values[1]))
fields['sort_by'] = args.sort_by
try:
alarm = mc.alarms.list(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(alarm))
return
cols = ['id', 'alarm_definition_id', 'alarm_definition_name', 'metric_name',
'metric_dimensions', 'severity', 'state', 'lifecycle_state', 'link',
'state_updated_timestamp', 'updated_timestamp', "created_timestamp"]
formatters = {
'id': lambda x: x['id'],
'alarm_definition_id': lambda x: x['alarm_definition']['id'],
'alarm_definition_name': lambda x: x['alarm_definition']['name'],
'metric_name': lambda x: format_metric_name(x['metrics']),
'metric_dimensions': lambda x: format_metric_dimensions(x['metrics']),
'severity': lambda x: x['alarm_definition']['severity'],
'state': lambda x: x['state'],
'lifecycle_state': lambda x: x['lifecycle_state'],
'link': lambda x: x['link'],
'state_updated_timestamp': lambda x: x['state_updated_timestamp'],
'updated_timestamp': lambda x: x['updated_timestamp'],
'created_timestamp': lambda x: x['created_timestamp'],
}
if isinstance(alarm, list):
# print the list
utils.print_list(alarm, cols, formatters=formatters)
else:
# add the dictionary to a list, so print_list works
alarm_list = list()
alarm_list.append(alarm)
utils.print_list(alarm_list, cols, formatters=formatters) | List alarms for this tenant. | entailment |
def do_alarm_show(mc, args):
'''Describe the alarm.'''
fields = {}
fields['alarm_id'] = args.id
try:
alarm = mc.alarms.get(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(alarm))
return
# print out detail of a single alarm
formatters = {
'id': utils.json_formatter,
'alarm_definition': utils.json_formatter,
'metrics': utils.json_formatter,
'state': utils.json_formatter,
'links': utils.format_dictlist,
}
utils.print_dict(alarm, formatters=formatters) | Describe the alarm. | entailment |
def do_alarm_update(mc, args):
'''Update the alarm state.'''
fields = {}
fields['alarm_id'] = args.id
if args.state.upper() not in state_types:
errmsg = ('Invalid state, not one of [' +
', '.join(state_types) + ']')
print(errmsg)
return
fields['state'] = args.state
fields['lifecycle_state'] = args.lifecycle_state
fields['link'] = args.link
try:
alarm = mc.alarms.update(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print(jsonutils.dumps(alarm, indent=2)) | Update the alarm state. | entailment |
def do_alarm_delete(mc, args):
'''Delete the alarm.'''
fields = {}
fields['alarm_id'] = args.id
try:
mc.alarms.delete(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
print('Successfully deleted alarm') | Delete the alarm. | entailment |
def do_alarm_count(mc, args):
'''Count alarms.'''
fields = {}
if args.alarm_definition_id:
fields['alarm_definition_id'] = args.alarm_definition_id
if args.metric_name:
fields['metric_name'] = args.metric_name
if args.metric_dimensions:
fields['metric_dimensions'] = utils.format_dimensions_query(args.metric_dimensions)
if args.state:
if args.state.upper() not in state_types:
errmsg = ('Invalid state, not one of [' +
', '.join(state_types) + ']')
print(errmsg)
return
fields['state'] = args.state
if args.severity:
if not _validate_severity(args.severity):
return
fields['severity'] = args.severity
if args.state_updated_start_time:
fields['state_updated_start_time'] = args.state_updated_start_time
if args.lifecycle_state:
fields['lifecycle_state'] = args.lifecycle_state
if args.link:
fields['link'] = args.link
if args.group_by:
group_by = args.group_by.split(',')
if not set(group_by).issubset(set(group_by_types)):
errmsg = ('Invalid group-by, one or more values not in [' +
','.join(group_by_types) + ']')
print(errmsg)
return
fields['group_by'] = args.group_by
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
try:
counts = mc.alarms.count(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(counts))
return
cols = counts['columns']
utils.print_list(counts['counts'], [i for i in range(len(cols))],
field_labels=cols) | Count alarms. | entailment |
def do_alarm_history(mc, args):
'''Alarm state transition history.'''
fields = {}
fields['alarm_id'] = args.id
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
try:
alarm = mc.alarms.history(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
output_alarm_history(args, alarm) | Alarm state transition history. | entailment |
def do_alarm_history_list(mc, args):
'''List alarms state history.'''
fields = {}
if args.dimensions:
fields['dimensions'] = utils.format_parameters(args.dimensions)
if args.starttime:
_translate_starttime(args)
fields['start_time'] = args.starttime
if args.endtime:
fields['end_time'] = args.endtime
if args.limit:
fields['limit'] = args.limit
if args.offset:
fields['offset'] = args.offset
try:
alarm = mc.alarms.history_list(**fields)
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
output_alarm_history(args, alarm) | List alarms state history. | entailment |
def do_notification_type_list(mc, args):
'''List notification types supported by monasca.'''
try:
notification_types = mc.notificationtypes.list()
except (osc_exc.ClientException, k_exc.HttpError) as he:
raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
else:
if args.json:
print(utils.json_formatter(notification_types))
return
else:
formatters = {'types': lambda x: x["type"]}
# utils.print_list(notification_types['types'], ["types"], formatters=formatters)
utils.print_list(notification_types, ["types"], formatters=formatters) | List notification types supported by monasca. | entailment |
def requires_auth(f):
"""A decorator for flask api methods that validates auth0 tokens, hence ensuring
that the user is authenticated. Code coped from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api
"""
@wraps(f)
def requires_auth_decorator(*args, **kwargs):
try:
authenticate_http_request()
except PyMacaronException as e:
return e.http_reply()
return f(*args, **kwargs)
return requires_auth_decorator | A decorator for flask api methods that validates auth0 tokens, hence ensuring
that the user is authenticated. Code coped from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api | entailment |
def add_auth(f):
"""A decorator that adds the authentication header to requests arguments"""
def add_auth_decorator(*args, **kwargs):
token = get_user_token()
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['Authorization'] = "Bearer %s" % token
return f(*args, **kwargs)
return add_auth_decorator | A decorator that adds the authentication header to requests arguments | entailment |
def load_auth_token(token, load=True):
"""Validate an auth0 token. Returns the token's payload, or an exception
of the type:"""
assert get_config().jwt_secret, "No JWT secret configured for pymacaron"
assert get_config().jwt_issuer, "No JWT issuer configured for pymacaron"
assert get_config().jwt_audience, "No JWT audience configured for pymacaron"
log.info("Validating token, using issuer:%s, audience:%s, secret:%s***" % (
get_config().jwt_issuer,
get_config().jwt_audience,
get_config().jwt_secret[1:8],
))
# First extract the issuer
issuer = get_config().jwt_issuer
try:
headers = jwt.get_unverified_header(token)
except jwt.DecodeError:
raise AuthInvalidTokenError('token signature is invalid')
log.debug("Token has headers %s" % headers)
if 'iss' in headers:
issuer = headers['iss']
# Then validate the token against this issuer
log.info("Validating token in issuer %s" % issuer)
try:
payload = jwt.decode(
token,
get_config().jwt_secret,
audience=get_config().jwt_audience,
# Allow for a time difference of up to 5min (300sec)
leeway=300
)
except jwt.ExpiredSignature:
raise AuthTokenExpiredError('Auth token is expired')
except jwt.InvalidAudienceError:
raise AuthInvalidTokenError('incorrect audience')
except jwt.DecodeError:
raise AuthInvalidTokenError('token signature is invalid')
except jwt.InvalidIssuedAtError:
raise AuthInvalidTokenError('Token was issued in the future')
# Save payload to stack
payload['token'] = token
payload['iss'] = issuer
if load:
stack.top.current_user = payload
return payload | Validate an auth0 token. Returns the token's payload, or an exception
of the type: | entailment |
def authenticate_http_request(token=None):
"""Validate auth0 tokens passed in the request's header, hence ensuring
that the user is authenticated. Code copied from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api
Return a PntCommonException if failed to validate authentication.
Otherwise, return the token's payload (Also stored in stack.top.current_user)
"""
if token:
auth = token
else:
auth = request.headers.get('Authorization', None)
if not auth:
auth = request.cookies.get('token', None)
if auth:
auth = unquote_plus(auth)
log.debug("Validating Auth header [%s]" % auth)
if not auth:
raise AuthMissingHeaderError('There is no Authorization header in the HTTP request')
parts = auth.split()
if parts[0].lower() != 'bearer':
raise AuthInvalidTokenError('Authorization header must start with Bearer')
elif len(parts) == 1:
raise AuthInvalidTokenError('Token not found in Authorization header')
elif len(parts) > 2:
raise AuthInvalidTokenError('Authorization header must be Bearer + \s + token')
token = parts[1]
return load_auth_token(token) | Validate auth0 tokens passed in the request's header, hence ensuring
that the user is authenticated. Code copied from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api
Return a PntCommonException if failed to validate authentication.
Otherwise, return the token's payload (Also stored in stack.top.current_user) | entailment |
def generate_token(user_id, expire_in=None, data={}, issuer=None, iat=None):
"""Generate a new JWT token for this user_id. Default expiration date
is 1 year from creation time"""
assert user_id, "No user_id passed to generate_token()"
assert isinstance(data, dict), "generate_token(data=) should be a dictionary"
assert get_config().jwt_secret, "No JWT secret configured in pymacaron"
if not issuer:
issuer = get_config().jwt_issuer
assert issuer, "No JWT issuer configured for pymacaron"
if expire_in is None:
expire_in = get_config().jwt_token_timeout
if iat:
epoch_now = iat
else:
epoch_now = to_epoch(timenow())
epoch_end = epoch_now + expire_in
data['iss'] = issuer
data['sub'] = user_id
data['aud'] = get_config().jwt_audience
data['exp'] = epoch_end
data['iat'] = epoch_now
headers = {
"typ": "JWT",
"alg": "HS256",
"iss": issuer,
}
log.debug("Encoding token with data %s and headers %s (secret:%s****)" % (data, headers, get_config().jwt_secret[0:8]))
t = jwt.encode(
data,
get_config().jwt_secret,
headers=headers,
)
if type(t) is bytes:
t = t.decode("utf-8")
return t | Generate a new JWT token for this user_id. Default expiration date
is 1 year from creation time | entailment |
def get_user_token():
"""Return the authenticated user's auth token"""
if not hasattr(stack.top, 'current_user'):
return ''
current_user = stack.top.current_user
return current_user.get('token', '') | Return the authenticated user's auth token | entailment |
def get_token_issuer():
"""Return the issuer in which this user's token was created"""
try:
current_user = stack.top.current_user
return current_user.get('iss', get_config().jwt_issuer)
except Exception:
pass
return get_config().jwt_issuer | Return the issuer in which this user's token was created | entailment |
def _search(self, limit, format):
'''
Returns a list of result objects, with the url for the next page MsCognitive search url.
'''
limit = min(limit, self.MAX_SEARCH_PER_QUERY)
payload = {
'q' : self.query,
'count' : limit, #currently 50 is max per search.
'offset': self.current_offset,
}
payload.update(self.CUSTOM_PARAMS)
headers = { 'Ocp-Apim-Subscription-Key' : self.api_key }
if not self.silent_fail:
QueryChecker.check_web_params(payload, headers)
response = requests.get(self.QUERY_URL, params=payload, headers=headers)
json_results = self.get_json_results(response)
packaged_results = [NewsResult(single_result_json) for single_result_json in json_results["value"]]
self.current_offset += min(50, limit, len(packaged_results))
return packaged_results | Returns a list of result objects, with the url for the next page MsCognitive search url. | entailment |
def create(self, **kwargs):
"""Create a notification."""
body = self.client.create(url=self.base_url,
json=kwargs)
return body | Create a notification. | entailment |
def get(self, **kwargs):
"""Get the details for a specific notification."""
# NOTE(trebskit) should actually be find_one, but
# monasca does not support expected response format
url = '%s/%s' % (self.base_url, kwargs['notification_id'])
resp = self.client.list(path=url)
return resp | Get the details for a specific notification. | entailment |
def delete(self, **kwargs):
"""Delete a notification."""
url = self.base_url + '/%s' % kwargs['notification_id']
resp = self.client.delete(url=url)
return resp | Delete a notification. | entailment |
def create(self, **kwargs):
"""Create an alarm definition."""
resp = self.client.create(url=self.base_url,
json=kwargs)
return resp | Create an alarm definition. | entailment |
def update(self, **kwargs):
"""Update a specific alarm definition."""
url_str = self.base_url + '/%s' % kwargs['alarm_id']
del kwargs['alarm_id']
resp = self.client.create(url=url_str,
method='PUT',
json=kwargs)
return resp | Update a specific alarm definition. | entailment |
def compute_y(self, coefficients, num_x):
""" Return calculated y-values for the domain of x-values in [1, num_x]. """
y_vals = []
for x in range(1, num_x + 1):
y = sum([c * x ** i for i, c in enumerate(coefficients[::-1])])
y_vals.append(y)
return y_vals | Return calculated y-values for the domain of x-values in [1, num_x]. | entailment |
def compute_err(self, solution_y, coefficients):
"""
Return an error value by finding the absolute difference for each
element in a list of solution-generated y-values versus expected values.
Compounds error by 50% for each negative coefficient in the solution.
solution_y: list of y-values produced by a solution
coefficients: list of polynomial coefficients represented by the solution
return: error value
"""
error = 0
for modeled, expected in zip(solution_y, self.expected_values):
error += abs(modeled - expected)
if any([c < 0 for c in coefficients]):
error *= 1.5
return error | Return an error value by finding the absolute difference for each
element in a list of solution-generated y-values versus expected values.
Compounds error by 50% for each negative coefficient in the solution.
solution_y: list of y-values produced by a solution
coefficients: list of polynomial coefficients represented by the solution
return: error value | entailment |
def eval_fitness(self, chromosome):
"""
Evaluate the polynomial equation using coefficients represented by a
solution/chromosome, returning its error as the solution's fitness.
return: fitness value
"""
coefficients = self.translator.translate_chromosome(chromosome)
solution_y = self.compute_y(coefficients, self.num_x)
fitness = -1 * self.compute_err(solution_y, coefficients)
return fitness | Evaluate the polynomial equation using coefficients represented by a
solution/chromosome, returning its error as the solution's fitness.
return: fitness value | entailment |
def read_config(ip, mac):
"""Read the current configuration of a myStrom device."""
click.echo("Read configuration from %s" % ip)
request = requests.get(
'http://{}/{}/{}/'.format(ip, URI, mac), timeout=TIMEOUT)
print(request.json()) | Read the current configuration of a myStrom device. | entailment |
def write_config(ip, mac, single, double, long, touch):
"""Write the current configuration of a myStrom button."""
click.echo("Write configuration to device %s" % ip)
data = {
'single': single,
'double': double,
'long': long,
'touch': touch,
}
request = requests.post(
'http://{}/{}/{}/'.format(ip, URI, mac), data=data, timeout=TIMEOUT)
if request.status_code == 200:
click.echo("Configuration of %s set" % mac) | Write the current configuration of a myStrom button. | entailment |
def write_ha_config(ip, mac, hass, port, id):
"""Write the configuration for Home Assistant to a myStrom button."""
click.echo("Write configuration for Home Assistant to device %s..." % ip)
action = "get://{1}:{2}/api/mystrom?{0}={3}"
data = {
'single': action.format('single', hass, port, id),
'double': action.format('double', hass, port, id),
'long': action.format('long', hass, port, id),
'touch': action.format('touch', hass, port, id),
}
request = requests.post(
'http://{}/{}/{}/'.format(ip, URI, mac), data=data, timeout=TIMEOUT)
if request.status_code == 200:
click.echo("Configuration for %s set" % ip)
click.echo("After using the push pattern the first time then "
"the myStrom WiFi Button will show up as %s" % id) | Write the configuration for Home Assistant to a myStrom button. | entailment |
def reset_config(ip, mac):
"""Reset the current configuration of a myStrom WiFi Button."""
click.echo("Reset configuration of button %s..." % ip)
data = {
'single': "",
'double': "",
'long': "",
'touch': "",
}
request = requests.post(
'http://{}/{}/{}/'.format(ip, URI, mac), data=data, timeout=TIMEOUT)
if request.status_code == 200:
click.echo("Reset configuration of %s" % mac) | Reset the current configuration of a myStrom WiFi Button. | entailment |
def color(ip, mac, hue, saturation, value):
"""Switch the bulb on with the given color."""
bulb = MyStromBulb(ip, mac)
bulb.set_color_hsv(hue, saturation, value) | Switch the bulb on with the given color. | entailment |
def is_ec2_instance():
"""Try fetching instance metadata at 'curl http://169.254.169.254/latest/meta-data/'
to see if host is on an ec2 instance"""
# Note: this code assumes that docker containers running on ec2 instances
# inherit instances metadata, which they do as of 2016-08-25
global IS_EC2_INSTANCE
if IS_EC2_INSTANCE != -1:
# Returned the cached value
return IS_EC2_INSTANCE
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(0.2)
try:
s.connect(("169.254.169.254", 80))
IS_EC2_INSTANCE = 1
return True
except socket.timeout:
IS_EC2_INSTANCE = 0
return False
except socket.error:
IS_EC2_INSTANCE = 0
return False | Try fetching instance metadata at 'curl http://169.254.169.254/latest/meta-data/'
to see if host is on an ec2 instance | entailment |
def to_epoch(t):
"""Take a datetime, either as a string or a datetime.datetime object,
and return the corresponding epoch"""
if isinstance(t, str):
if '+' not in t:
t = t + '+00:00'
t = parser.parse(t)
elif t.tzinfo is None or t.tzinfo.utcoffset(t) is None:
t = t.replace(tzinfo=pytz.timezone('utc'))
t0 = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, pytz.timezone('utc'))
delta = t - t0
return int(delta.total_seconds()) | Take a datetime, either as a string or a datetime.datetime object,
and return the corresponding epoch | entailment |
def get_container_version():
"""Return the version of the docker container running the present server,
or '' if not in a container"""
root_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
version_file = os.path.join(root_dir, 'VERSION')
if os.path.exists(version_file):
with open(version_file) as f:
return f.read()
return '' | Return the version of the docker container running the present server,
or '' if not in a container | entailment |
def get_app_name():
"""Return a generic name for this app, usefull when reporting to monitoring/logging frameworks"""
conf = get_config()
if is_ec2_instance():
return conf.app_name_live if hasattr(conf, 'app_name_live') else 'PYMACARON_LIVE'
else:
return conf.app_name_dev if hasattr(conf, 'app_name_dev') else 'PYMACARON_DEV' | Return a generic name for this app, usefull when reporting to monitoring/logging frameworks | entailment |
def run_all(plot=True, seed=None):
""" Run all examples. """
if seed is not None:
import random
random.seed(seed)
print("Running biggest_multiple.py")
biggest_multiple.run(plot=plot)
print("Running polynomials.py")
polynomials.run(plot=plot)
print("Running travelling_salesman.py")
travelling_salesman.run(plot=plot)
print("Running irrigation.py")
irrigation.run() | Run all examples. | entailment |
def create_random(cls, length, **kwargs):
"""
Return a new instance of this gene class with random DNA,
with characters chosen from ``GENETIC_MATERIAL_OPTIONS``.
length: the number of characters in the randomized DNA
**kwargs: forwarded to the ``cls`` constructor
"""
dna = ''.join([random.choice(cls.GENETIC_MATERIAL_OPTIONS) for _ in range(length)])
return cls(dna, **kwargs) | Return a new instance of this gene class with random DNA,
with characters chosen from ``GENETIC_MATERIAL_OPTIONS``.
length: the number of characters in the randomized DNA
**kwargs: forwarded to the ``cls`` constructor | entailment |
def mutate(self, p_mutate):
"""
Simulate mutation against a probability.
p_mutate: probability for mutation to occur
"""
new_dna = []
for bit in self.dna:
if random.random() < p_mutate:
new_bit = bit
while new_bit == bit:
new_bit = random.choice(self.GENETIC_MATERIAL_OPTIONS)
bit = new_bit
new_dna.append(bit)
self.dna = ''.join(new_dna) | Simulate mutation against a probability.
p_mutate: probability for mutation to occur | entailment |
def copy(self):
""" Return a new instance of this gene with the same DNA. """
return type(self)(self.dna, suppressed=self.suppressed, name=self.name) | Return a new instance of this gene with the same DNA. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.