| max_stars_repo_path (string, length 3-269) | max_stars_repo_name (string, length 4-119) | max_stars_count (int64, 0-191k) | id (string, length 1-7) | content (string, length 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
solutions/official/11_bruteforce.py
|
kurazu/pycon_quiz
| 0
|
12777151
|
import hashlib
ENCODED = 'sha1$bh9ul$8e808fcea5418aa971311ea1598df65627ea3b98'
_, SALT, PASSWORD = ENCODED.split('$')
def check(possibility):
    # Hash the salt plus the candidate word; encode for Python 3's hashlib.
    return hashlib.sha1((SALT + possibility).encode('utf-8')).hexdigest() == PASSWORD

# Open the word list as text so rows are str, matching SALT.
f = open('solutions/official/CSW12.txt', 'r')
for row in f:
    row = row.rstrip()
    if not row:
        continue
    if ' ' in row:
        word, _ = row.split(' ', 1)
    else:
        word = row
    if check(word.lower()):
        print(u'DECODED {0}'.format(word))
        break
else:
    # The for/else branch runs only if the loop was never broken out of.
    print(u'Solution not found')
f.close()
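# Added note (illustrative): 'sha1$salt$hash' is the old Django password format.
# check('password') computes sha1('bh9ul' + 'password') and compares the hex
# digest against the stored hash, so the loop above is a straight dictionary
# attack over the CSW12 word list.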
| 3.4375
| 3
|
features/features_classes/kl_divergence.py
|
swisscom/ai-research-data-valuation-repository
| 0
|
12777152
|
"""Copyright © 2020-present, Swisscom (Schweiz) AG.
All rights reserved."""
from .feature import Feature
from scipy.stats import entropy
import numpy as np
class KLDivergence(Feature):
    r"""
    A feature that computes, for each label, the KL divergence between the
    logits a classifier assigns to each data point and the mean of those
    logits over the data points sharing that label.

    Parameters
    ----------
    mean_logits : array-like of shape (n_classes, n_classes)
        Mean of the logits of data points having the same label. The first
        dimension indexes labels; the second holds the mean logit for that
        label.

    Attributes
    ----------
    mean_logits : as passed to the constructor
    """

    def __init__(self, mean_logits):
        self.mean_logits = mean_logits

    def augment(self, logits):
        """
        Performs the data augmentation.
        Computes the KL divergence between the parameter ``logits`` and
        the attribute ``mean_logits``.

        :param logits: array-like of shape (n_classes, n_samples)
        :return: C : array-like of shape (n_classes, n_samples)
        """
        return np.array([entropy(logits,
                                 np.repeat(mean_logit[..., np.newaxis],
                                           logits.shape[1], axis=1),
                                 base=2)
                         for mean_logit in self.mean_logits])
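# Added usage sketch (illustrative, not in the original file): with two classes,
#   mean_logits = np.array([[0.9, 0.1], [0.1, 0.9]])
#   feature = KLDivergence(mean_logits)
#   feature.augment(np.array([[0.8, 0.2], [0.2, 0.8]]))  # -> shape (2, 2)
# scipy's entropy(pk, qk, base=2) normalizes its inputs and reduces along
# axis 0, so each mean-logit row yields one divergence per sample column.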
| 3.734375
| 4
|
pkgs/anaconda-navigator-1.1.0-py27_0/lib/python2.7/site-packages/anaconda_navigator/utils/errors.py
|
wangyum/anaconda
| 0
|
12777153
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 Continuum Analytics, Inc.
# May be copied and distributed freely only as part of an Anaconda or
# Miniconda installation.
#
"""
Custom errors on Anaconda Navigator.
"""
class AnacondaNavigatorException(Exception):
    pass
| 1.21875
| 1
|
spectre/trading/stopmodel.py
|
rajach/spectre
| 302
|
12777154
|
<filename>spectre/trading/stopmodel.py
"""
@author: Heerozh (<NAME>)
@copyright: Copyright 2019-2020, Heerozh. All rights reserved.
@license: Apache 2.0
@email: <EMAIL>
"""
import math
def sign(x):
    return math.copysign(1, x)

class PriceTracker:
    def __init__(self, current_price, recorder=max):
        self.last_price = current_price
        self.recorder = recorder
        self.recorded_price = current_price
        self.tracking_position = None

    def update_price(self, last_price):
        self.recorded_price = self.recorder(self.recorded_price, last_price)
        self.last_price = last_price

    def process_split(self, inverse_ratio: float):
        self.recorded_price /= inverse_ratio

# -----------------------------------------------------------------------------

class StopTracker(PriceTracker):
    def __init__(self, current_price, stop_price, callback):
        super().__init__(current_price, lambda _, x: x)
        self._stop_price = stop_price
        self.stop_loss = stop_price < current_price
        self.callback = callback

    @property
    def stop_price(self):
        return self._stop_price

    def fire(self, *args):
        if callable(self.callback):
            return self.callback(*args)
        else:
            return self.callback

    def check_trigger(self, *args):
        if self.stop_loss:
            if self.last_price <= self.stop_price:
                return self.fire(*args)
        else:
            if self.last_price >= self.stop_price:
                return self.fire(*args)
        return False

class StopModel:
    def __init__(self, ratio: float, callback=None):
        self.ratio = ratio
        self.callback = callback

    def new_tracker(self, current_price, inverse):
        if inverse:
            stop_price = current_price * (1 - self.ratio)
        else:
            stop_price = current_price * (1 + self.ratio)
        return StopTracker(current_price, stop_price, self.callback)

# -----------------------------------------------------------------------------

class TrailingStopTracker(StopTracker):
    def __init__(self, current_price, ratio, callback):
        self.ratio = ratio
        stop_price = current_price * (1 + self.ratio)
        StopTracker.__init__(self, current_price, stop_price, callback=callback)
        PriceTracker.__init__(self, current_price, recorder=max if ratio < 0 else min)

    @property
    def stop_price(self):
        return self.recorded_price * (1 + self.ratio)

class TrailingStopModel(StopModel):
    """
    Unlike a trailing stop order, the ratio in this model is relative to the
    highest / lowest price, so -0.1 means the stop price is 90% of the highest
    price seen from now on; 0.1 means the stop price is 110% of the lowest
    price seen from now on.
    """

    def new_tracker(self, current_price, inverse):
        ratio = -self.ratio if inverse else self.ratio
        return TrailingStopTracker(current_price, ratio, self.callback)

# -----------------------------------------------------------------------------

class DecayTrailingStopTracker(TrailingStopTracker):
    def __init__(self, current_price, ratio, target, decay_rate, max_decay, callback):
        self.initial_ratio = ratio
        self.max_decay = max_decay
        self.decay_rate = decay_rate
        self.target = target
        super().__init__(current_price, ratio, callback)

    @property
    def current(self):
        raise NotImplementedError("abstractmethod")

    @property
    def stop_price(self):
        decay = max(self.decay_rate ** (self.current / self.target), self.max_decay)
        self.ratio = self.initial_ratio * decay
        return self.recorded_price * (1 + self.ratio)

class PnLDecayTrailingStopTracker(DecayTrailingStopTracker):
    @property
    def current(self):
        pos = self.tracking_position
        pnl = (self.recorded_price / pos.average_price - 1) * sign(pos.shares)
        pnl = max(pnl, 0) if self.target > 0 else min(pnl, 0)
        return pnl

class PnLDecayTrailingStopModel(StopModel):
    """
    Exponential decay of the stop ratio: `ratio * decay_rate ^ (PnL% / PnL_target%)`.
    For a stop-gain model, `PnL_target` should be a loss target (negative).
    The `ratio` therefore shrinks as PnL% approaches the target, and once PnL%
    exceeds PnL_target%, any small move in the opposite direction triggers the stop.
    """

    def __init__(self, ratio: float, pnl_target: float, callback=None,
                 decay_rate=0.05, max_decay=0):
        super().__init__(ratio, callback)
        self.decay_rate = decay_rate
        self.pnl_target = pnl_target
        self.max_decay = max_decay

    def new_tracker(self, current_price, inverse):
        ratio = -self.ratio if inverse else self.ratio
        return PnLDecayTrailingStopTracker(
            current_price, ratio, self.pnl_target, self.decay_rate, self.max_decay,
            self.callback)

class TimeDecayTrailingStopTracker(DecayTrailingStopTracker):
    @property
    def current(self):
        pos = self.tracking_position
        return pos.period

class TimeDecayTrailingStopModel(StopModel):
    def __init__(self, ratio: float, period_target: 'pd.Timedelta', callback=None,
                 decay_rate=0.05, max_decay=0):
        super().__init__(ratio, callback)
        self.decay_rate = decay_rate
        self.period_target = period_target
        self.max_decay = max_decay

    def new_tracker(self, current_price, inverse):
        ratio = -self.ratio if inverse else self.ratio
        return TimeDecayTrailingStopTracker(
            current_price, ratio, self.period_target, self.decay_rate, self.max_decay,
            self.callback)
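# --- Added usage sketch (not part of the original file) ---
# A long position opened at 100 with ratio=-0.1 trails the recorded high:
if __name__ == "__main__":
    model = TrailingStopModel(ratio=-0.1, callback=lambda: "stopped")
    tracker = model.new_tracker(current_price=100.0, inverse=False)
    tracker.update_price(120.0)      # new high; stop rises to 120 * 0.9
    print(tracker.stop_price)        # 108.0
    tracker.update_price(107.0)      # below the trailing stop
    print(tracker.check_trigger())   # "stopped"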
| 2.484375
| 2
|
src/chameleon/nodes.py
|
fschulze/chameleon
| 0
|
12777155
|
<filename>src/chameleon/nodes.py
from .astutil import Node
class UseExternalMacro(Node):
    """Extend external macro."""
    _fields = "expression", "slots", "extend"

class Sequence(Node):
    """Element sequence."""
    _fields = "items",

    def __nonzero__(self):
        return bool(self.items)

class Content(Node):
    """Content substitution."""
    _fields = "expression", "char_escape", "translate"

class Default(Node):
    """Represents a default value."""
    _fields = "marker",

class CodeBlock(Node):
    _fields = "source",

class Value(Node):
    """Expression object value."""
    _fields = "value",

    def __repr__(self):
        try:
            line, column = self.value.location
        except AttributeError:
            line, column = 0, 0
        return "<%s %r (%d:%d)>" % (
            type(self).__name__, self.value, line, column
        )

class Substitution(Value):
    """Expression value for text substitution."""
    _fields = "value", "char_escape", "default"
    default = None

class Boolean(Value):
    _fields = "value", "s"

class Negate(Node):
    """Wraps an expression with a negation."""
    _fields = "value",

class Element(Node):
    """XML element."""
    _fields = "start", "end", "content"

class DictAttributes(Node):
    """Element attributes from one or more Python dicts."""
    _fields = "expression", "char_escape", "quote", "exclude"

class Attribute(Node):
    """Element attribute."""
    _fields = "name", "expression", "quote", "eq", "space", "filters"

class Start(Node):
    """Start-tag."""
    _fields = "name", "prefix", "suffix", "attributes"

class End(Node):
    """End-tag."""
    _fields = "name", "space", "prefix", "suffix"

class Condition(Node):
    """Node visited only if some condition holds."""
    _fields = "expression", "node", "orelse"

class Identity(Node):
    """Condition expression that is true on identity."""
    _fields = "expression", "value"

class Equality(Node):
    """Condition expression that is true on equality."""
    _fields = "expression", "value"

class Cache(Node):
    """Cache (evaluate only once) the value of ``expression`` inside
    ``node``.
    """
    _fields = "expressions", "node"

class Cancel(Cache):
    pass

class Copy(Node):
    _fields = "expression",

class Assignment(Node):
    """Variable assignment."""
    _fields = "names", "expression", "local"

class Alias(Assignment):
    """Alias assignment.

    Note that ``expression`` should be a cached or global value.
    """
    local = False

class Define(Node):
    """Variable definition in scope."""
    _fields = "assignments", "node"

class Repeat(Assignment):
    """Iterate over provided assignment and repeat body."""
    _fields = "names", "expression", "local", "whitespace", "node"

class Macro(Node):
    """Macro definition."""
    _fields = "name", "body"

class Program(Node):
    _fields = "name", "body"

class Module(Node):
    _fields = "name", "program",

class Context(Node):
    _fields = "node",

class Text(Node):
    """Static text output."""
    _fields = "value",

class Interpolation(Text):
    """String interpolation output."""
    _fields = "value", "braces_required", "translation"

class Translate(Node):
    """Translate node."""
    _fields = "msgid", "node"

class Name(Node):
    """Translation name."""
    _fields = "name", "node"

class Domain(Node):
    """Update translation domain."""
    _fields = "name", "node"

class OnError(Node):
    _fields = "fallback", "name", "node"

class UseInternalMacro(Node):
    """Use internal macro (defined inside same program)."""
    _fields = "name",

class FillSlot(Node):
    """Fill a macro slot."""
    _fields = "name", "node"

class DefineSlot(Node):
    """Define a macro slot."""
    _fields = "name", "node"
| 2.453125
| 2
|
Anime_Downloader.py
|
Noah670/Anime-Downloader
| 3
|
12777156
|
import Anime_Scraper
import Color
import warnings
import ssl
import argparse
import requests
import shutil
import os
import re
import sys
from platform import system
from threading import Thread
from queue import Queue
from art import text2art
directory = ""
threads = 1
token = None
titles = False
args = None
gui = None
class Worker(Thread):
    def __init__(self, tasks):
        Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.start()

    def run(self):
        global gui
        while True:
            func, arg, kargs = self.tasks.get()
            try:
                func(*arg, **kargs)
            except Exception as ex:
                # print(ex)
                Color.printer("ERROR", ex, gui)
            finally:
                self.tasks.task_done()

class ThreadPool:
    def __init__(self, num_threads):
        self.tasks = Queue(num_threads)
        for _ in range(num_threads):
            Worker(self.tasks)

    def add_task(self, func, *arg, **kargs):
        self.tasks.put((func, arg, kargs))

    def map(self, func, args_list):
        for arg in args_list:
            self.add_task(func, arg)

    def wait_completion(self):
        self.tasks.join()
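# Design note (added): ThreadPool bounds its Queue at num_threads, so add_task
# blocks once every worker is busy, and wait_completion() returns only after
# each queued task has called task_done(). download() below uses it like:
#   pool = ThreadPool(threads)
#   pool.map(download_episode, Anime_Scraper.episodes)
#   pool.wait_completion()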
def clean_file_name(file_name):
    for c in r'[]/\;,><&*:%=+@#^()|?^':
        file_name = file_name.replace(c, '')
    return file_name

def download_episode(episode):
    global titles, gui
    Color.printer("INFO", "Downloading " + episode.episode + "...", gui)
    if system() == "Windows":
        episode.title = clean_file_name(episode.title)
    if titles:
        file_name = directory + episode.episode + " - " + episode.title + ".mp4"
    else:
        file_name = directory + episode.episode + ".mp4"
    with requests.get(episode.download_url, stream=True, verify=False) as r:
        with open(file_name, 'wb') as f:
            shutil.copyfileobj(r.raw, f, length=16 * 1024 * 1024)
    Color.printer("INFO", episode.episode + " finished downloading...", gui)

def download():
    global directory, threads, gui
    try:
        _create_unverified_https_context = ssl._create_unverified_context
    except AttributeError:
        # Legacy Python that doesn't verify HTTPS certificates by default
        pass
    else:
        # Handle target environment that doesn't support HTTPS verification
        ssl._create_default_https_context = _create_unverified_https_context
    Color.printer("INFO", "Downloading started...", gui)
    # for episode in Anime_Scraper.episodes:
    #     print("Downloading", episode.episode)
    #     urllib.request.urlretrieve(episode.download_url, directory + episode.episode + ".mp4")
    pool = ThreadPool(threads)
    pool.map(download_episode, Anime_Scraper.episodes)
    pool.wait_completion()
    Color.printer("INFO", "Downloading finished!", gui)

def print_banner():
    banner = text2art("Anime Downloader")
    Color.printer("BANNER", banner)

def main():
    global directory, args, threads, titles, token
    print_banner()
    parser = argparse.ArgumentParser(description="Anime Downloader Command Line Tool")
    argparse.ArgumentParser(description="Help option parser for Anime Downloader Command Line Tool", add_help=False, formatter_class=argparse.HelpFormatter)
    parser.add_argument("-u", "--url", required=True, help="9Anime.to URL for the anime to be downloaded", dest="url")
    parser.add_argument("-n", "--names", required=True, help="https://www.animefillerlist.com/ URL to retrieve episode titles", dest="title_url")
    parser.add_argument("-d", "--directory", required=False, help="Download destination. Will use the current directory if not provided", default="", dest="dir")
    parser.add_argument("-s", "--start", required=False, help="Starting episode", default=1, type=int, dest="start")
    parser.add_argument("-e", "--end", required=False, help="End episode", default=9999, type=int, dest="end")
    parser.add_argument("-c", "--code", required=False, help="Recaptcha answer token code. Insert this if you don't have a 2captcha captcha bypass api_key", default=None, dest="token")
    parser.add_argument("-t", "--threads", required=False, help="Number of parallel downloads. Will download sequentially if not provided", default=1, type=int, dest="threads")
    parser.add_argument("-f", "--filler", required=False, help="Whether fillers are needed", default=True, type=bool, dest="isFiller")
    args = parser.parse_args()
    Anime_Scraper.download_9anime_url = args.url
    Anime_Scraper.title_url = args.title_url
    Anime_Scraper.isFiller = args.isFiller
    # Anime_Scraper.ts_no = args.ts_no
    token = args.token
    directory = args.dir
    threads = args.threads
    if args.title_url:
        titles = True
    if directory != "":
        directory = directory.replace("\\", "/")
        if not directory.endswith("/"):
            directory += "/"
    Anime_Scraper.main(args.start, args.end, token)
    download()

if __name__ == "__main__":
    # suppress warnings
    warnings.filterwarnings("ignore")
    # activate color codes
    if sys.platform.lower() == "win32":
        os.system("color")
    main()
| 2.78125
| 3
|
s3stash/nxstashref_image.py
|
barbarahui/nuxeo-calisphere
| 0
|
12777157
|
<reponame>barbarahui/nuxeo-calisphere
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import os
from s3stash.nxstashref import NuxeoStashRef
from ucldc_iiif.convert import Convert
import logging
PRECONVERT = ['image/jpeg', 'image/gif', 'image/png']
class NuxeoStashImage(NuxeoStashRef):
    ''' Base class for fetching a Nuxeo image file, converting it to jp2
    and stashing it in S3 '''

    def __init__(self,
                 path,
                 bucket='ucldc-private-files/jp2000',
                 region='us-west-2',
                 pynuxrc='~/.pynuxrc',
                 replace=False,
                 **kwargs):
        super(NuxeoStashImage, self).__init__(path, bucket, region, pynuxrc,
                                              replace, **kwargs)
        self.logger = logging.getLogger(__name__)
        self.source_filename = "sourcefile"
        self.source_filepath = os.path.join(self.tmp_dir, self.source_filename)
        self.magick_tiff_filepath = os.path.join(self.tmp_dir, 'magicked.tif')
        self.uncompressed_tiff_filepath = os.path.join(self.tmp_dir,
                                                       'uncompressed.tif')
        self.srgb_tiff_filepath = os.path.join(self.tmp_dir, 'srgb.tiff')
        self.prepped_filepath = os.path.join(self.tmp_dir, 'prepped.tiff')
        name, ext = os.path.splitext(self.source_filename)
        self.jp2_filepath = os.path.join(self.tmp_dir, name + '.jp2')
        self.convert = Convert()

    def nxstashref(self):
        ''' download file, convert to iiif-compatible format,
        and stash on s3 '''
        self.report['converted'] = False
        self.report['stashed'] = False
        # first see if this looks like a valid file to try to convert
        is_image, image_msg = self._is_image()
        self._update_report('is_image',
                            {'is_image': is_image,
                             'msg': image_msg})
        self._update_report('precheck', {'pass': False, 'msg': image_msg})
        if not is_image:
            self._remove_tmp()
            return self.report
        self.has_file = self.dh.has_file(self.metadata)
        self._update_report('has_file', self.has_file)
        if not self.has_file:
            return self.report
        self.file_info = self._get_file_info(self.metadata)
        self.source_mimetype = self.file_info['mimetype']
        passed, precheck_msg = self.convert._pre_check(self.source_mimetype)
        self._update_report('precheck', {'pass': passed, 'msg': precheck_msg})
        if not passed:
            self._remove_tmp()
            return self.report
        self.s3_stashed = self._is_s3_stashed()
        self._update_report('already_s3_stashed', self.s3_stashed)
        if not self.replace and self.s3_stashed:
            return self.report
        # get file details
        self.source_download_url = self.file_info['url']
        # self.source_filename = self.file_info['filename']
        self.source_filename = 'sourcefile'
        self.source_filepath = os.path.join(self.tmp_dir, self.source_filename)
        self._update_report('source_download_url', self.source_download_url)
        self._update_report('source_mimetype', self.source_mimetype)
        self._update_report('filename', self.source_filename)
        self._update_report('filepath', self.source_filepath)
        # grab the file to convert
        self._download_nuxeo_file()
        # convert to jp2
        converted, jp2_report = self._create_jp2()
        self._update_report('create_jp2', jp2_report)
        self._update_report('converted', converted)
        if not converted:
            self._remove_tmp()
            return self.report
        # stash in s3
        stashed, s3_report = self._s3_stash(self.jp2_filepath, 'image/jp2')
        self._update_report('s3_stash', s3_report)
        self._update_report('stashed', stashed)
        self._remove_tmp()
        return self.report

    def _is_image(self):
        ''' do a basic check to see if this is an image '''
        # check Nuxeo object type
        try:
            type = self.metadata['type']
        except KeyError:
            msg = "Could not find Nuxeo metadata type for object. " \
                  "Setting nuxeo type to None"
            return False, msg
        if type in ['SampleCustomPicture']:
            msg = "Nuxeo type is {}".format(type)
            return True, msg
        else:
            msg = "Nuxeo type is {}".format(type)
            return False, msg

    def _create_jp2(self):
        ''' convert a local image to a jp2 '''
        report = {}
        # prep file for conversion to jp2
        if self.source_mimetype in PRECONVERT:
            preconverted, preconvert_msg = self.convert._pre_convert(
                self.source_filepath, self.magick_tiff_filepath)
            report['pre_convert'] = {
                'preconverted': preconverted,
                'msg': preconvert_msg
            }
            tiff_to_srgb, tiff_to_srgb_msg = self.convert._tiff_to_srgb_libtiff(
                self.magick_tiff_filepath, self.prepped_filepath)
            report['tiff_to_srgb'] = {
                'tiff_to_srgb': tiff_to_srgb,
                'msg': tiff_to_srgb_msg
            }
        elif self.source_mimetype == 'image/tiff':
            uncompressed, uncompress_msg = self.convert._pre_convert(
                self.source_filepath, self.uncompressed_tiff_filepath)
            report['uncompress_tiff'] = {
                'uncompressed': uncompressed,
                'msg': uncompress_msg
            }
            tiff_to_srgb, tiff_to_srgb_msg = self.convert._tiff_to_srgb_libtiff(
                self.uncompressed_tiff_filepath, self.prepped_filepath)
            report['tiff_to_srgb'] = {
                'tiff_to_srgb': tiff_to_srgb,
                'msg': tiff_to_srgb_msg
            }
        elif self.source_mimetype in ('image/jp2', 'image/jpx', 'image/jpm'):
            uncompressed, uncompress_msg = self.convert._uncompress_jp2000(
                self.source_filepath, self.prepped_filepath)
            report['uncompress_jp2000'] = {
                'uncompressed': uncompressed,
                'msg': uncompress_msg
            }
        else:
            msg = "Did not know how to prep file with mimetype {} for " \
                  "conversion to jp2.".format(self.source_mimetype)
            self.logger.warning(msg)
            report['status'] = 'unknown mimetype'
            report['msg'] = msg
            # Return False alongside the report so callers can always unpack
            # (converted, report), matching the other return paths.
            return False, report
        # create jp2
        converted, jp2_msg = self.convert._tiff_to_jp2(self.prepped_filepath,
                                                       self.jp2_filepath)
        report['convert_tiff_to_jp2'] = {
            'converted': converted,
            'msg': jp2_msg
        }
        return converted, report

def main(argv=None):
    pass

if __name__ == "__main__":
    sys.exit(main())
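# Added usage sketch (illustrative; the Nuxeo path is hypothetical and
# credentials come from ~/.pynuxrc in the real workflow):
#   stasher = NuxeoStashImage('/asset-library/UCX/some/object', replace=True)
#   report = stasher.nxstashref()
#   print(report['converted'], report['stashed'])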
"""
Copyright © 2014, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the University of California nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
| 2
| 2
|
datahub/sql/controllers/gcp/create_dataset_controller.py
|
arpitkjain7/synapse
| 2
|
12777158
|
<filename>datahub/sql/controllers/gcp/create_dataset_controller.py
from commons.external_call import APIInterface
from sql import config
from sql.crud.dataset_crud import CRUDDataset
from datetime import datetime
class CreateDatasetController:
    def __init__(self):
        self.gcp_config = config.get("core_engine").get("gcp")
        self.CRUDDataset = CRUDDataset()

    def create_text_classification_dataset_controller(self, request):
        uuid = str(int(datetime.now().timestamp()) * 10000)
        create_dataset_url = (
            self.gcp_config.get("automl")
            .get("text")
            .get("create_classification_dataset")
        )
        create_dataset_request = request.dict(exclude_none=True)
        response, status_code = APIInterface.post(
            route=create_dataset_url, data=create_dataset_request
        )
        print(f"{response=}")
        if status_code == 200:
            crud_request = {
                "dataset_id": response.get("dataset_id"),
                "alias_name": create_dataset_request.get("display_name"),
                "UUID": uuid,
                "status": "Created",
                "problem_type": "text_classification",
            }
            print(f"{crud_request=}")
            self.CRUDDataset.create(**crud_request)
            return {
                "dataset_name": create_dataset_request.get("display_name"),
                "dataset_id": response.get("dataset_id"),
            }
        else:
            # TODO: error
            pass
        return {"status": "create dataset failed"}

    def create_ner_dataset_controller(self, request):
        uuid = str(int(datetime.now().timestamp()) * 10000)
        create_dataset_url = (
            self.gcp_config.get("automl").get("text").get("create_ner_dataset")
        )
        create_dataset_request = request.dict(exclude_none=True)
        response, status_code = APIInterface.post(
            route=create_dataset_url, data=create_dataset_request
        )
        if status_code == 200:
            crud_request = {
                "dataset_id": response.get("dataset_id"),
                "alias_name": create_dataset_request.get("display_name"),
                "UUID": uuid,
                "status": "Created",
                "problem_type": "text_ner",
            }
            self.CRUDDataset.create(**crud_request)
            return {
                "dataset_name": create_dataset_request.get("display_name"),
                "dataset_id": response.get("dataset_id"),
            }
        else:
            # TODO: error
            pass
        return {"status": "create dataset failed"}

    def create_image_classification_dataset_controller(self, request):
        uuid = str(int(datetime.now().timestamp()) * 10000)
        create_dataset_url = (
            self.gcp_config.get("automl")
            .get("image")
            .get("create_image_classification_dataset")
        )
        create_dataset_request = request.dict(exclude_none=True)
        response, status_code = APIInterface.post(
            route=create_dataset_url, data=create_dataset_request
        )
        if status_code == 200:
            crud_request = {
                "dataset_id": response.get("dataset_id"),
                "alias_name": create_dataset_request.get("display_name"),
                "UUID": uuid,
                "status": "Created",
                "problem_type": "image_classification",
            }
            self.CRUDDataset.create(**crud_request)
            return {
                "dataset_name": create_dataset_request.get("display_name"),
                "dataset_id": response.get("dataset_id"),
            }
        else:
            # TODO: error
            pass
        return {"status": "create dataset failed"}

    def create_object_detection_dataset_controller(self, request):
        uuid = str(int(datetime.now().timestamp()) * 10000)
        create_dataset_url = (
            self.gcp_config.get("automl")
            .get("image")
            .get("create_object_detection_dataset")
        )
        create_dataset_request = request.dict(exclude_none=True)
        response, status_code = APIInterface.post(
            route=create_dataset_url, data=create_dataset_request
        )
        if status_code == 200:
            crud_request = {
                "dataset_id": response.get("dataset_id"),
                "alias_name": create_dataset_request.get("display_name"),
                "UUID": uuid,
                "status": "Created",
                "problem_type": "object_detection",
            }
            self.CRUDDataset.create(**crud_request)
            return {
                "dataset_name": create_dataset_request.get("display_name"),
                "dataset_id": response.get("dataset_id"),
            }
        else:
            # TODO: error
            pass
        return {"status": "create dataset failed"}
| 2.75
| 3
|
aws_managers/athena/queries/real_column_query.py
|
vahndi/aws-managers
| 0
|
12777159
|
<reponame>vahndi/aws-managers
from aws_managers.athena.queries.column_query import ColumnQuery
from aws_managers.athena.functions.aggregate import AvgMixin, \
    GeometricMeanMixin, MaxMixin, MinMixin, SumMixin

class RealColumnQuery(
    AvgMixin,
    GeometricMeanMixin,
    MaxMixin,
    MinMixin,
    SumMixin,
    ColumnQuery
):
    pass
| 1.679688
| 2
|
ptpy/extensions/nikon.py
|
coon42/sequoia-ptpy
| 46
|
12777160
|
<filename>ptpy/extensions/nikon.py
'''This module extends PTP for Nikon devices.
Use it in a master module that determines the vendor and automatically uses its
extension. This is why inheritance is not explicit.
'''
from ..util import _main_thread_alive
from construct import (
Container, PrefixedArray, Struct,
)
from contextlib import contextmanager
from six.moves.queue import Queue
from threading import Thread, Event
from time import sleep
import atexit
import logging
logger = logging.getLogger(__name__)
__all__ = ('Nikon',)
class Nikon(object):
    '''This class implements Nikon's PTP operations.'''

    def __init__(self, *args, **kwargs):
        logger.debug('Init Nikon')
        super(Nikon, self).__init__(*args, **kwargs)
        # TODO: expose the choice to poll or not Nikon events
        self.__no_polling = False
        self.__nikon_event_shutdown = Event()
        self.__nikon_event_proc = None

    @contextmanager
    def session(self):
        '''
        Manage Nikon session with context manager.
        '''
        # When raw device, do not perform polling
        if self.__no_polling:
            with super(Nikon, self).session():
                yield
            return
        # Within a normal PTP session
        with super(Nikon, self).session():
            # launch a polling thread
            self.__event_queue = Queue()
            self.__nikon_event_proc = Thread(
                name='NikonEvtPolling',
                target=self.__nikon_poll_events
            )
            self.__nikon_event_proc.daemon = False
            atexit.register(self._nikon_shutdown)
            self.__nikon_event_proc.start()
            try:
                yield
            finally:
                self._nikon_shutdown()

    def _shutdown(self):
        self._nikon_shutdown()
        super(Nikon, self)._shutdown()

    def _nikon_shutdown(self):
        logger.debug('Shutdown Nikon events')
        self.__nikon_event_shutdown.set()
        # Only join a running thread.
        if self.__nikon_event_proc and self.__nikon_event_proc.is_alive():
            self.__nikon_event_proc.join(2)

    def _PropertyCode(self, **product_properties):
        props = {
            'ShootingBank': 0xD010,
            'ShootingBankNameA': 0xD011,
            'ShootingBankNameB': 0xD012,
            'ShootingBankNameC': 0xD013,
            'ShootingBankNameD': 0xD014,
            'ResetBank0': 0xD015,
            'RawCompression': 0xD016,
            'WhiteBalanceAutoBias': 0xD017,
            'WhiteBalanceTungstenBias': 0xD018,
            'WhiteBalanceFluorescentBias': 0xD019,
            'WhiteBalanceDaylightBias': 0xD01A,
            'WhiteBalanceFlashBias': 0xD01B,
            'WhiteBalanceCloudyBias': 0xD01C,
            'WhiteBalanceShadeBias': 0xD01D,
            'WhiteBalanceColorTemperature': 0xD01E,
            'WhiteBalancePresetNo': 0xD01F,
            'WhiteBalancePresetName0': 0xD020,
            'WhiteBalancePresetName1': 0xD021,
            'WhiteBalancePresetName2': 0xD022,
            'WhiteBalancePresetName3': 0xD023,
            'WhiteBalancePresetName4': 0xD024,
            'WhiteBalancePresetVal0': 0xD025,
            'WhiteBalancePresetVal1': 0xD026,
            'WhiteBalancePresetVal2': 0xD027,
            'WhiteBalancePresetVal3': 0xD028,
            'WhiteBalancePresetVal4': 0xD029,
            'ImageSharpening': 0xD02A,
            'ToneCompensation': 0xD02B,
            'ColorModel': 0xD02C,
            'HueAdjustment': 0xD02D,
            'NonCPULensDataFocalLength': 0xD02E,
            'NonCPULensDataMaximumAperture': 0xD02F,
            'ShootingMode': 0xD030,
            'JPEGCompressionPolicy': 0xD031,
            'ColorSpace': 0xD032,
            'AutoDXCrop': 0xD033,
            'FlickerReduction': 0xD034,
            'RemoteMode': 0xD035,
            'VideoMode': 0xD036,
            'NikonEffectMode': 0xD037,
            'Mode': 0xD038,
            'CSMMenuBankSelect': 0xD040,
            'MenuBankNameA': 0xD041,
            'MenuBankNameB': 0xD042,
            'MenuBankNameC': 0xD043,
            'MenuBankNameD': 0xD044,
            'ResetBank': 0xD045,
            'A1AFCModePriority': 0xD048,
            'A2AFSModePriority': 0xD049,
            'A3GroupDynamicAF': 0xD04A,
            'A4AFActivation': 0xD04B,
            'FocusAreaIllumManualFocus': 0xD04C,
            'FocusAreaIllumContinuous': 0xD04D,
            'FocusAreaIllumWhenSelected': 0xD04E,
            'FocusAreaWrap': 0xD04F,
            'VerticalAFON': 0xD050,
            'AFLockOn': 0xD051,
            'FocusAreaZone': 0xD052,
            'EnableCopyright': 0xD053,
            'ISOAuto': 0xD054,
            'EVISOStep': 0xD055,
            'EVStep': 0xD056,
            'EVStepExposureComp': 0xD057,
            'ExposureCompensation': 0xD058,
            'CenterWeightArea': 0xD059,
            'ExposureBaseMatrix': 0xD05A,
            'ExposureBaseCenter': 0xD05B,
            'ExposureBaseSpot': 0xD05C,
            'LiveViewAFArea': 0xD05D,
            'AELockMode': 0xD05E,
            'AELAFLMode': 0xD05F,
            'LiveViewAFFocus': 0xD061,
            'MeterOff': 0xD062,
            'SelfTimer': 0xD063,
            'MonitorOff': 0xD064,
            'ImgConfTime': 0xD065,
            'AutoOffTimers': 0xD066,
            'AngleLevel': 0xD067,
            'D1ShootingSpeed': 0xD068,
            'D2MaximumShots': 0xD069,
            'ExposureDelayMode': 0xD06A,
            'LongExposureNoiseReduction': 0xD06B,
            'FileNumberSequence': 0xD06C,
            'ControlPanelFinderRearControl': 0xD06D,
            'ControlPanelFinderViewfinder': 0xD06E,
            'D7Illumination': 0xD06F,
            'NrHighISO': 0xD070,
            'SHSetCHGUIDDisp': 0xD071,
            'ArtistName': 0xD072,
            'NikonCopyrightInfo': 0xD073,
            'FlashSyncSpeed': 0xD074,
            'FlashShutterSpeed': 0xD075,
            'E3AAFlashMode': 0xD076,
            'E4ModelingFlash': 0xD077,
            'BracketSet': 0xD078,
            'E6ManualModeBracketing': 0xD079,
            'BracketOrder': 0xD07A,
            'E8AutoBracketSelection': 0xD07B,
            'BracketingSet': 0xD07C,
            'F1CenterButtonShootingMode': 0xD080,
            'CenterButtonPlaybackMode': 0xD081,
            'F2Multiselector': 0xD082,
            'F3PhotoInfoPlayback': 0xD083,
            'F4AssignFuncButton': 0xD084,
            'F5CustomizeCommDials': 0xD085,
            'ReverseCommandDial': 0xD086,
            'ApertureSetting': 0xD087,
            'MenusAndPlayback': 0xD088,
            'F6ButtonsAndDials': 0xD089,
            'NoCFCard': 0xD08A,
            'CenterButtonZoomRatio': 0xD08B,
            'FunctionButton2': 0xD08C,
            'AFAreaPoint': 0xD08D,
            'NormalAFOn': 0xD08E,
            'CleanImageSensor': 0xD08F,
            'ImageCommentString': 0xD090,
            'ImageCommentEnable': 0xD091,
            'ImageRotation': 0xD092,
            'ManualSetLensNo': 0xD093,
            'MovScreenSize': 0xD0A0,
            'MovVoice': 0xD0A1,
            'MovMicrophone': 0xD0A2,
            'MovFileSlot': 0xD0A3,
            'MovRecProhibitCondition': 0xD0A4,
            'ManualMovieSetting': 0xD0A6,
            'MovQuality': 0xD0A7,
            'LiveViewScreenDisplaySetting': 0xD0B2,
            'MonitorOffDelay': 0xD0B3,
            'Bracketing': 0xD0C0,
            'AutoExposureBracketStep': 0xD0C1,
            'AutoExposureBracketProgram': 0xD0C2,
            'AutoExposureBracketCount': 0xD0C3,
            'WhiteBalanceBracketStep': 0xD0C4,
            'WhiteBalanceBracketProgram': 0xD0C5,
            'LensID': 0xD0E0,
            'LensSort': 0xD0E1,
            'LensType': 0xD0E2,
            'FocalLengthMin': 0xD0E3,
            'FocalLengthMax': 0xD0E4,
            'MaxApAtMinFocalLength': 0xD0E5,
            'MaxApAtMaxFocalLength': 0xD0E6,
            'FinderISODisp': 0xD0F0,
            'AutoOffPhoto': 0xD0F2,
            'AutoOffMenu': 0xD0F3,
            'AutoOffInfo': 0xD0F4,
            'SelfTimerShootNum': 0xD0F5,
            'VignetteCtrl': 0xD0F7,
            'AutoDistortionControl': 0xD0F8,
            'SceneMode': 0xD0F9,
            'SceneMode2': 0xD0FD,
            'SelfTimerInterval': 0xD0FE,
            'NikonExposureTime': 0xD100,
            'ACPower': 0xD101,
            'WarningStatus': 0xD102,
            'MaximumShots': 0xD103,
            'AFLockStatus': 0xD104,
            'AELockStatus': 0xD105,
            'FVLockStatus': 0xD106,
            'AutofocusLCDTopMode2': 0xD107,
            'AutofocusArea': 0xD108,
            'FlexibleProgram': 0xD109,
            'LightMeter': 0xD10A,
            'RecordingMedia': 0xD10B,
            'USBSpeed': 0xD10C,
            'CCDNumber': 0xD10D,
            'CameraOrientation': 0xD10E,
            'GroupPtnType': 0xD10F,
            'FNumberLock': 0xD110,
            'ExposureApertureLock': 0xD111,
            'TVLockSetting': 0xD112,
            'AVLockSetting': 0xD113,
            'IllumSetting': 0xD114,
            'FocusPointBright': 0xD115,
            'ExternalFlashAttached': 0xD120,
            'ExternalFlashStatus': 0xD121,
            'ExternalFlashSort': 0xD122,
            'ExternalFlashMode': 0xD123,
            'ExternalFlashCompensation': 0xD124,
            'NewExternalFlashMode': 0xD125,
            'FlashExposureCompensation': 0xD126,
            'HDRMode': 0xD130,
            'HDRHighDynamic': 0xD131,
            'HDRSmoothing': 0xD132,
            'OptimizeImage': 0xD140,
            'Saturation': 0xD142,
            'BWFillerEffect': 0xD143,
            'BWSharpness': 0xD144,
            'BWContrast': 0xD145,
            'BWSettingType': 0xD146,
            'Slot2SaveMode': 0xD148,
            'RawBitMode': 0xD149,
            'ActiveDLighting': 0xD14E,
            'FlourescentType': 0xD14F,
            'TuneColourTemperature': 0xD150,
            'TunePreset0': 0xD151,
            'TunePreset1': 0xD152,
            'TunePreset2': 0xD153,
            'TunePreset3': 0xD154,
            'TunePreset4': 0xD155,
            'BeepOff': 0xD160,
            'AutofocusMode': 0xD161,
            'AFAssist': 0xD163,
            'PADVPMode': 0xD164,
            'ImageReview': 0xD165,
            'AFAreaIllumination': 0xD166,
            'NikonFlashMode': 0xD167,
            'FlashCommanderMode': 0xD168,
            'FlashSign': 0xD169,
            '_ISOAuto': 0xD16A,
            'RemoteTimeout': 0xD16B,
            'GridDisplay': 0xD16C,
            'FlashModeManualPower': 0xD16D,
            'FlashModeCommanderPower': 0xD16E,
            'AutoFP': 0xD16F,
            'DateImprintSetting': 0xD170,
            'DateCounterSelect': 0xD171,
            'DateCountData': 0xD172,
            'DateCountDisplaySetting': 0xD173,
            'RangeFinderSetting': 0xD174,
            'CSMMenu': 0xD180,
            'WarningDisplay': 0xD181,
            'BatteryCellKind': 0xD182,
            'ISOAutoHiLimit': 0xD183,
            'DynamicAFArea': 0xD184,
            'ContinuousSpeedHigh': 0xD186,
            'InfoDispSetting': 0xD187,
            'PreviewButton': 0xD189,
            'PreviewButton2': 0xD18A,
            'AEAFLockButton2': 0xD18B,
            'IndicatorDisp': 0xD18D,
            'CellKindPriority': 0xD18E,
            'BracketingFramesAndSteps': 0xD190,
            'LiveViewMode': 0xD1A0,
            'LiveViewDriveMode': 0xD1A1,
            'LiveViewStatus': 0xD1A2,
            'LiveViewImageZoomRatio': 0xD1A3,
            'LiveViewProhibitCondition': 0xD1A4,
            'MovieShutterSpeed': 0xD1A8,
            'MovieFNumber': 0xD1A9,
            'MovieISO': 0xD1AA,
            'LiveViewMovieMode': 0xD1AC,
            'ExposureDisplayStatus': 0xD1B0,
            'ExposureIndicateStatus': 0xD1B1,
            'InfoDispErrStatus': 0xD1B2,
            'ExposureIndicateLightup': 0xD1B3,
            'FlashOpen': 0xD1C0,
            'FlashCharged': 0xD1C1,
            'FlashMRepeatValue': 0xD1D0,
            'FlashMRepeatCount': 0xD1D1,
            'FlashMRepeatInterval': 0xD1D2,
            'FlashCommandChannel': 0xD1D3,
            'FlashCommandSelfMode': 0xD1D4,
            'FlashCommandSelfCompensation': 0xD1D5,
            'FlashCommandSelfValue': 0xD1D6,
            'FlashCommandAMode': 0xD1D7,
            'FlashCommandACompensation': 0xD1D8,
            'FlashCommandAValue': 0xD1D9,
            'FlashCommandBMode': 0xD1DA,
            'FlashCommandBCompensation': 0xD1DB,
            'FlashCommandBValue': 0xD1DC,
            'ApplicationMode': 0xD1F0,
            'ActiveSlot': 0xD1F2,
            'ActivePicCtrlItem': 0xD200,
            'ChangePicCtrlItem': 0xD201,
            'MovieNrHighISO': 0xD236,
            'D241': 0xD241,
            'D244': 0xD244,
            'D247': 0xD247,
            'GUID': 0xD24F,
            'D250': 0xD250,
            'D251': 0xD251,
            'ISO': 0xF002,
            'ImageCompression': 0xF009,
            'NikonImageSize': 0xF00A,
            'NikonWhiteBalance': 0xF00C,
            # TODO: Are these redundant? Or product-specific?
            '_LongExposureNoiseReduction': 0xF00D,
            'HiISONoiseReduction': 0xF00E,
            '_ActiveDLighting': 0xF00F,
            '_MovQuality': 0xF01C,
        }
        product_properties.update(props)
        return super(Nikon, self)._PropertyCode(
            **product_properties
        )

    def _OperationCode(self, **product_operations):
        return super(Nikon, self)._OperationCode(
            GetProfileAllData=0x9006,
            SendProfileData=0x9007,
            DeleteProfile=0x9008,
            SetProfileData=0x9009,
            AdvancedTransfer=0x9010,
            GetFileInfoInBlock=0x9011,
            Capture=0x90C0,
            AFDrive=0x90C1,
            SetControlMode=0x90C2,
            DelImageSDRAM=0x90C3,
            GetLargeThumb=0x90C4,
            CurveDownload=0x90C5,
            CurveUpload=0x90C6,
            CheckEvents=0x90C7,
            DeviceReady=0x90C8,
            SetPreWBData=0x90C9,
            GetVendorPropCodes=0x90CA,
            AFCaptureSDRAM=0x90CB,
            GetPictCtrlData=0x90CC,
            SetPictCtrlData=0x90CD,
            DelCstPicCtrl=0x90CE,
            GetPicCtrlCapability=0x90CF,
            GetPreviewImg=0x9200,
            StartLiveView=0x9201,
            EndLiveView=0x9202,
            GetLiveViewImg=0x9203,
            MfDrive=0x9204,
            ChangeAFArea=0x9205,
            AFDriveCancel=0x9206,
            InitiateCaptureRecInMedia=0x9207,
            GetVendorStorageIDs=0x9209,
            StartMovieRecInCard=0x920A,
            EndMovieRec=0x920B,
            TerminateCapture=0x920C,
            GetDevicePTPIPInfo=0x90E0,
            GetPartialObjectHiSpeed=0x9400,
            GetDevicePropEx=0x9504,
            **product_operations
        )

    def _ResponseCode(self, **product_responses):
        return super(Nikon, self)._ResponseCode(
            HardwareError=0xA001,
            OutOfFocus=0xA002,
            ChangeCameraModeFailed=0xA003,
            InvalidStatus=0xA004,
            SetPropertyNotSupported=0xA005,
            WbResetError=0xA006,
            DustReferenceError=0xA007,
            ShutterSpeedBulb=0xA008,
            MirrorUpSequence=0xA009,
            CameraModeNotAdjustFNumber=0xA00A,
            NotLiveView=0xA00B,
            MfDriveStepEnd=0xA00C,
            MfDriveStepInsufficiency=0xA00E,
            AdvancedTransferCancel=0xA022,
            **product_responses
        )

    def _EventCode(self, **product_events):
        return super(Nikon, self)._EventCode(
            ObjectAddedInSDRAM=0xC101,
            CaptureCompleteRecInSdram=0xC102,
            AdvancedTransfer=0xC103,
            PreviewImageAdded=0xC104,
            **product_events
        )

    def _FilesystemType(self, **product_filesystem_types):
        return super(Nikon, self)._FilesystemType(
            **product_filesystem_types
        )

    def _NikonEvent(self):
        return PrefixedArray(
            self._UInt16,
            Struct(
                'EventCode' / self._EventCode,
                'Parameter' / self._UInt32,
            )
        )

    def _set_endian(self, endian):
        logger.debug('Set Nikon endianness')
        super(Nikon, self)._set_endian(endian)
        self._NikonEvent = self._NikonEvent()

    # TODO: Add event queue over all transports and extensions.
    def check_events(self):
        '''Check Nikon specific event'''
        ptp = Container(
            OperationCode='CheckEvents',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[]
        )
        response = self.recv(ptp)
        return self._parse_if_data(response, self._NikonEvent)

    # TODO: Provide a single camera agnostic command that will trigger a camera
    def capture(self):
        '''Nikon specific capture'''
        ptp = Container(
            OperationCode='Capture',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[]
        )
        return self.mesg(ptp)

    def af_capture_sdram(self):
        '''Nikon specific autofocus and capture to SDRAM'''
        ptp = Container(
            OperationCode='AFCaptureSDRAM',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[]
        )
        return self.mesg(ptp)

    def event(self, wait=False):
        '''Check Nikon or PTP events

        If `wait` this function is blocking. Otherwise it may return None.
        '''
        # TODO: Do something reasonable on wait=True
        evt = None
        timeout = None if wait else 0.001
        # TODO: Join queues to preserve order of Nikon and PTP events.
        if not self.__event_queue.empty():
            evt = self.__event_queue.get(block=not wait, timeout=timeout)
        else:
            evt = super(Nikon, self).event(wait=wait)
        return evt

    def __nikon_poll_events(self):
        '''Poll events, adding them to a queue.'''
        while (not self.__nikon_event_shutdown.is_set() and
               _main_thread_alive()):
            try:
                evts = self.check_events()
                if evts:
                    for evt in evts:
                        logger.debug('Event queued')
                        self.__event_queue.put(evt)
            except Exception as e:
                logger.error(e)
            sleep(3)
        self.__nikon_event_shutdown.clear()
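# Added usage sketch (illustrative; composition with a transport is assumed
# from the module docstring: the master module detects the vendor and mixes
# this class in):
#   from ptpy import PTPy
#   camera = PTPy()
#   with camera.session():
#       camera.capture()               # Nikon operation 0x90C0
#       evt = camera.event(wait=True)  # drains the Nikon CheckEvents queue first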
| 2.203125
| 2
|
Change Return Program/changereturn.py
|
yashpatel123a/Mini-Projects
| 0
|
12777161
|
def change_return(amount, currency):
    currency.sort(reverse=True)
    counter = 0
    amount_counter = [0] * len(currency)
    while amount > 0:
        amount_counter[counter] = int(amount / currency[counter])
        amount -= amount_counter[counter] * currency[counter]
        counter += 1
    return [(currency[i], amount_counter[i]) for i in range(len(currency))]

if __name__ == '__main__':
    currency = [1, 5, 10, 25]
    currency_name = ['quarter', 'dime', 'nickel', 'pennies']
    amount = int(input('Enter an amount: '))
    change = change_return(amount, currency)
    for i in range(len(currency) - 1, -1, -1):
        print(currency_name[i] + f'({change[i][0]}) - {change[i][1]}')
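# Worked example (added): change_return(87, [1, 5, 10, 25]) sorts the
# denominations to [25, 10, 5, 1] and greedily takes 3 quarters (87 -> 12),
# 1 dime (12 -> 2), 0 nickels, and 2 pennies, returning
# [(25, 3), (10, 1), (5, 0), (1, 2)].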
| 3.71875
| 4
|
AdaBoost/adaboost.py
|
JNero/Machine-Learning-in-action
| 0
|
12777162
|
<reponame>JNero/Machine-Learning-in-action
# -*- coding: utf-8 -*-
# @Time : 17-9-26 2:47 PM
# @Author : QIAO
| 1.0625
| 1
|
scripts/pick_model.py
|
jowagner/uuparser
| 86
|
12777163
|
import os
import sys
# usage:
#   python pick_model.py lang_codes.txt trained_models_dir [outdir]
# where the file contains iso codes of languages for which you want a model
# and trained_models_dir is a directory containing trained models and their
# evaluation on the dev set for the languages of interest
if len(sys.argv) < 3:
    raise Exception("You must specify at least a file with language codes and a directory with models")
else:
    include_file = sys.argv[1]
    trained_models_dir = sys.argv[2].strip("/")

# make sure there are no annoying spaces
print(f'Removing leading and trailing spaces from {include_file}')
os.system(f"sed -i 's/\\s*//g' {include_file}")

print('Finding best iteration for each language and storing in best_epochs.txt')
cmd = f'./scripts/best_res.sh {include_file} {trained_models_dir} >best_epochs.txt'
os.system(cmd)

d = {}
outdir = trained_models_dir
if len(sys.argv) == 4:
    outdir = sys.argv[3]
    if not os.path.exists(outdir):
        print(f'Creating directory {outdir}')
        os.mkdir(outdir)

for line in open('best_epochs.txt', 'r'):
    try:
        needed = line.split('dev_epoch_')
        lang = needed[0].split(trained_models_dir)[1].strip("/")
        epoch = needed[1].split(".conllu")[0]
        d[lang] = epoch
    except IndexError:
        lang = line.strip()
        cmd = f'./scripts/get_last_epoch.sh {lang} {trained_models_dir}'
        lastEpoch = os.popen(cmd)

for lang in d:
    lpath = outdir + '/' + lang + '/'
    if not os.path.exists(lpath):
        print(f'Creating directory {lpath}')
        os.mkdir(lpath)
    infile = trained_models_dir + '/' + lang + '/barchybrid.model' + str(d[lang])
    outfile = lpath + 'barchybrid.model'
    if os.path.exists(infile):
        print(f'Copying {infile} to {outfile}')
        os.system(f'cp {infile} {outfile}')
    if outdir != trained_models_dir:
        paramfile = trained_models_dir + '/' + lang + '/params.pickle'
        print(f'Copying {paramfile} to {lpath}')
        os.system(f'cp {paramfile} {lpath}')
| 2.859375
| 3
|
e/mail-relay/web/apps/mail/migrations/0099_auto_20170522_1050.py
|
zhouli121018/nodejsgm
| 0
|
12777164
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):

    dependencies = [
        ('mail', '0098_auto_20170522_0940'),
    ]

    operations = [
        migrations.AddField(
            model_name='checksettings',
            name='hrisk_diff_sender_count',
            field=models.IntegerField(default=3, help_text='\u4e00\u5929\u5185 \u540c\u4e00\u53d1\u4ef6\u4eba\u540d\u79f0\u4e0d\u540c\u503c\u8d85\u8fc7N\u6b21\uff0c \u5219\u5728\u4ee5\u540e\u7684M\u65f6\u95f4\u5185\u62e6\u622a\u5176\u6240\u6709\u90ae\u4ef6\uff0c\u5e76\u653e\u5165\u201c\u9ad8\u5371\u53d1\u4ef6\u4eba\u201d\u8fdb\u884c\u4eba\u5de5\u5ba1\u6838', verbose_name='\u540d\u79f0\u4e0d\u540c\u7684\u9ad8\u5371\u53d1\u4ef6\u4eba(\u4e0d\u540c\u6b21\u6570)'),
        ),
        migrations.AddField(
            model_name='checksettings',
            name='hrisk_diff_sender_time',
            field=models.IntegerField(default=600, help_text='\u5355\u4f4d:\u5206\u949f, \u4e00\u5929\u5185 \u540c\u4e00\u53d1\u4ef6\u4eba\u540d\u79f0\u4e0d\u540c\u503c\u8d85\u8fc7N\u6b21\uff0c \u5219\u5728\u4ee5\u540e\u7684M\u65f6\u95f4\u5185\u62e6\u622a\u5176\u6240\u6709\u90ae\u4ef6\uff0c\u5e76\u653e\u5165\u201c\u9ad8\u5371\u53d1\u4ef6\u4eba\u201d\u8fdb\u884c\u4eba\u5de5\u5ba1\u6838', verbose_name='\u540d\u79f0\u4e0d\u540c\u7684\u9ad8\u5371\u53d1\u4ef6\u4eba(\u62e6\u622a\u65f6\u95f4)'),
        ),
    ]
| 1.578125
| 2
|
scripts/generate_accelerated_bindings.py
|
miiklay/pymapd
| 0
|
12777165
|
"""
Generate Accelerated Thrift bindings
"""
import os
import argparse
import re
import shutil
import subprocess
import sys
xpr_hints = re.compile(".*completion_hints.*")
def parse_args(args=None):
    parser = argparse.ArgumentParser(description='Generate accelerated Thrift bindings')
    parser.add_argument('infile', nargs='?', type=argparse.FileType('r'),
                        default=sys.stdin, help="mapd.thrift file")
    parser.add_argument('outfile', nargs='?', default="mapd.thrift",
                        help="Patched mapd.thrift file")
    return parser.parse_args(args)

def thrift_gen(spec):
    subprocess.check_output(['thrift', '-gen', 'py', '-r', spec])

def main(args=None):
    args = parse_args(args)
    thrift = args.infile.readlines()
    new = [x for x in thrift if not xpr_hints.match(x)]
    with open(args.outfile, 'wt') as f:
        f.write(''.join(new))
    try:
        thrift_gen(args.outfile)
        shutil.rmtree("mapd", ignore_errors=True)
        shutil.copytree(os.path.join("gen-py", "mapd"), "mapd")
    finally:
        os.remove(args.outfile)
        shutil.rmtree("gen-py")

if __name__ == '__main__':
    sys.exit(main(None))
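# Added example invocation (paths are illustrative):
#   python scripts/generate_accelerated_bindings.py path/to/mapd.thrift mapd.thrift
# This drops every line matching `completion_hints`, regenerates the Python
# bindings with `thrift -gen py -r`, and swaps the resulting gen-py/mapd
# package into ./mapd before cleaning up.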
| 2.3125
| 2
|
readtagger/cli/__init__.py
|
bardin-lab/read_tagger
| 3
|
12777166
|
<filename>readtagger/cli/__init__.py
"""CLI module for readtagger."""
| 1.164063
| 1
|
wikipedia/statistics/helpers.py
|
vsoch/arxiv-equations
| 2
|
12777167
|
<reponame>vsoch/arxiv-equations
# helpers.py, useful helper functions for wikipedia analysis
# We want to represent known symbols (starting with \) as words, and the rest as characters
import re

def extract_tokens(tex):
    '''walk through a LaTeX string, and grab chunks that correspond with known
    identifiers, meaning anything that starts with \ and ends with one or
    more whitespaces, a bracket, a ^ or underscore.
    '''
    regexp = r'\\(.*?)(\w+|\{|\(|\_|\^)'
    tokens = []
    while re.search(regexp, tex) and len(tex) > 0:
        match = re.search(regexp, tex)
        # Only take the chunk if it's starting at 0
        if match.start() == 0:
            tokens.append(tex[match.start():match.end()])
            # And update the string
            tex = tex[match.end():]
        # Otherwise, add the next character to the tokens list
        else:
            tokens.append(tex[0])
            tex = tex[1:]
    # When we get down here, the regexp doesn't match anymore! Add remaining
    if len(tex) > 0:
        tokens = tokens + [t for t in tex]
    return tokens

def update_method_name(methods, old_name, new_name):
    '''update the set by removing an old name (usually a disambiguation error)
    and adding a new name

    Parameters
    ==========
    methods: the set of methods
    old_name: the name to remove
    new_name: the name to add
    '''
    if old_name in methods:
        methods.remove(old_name)
        methods.add(new_name)
    return methods
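# Added example (illustrative): extract_tokens walks left to right, emitting a
# chunk whenever a match starts at position 0 and a single character otherwise:
#   extract_tokens(r"\alpha + \beta_{i}")
#   # -> ['\\alpha', ' ', '+', ' ', '\\beta_', '{', 'i', '}']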
| 2.90625
| 3
|
data-structure-exp-toys/exp2/test.py
|
taoky/gadgets
| 5
|
12777168
|
from huffman import HuffZipFile
from os import listdir
from os.path import isfile, join, splitext
import hashlib
import time
def get_md5(path):
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        while True:
            data = f.read(4096)
            if not data:
                break
            md5.update(data)
    return md5.hexdigest()

if __name__ == "__main__":
    for f in listdir("testcase/"):
        path = join("testcase/", f)
        if isfile(path) and splitext(path)[1] != ".bak" and splitext(path)[1] != ".huff":
            print("Start {}".format(f))
            start_time = time.time()
            from_file = open(path, "rb")
            to_file = open(join("testcase/", splitext(f)[0] + ".huff"), "wb")
            zip_file = HuffZipFile(decompress=False, file_stream=from_file)
            zip_file.compress(to_file)
            del zip_file
            # quit()
            print("File {} has finished compressing. Time {}. Decompressing...".format(f, time.time() - start_time))
            start_time = time.time()
            from_file = open(join("testcase/", splitext(f)[0] + ".huff"), "rb")
            to_file = open(path + ".bak", "wb")
            zip_file = HuffZipFile(decompress=True, file_stream=from_file)
            zip_file.decompress(to_file)
            del zip_file
            print("File {} finished decompressing! Time {}.".format(f, time.time() - start_time))
            md5_1 = get_md5(path)
            md5_2 = get_md5(path + ".bak")
            print("Result of {}".format(f))
            if md5_1 != md5_2:
                print("Wrong!")
            else:
                print("Right!")
            print("")
| 2.734375
| 3
|
snow-dots/utilities/mouse.py
|
cpizzica/Lab-Matlab-Control
| 6
|
12777169
|
#! python3
import pyautogui, sys, time
#print('Press Ctrl-C to quit.')
while True:
    x, y = pyautogui.position()
    positionStr = ',' + str(x).rjust(4) + ',' + str(y).rjust(4)
    print(time.time(), positionStr, '\n', flush=True)
    time.sleep(0.05)
| 2.984375
| 3
|
conan/recipes/android-sdk-tools/conanfile.py
|
alexa/aac-sdk
| 139
|
12777170
|
<reponame>alexa/aac-sdk
from conans import ConanFile, tools, RunEnvironment
import os, logging
class AndroidSdkToolsConanFile(ConanFile):
    name = "android-sdk-tools"
    version = "4.0"
    user = "aac-sdk"
    channel = "stable"
    no_copy_source = True
    exports_sources = ["cmake-wrapper.cmd", "cmake-wrapper"]
    settings = "os", "arch", "compiler", "build_type"
    requires = ["zulu-openjdk/11.0.8"]
    options = {
        "sdk_version": "ANY",
        "ndk_version": "ANY",
        "android_stl": ["c++_shared", "c++_static"]
    }
    default_options = {
        "sdk_version": "7302050",
        "ndk_version": "20.0.5594570",
        "android_stl": "c++_shared"
    }

    @staticmethod
    def chmod_plus_x(filename):
        if os.name == "posix":
            os.chmod(filename, os.stat(filename).st_mode | 0o111)

    def fix_permissions(self, root_folder):
        if os.name != "posix":
            return
        for root, _, files in os.walk(root_folder):
            for filename in files:
                filename = os.path.join(root, filename)
                with open(filename, "rb") as f:
                    sig = f.read(4)
                    if type(sig) is str:
                        sig = [ord(s) for s in sig]
                    else:
                        sig = [s for s in sig]
                if len(sig) > 2 and sig[0] == 0x23 and sig[1] == 0x21:
                    logging.info(f"chmod on script file: {filename}")
                    self.chmod_plus_x(filename)
                elif sig == [0x7F, 0x45, 0x4C, 0x46]:
                    logging.info(f"chmod on ELF file: {filename}")
                    self.chmod_plus_x(filename)
                elif sig == [0xCA, 0xFE, 0xBA, 0xBE] or \
                        sig == [0xBE, 0xBA, 0xFE, 0xCA] or \
                        sig == [0xFE, 0xED, 0xFA, 0xCF] or \
                        sig == [0xCF, 0xFA, 0xED, 0xFE] or \
                        sig == [0xFE, 0xEF, 0xFA, 0xCE] or \
                        sig == [0xCE, 0xFA, 0xED, 0xFE]:
                    logging.info(f"chmod on Mach-O file: {filename}")
                    self.chmod_plus_x(filename)

    @property
    def _build_os(self):
        settings_build = getattr(self, "settings_build", None)
        return settings_build.os if settings_build else self.settings.os

    def source(self):
        if self._build_os == "Macos":
            package = f"commandlinetools-mac-{self.options.sdk_version}_latest"
        elif self._build_os == "Linux":
            package = f"commandlinetools-linux-{self.options.sdk_version}_latest"
        else:
            raise Exception(f"settings.os not supported: {self._build_os}")
        # download the command line tools package
        tools.get(f"https://dl.google.com/android/repository/{package}.zip")

    def package(self):
        self.copy("*", src="cmdline-tools", dst="cmdline-tools")
        self.copy("cmake-wrapper.cmd")
        self.copy("cmake-wrapper")
        # fix executable permissions for command line tools
        self.fix_permissions(self.package_folder)
        sdk_manager = os.path.join(self.package_folder, "cmdline-tools", "bin", "sdkmanager")
        auto_accept_licenses = os.getenv("BUILDER_ACCEPT_LICENSES", "False").lower() == "true"
        env_run = RunEnvironment(self)
        with tools.environment_append(env_run.vars):
            # check the license -- needs to be accepted once
            check_yes_opt = f"yes | {sdk_manager}" if auto_accept_licenses else sdk_manager
            self.run(f"{check_yes_opt} --sdk_root={self.package_folder} --licenses", run_environment=True)
            # install android sdk
            self.run(f"{sdk_manager} --sdk_root={self.package_folder} 'platform-tools' 'platforms;android-{self.settings_target.os.api_level}'", run_environment=True)
            # install android ndk
            self.run(f"{sdk_manager} --sdk_root={self.package_folder} --install 'ndk;{self.options.ndk_version}'", run_environment=True)

    @property
    def _platform(self):
        return {"Windows": "windows",
                "Macos": "darwin",
                "Linux": "linux"}.get(str(self._build_os))

    @property
    def _android_abi(self):
        return {"x86": "x86",
                "x86_64": "x86_64",
                "armv7hf": "armeabi-v7a",
                "armv8": "arm64-v8a"}.get(str(self.settings_target.arch))

    @property
    def _llvm_triplet(self):
        arch = {'armv7hf': 'arm',
                'armv8': 'aarch64',
                'x86': 'i686',
                'x86_64': 'x86_64'}.get(str(self.settings_target.arch))
        abi = 'androideabi' if self.settings_target.arch == 'armv7hf' else 'android'
        return f"{arch}-linux-{abi}"

    @property
    def _clang_triplet(self):
        arch = {'armv7hf': 'armv7a',
                'armv8': 'aarch64',
                'x86': 'i686',
                'x86_64': 'x86_64'}.get(str(self.settings_target.arch))
        abi = 'androideabi' if self.settings_target.arch == 'armv7hf' else 'android'
        return f"{arch}-linux-{abi}"

    @property
    def _sdk_home(self):
        return os.path.join(self.package_folder)

    @property
    def _ndk_home(self):
        return os.path.join(self.package_folder, "ndk", str(self.options.ndk_version))

    @property
    def _ndk_root(self):
        return os.path.join(self._ndk_home, "toolchains", "llvm", "prebuilt", f"{self._platform}-x86_64")

    def _tool_name(self, tool):
        if 'clang' in tool:
            suffix = '.cmd' if self._build_os == 'Windows' else ''
            return f"{self._clang_triplet}{self.settings_target.os.api_level}-{tool}{suffix}"
        else:
            suffix = '.exe' if self._build_os == 'Windows' else ''
            return f"{self._llvm_triplet}-{tool}{suffix}"

    def _define_tool_var(self, name, value):
        ndk_bin = os.path.join(self._ndk_root, 'bin')
        path = os.path.join(ndk_bin, self._tool_name(value))
        logging.info(f"Creating {name} environment variable: {path}")
        return path

    def package_info(self):
        # set the android sdk environment variables
        logging.info(f"Creating ANDROID_SDK_ROOT environment variable: {self._sdk_home}")
        self.env_info.ANDROID_SDK_ROOT = self._sdk_home
        # test shall pass, so this runs also in the build as build requirement context
        # ndk-build: https://developer.android.com/ndk/guides/ndk-build
        self.env_info.PATH.append(self._ndk_home)
        # You should use the ANDROID_NDK_ROOT environment variable to indicate where the NDK is located.
        # That's what most NDK-related scripts use (inside the NDK, and outside of it).
        # https://groups.google.com/g/android-ndk/c/qZjhOaynHXc
        logging.info(f"Creating ANDROID_NDK_ROOT environment variable: {self._ndk_home}")
        self.env_info.ANDROID_NDK_ROOT = self._ndk_home
        # Gradle is complaining about the ANDROID_NDK_HOME environment variable:
        #   WARNING: Support for ANDROID_NDK_HOME is deprecated and will be removed in the future.
        #   Use android.ndkVersion in build.gradle instead.
        # logging.info(f"Creating ANDROID_NDK_HOME environment variable: {self._ndk_home}")
        # self.env_info.ANDROID_NDK_HOME = self._ndk_home
        logging.info(f"Creating NDK_ROOT environment variable: {self._ndk_root}")
        self.env_info.NDK_ROOT = self._ndk_root
        logging.info(f"Creating CHOST environment variable: {self._llvm_triplet}")
        self.env_info.CHOST = self._llvm_triplet
        ndk_sysroot = os.path.join(self._ndk_root, 'sysroot')
        logging.info(f"Creating CONAN_CMAKE_FIND_ROOT_PATH environment variable: {ndk_sysroot}")
        self.env_info.CONAN_CMAKE_FIND_ROOT_PATH = ndk_sysroot
        logging.info(f"Creating SYSROOT environment variable: {ndk_sysroot}")
        self.env_info.SYSROOT = ndk_sysroot
        logging.info(f"Creating self.cpp_info.sysroot: {ndk_sysroot}")
        self.cpp_info.sysroot = ndk_sysroot
        logging.info(f"Creating ANDROID_NATIVE_API_LEVEL environment variable: {self.settings_target.os.api_level}")
        self.env_info.ANDROID_NATIVE_API_LEVEL = str(self.settings_target.os.api_level)
        self.chmod_plus_x(os.path.join(self.package_folder, "cmake-wrapper"))
        cmake_wrapper = "cmake-wrapper.cmd" if self._build_os == "Windows" else "cmake-wrapper"
        cmake_wrapper = os.path.join(self.package_folder, cmake_wrapper)
        logging.info(f"Creating CONAN_CMAKE_PROGRAM environment variable: {cmake_wrapper}")
        self.env_info.CONAN_CMAKE_PROGRAM = cmake_wrapper
        toolchain = os.path.join(self._ndk_home, "build", "cmake", "android.toolchain.cmake")
        logging.info(f"Creating CONAN_CMAKE_TOOLCHAIN_FILE environment variable: {toolchain}")
        self.env_info.CONAN_CMAKE_TOOLCHAIN_FILE = toolchain
        self.env_info.CC = self._define_tool_var('CC', 'clang')
        self.env_info.CXX = self._define_tool_var('CXX', 'clang++')
        self.env_info.LD = self._define_tool_var('LD', 'ld')
        self.env_info.AR = self._define_tool_var('AR', 'ar')
        self.env_info.AS = self._define_tool_var('AS', 'as')
        self.env_info.RANLIB = self._define_tool_var('RANLIB', 'ranlib')
        self.env_info.STRIP = self._define_tool_var('STRIP', 'strip')
        self.env_info.ADDR2LINE = self._define_tool_var('ADDR2LINE', 'addr2line')
        self.env_info.NM = self._define_tool_var('NM', 'nm')
        self.env_info.OBJCOPY = self._define_tool_var('OBJCOPY', 'objcopy')
        self.env_info.OBJDUMP = self._define_tool_var('OBJDUMP', 'objdump')
        self.env_info.READELF = self._define_tool_var('READELF', 'readelf')
        self.env_info.ELFEDIT = self._define_tool_var('ELFEDIT', 'elfedit')
        self.env_info.ANDROID_PLATFORM = f"android-{self.settings_target.os.api_level}"
        self.env_info.ANDROID_TOOLCHAIN = "clang"
        self.env_info.ANDROID_ABI = self._android_abi
        self.env_info.ANDROID_STL = f"{self.options.android_stl}"
        # set the stl shared lib path if specified by the android_stl option
        if self.options.android_stl == "c++_shared":
            self.env_info.ANDROID_STL_SHARED_LIB = f"{os.path.join(ndk_sysroot, 'usr', 'lib', self._llvm_triplet, 'libc++_shared.so')}"
            logging.info(f"Creating ANDROID_STL_SHARED_LIB environment variable: {self.env_info.ANDROID_STL_SHARED_LIB}")
        self.env_info.CMAKE_FIND_ROOT_PATH_MODE_PROGRAM = "BOTH"
        self.env_info.CMAKE_FIND_ROOT_PATH_MODE_LIBRARY = "BOTH"
        self.env_info.CMAKE_FIND_ROOT_PATH_MODE_INCLUDE = "BOTH"
        self.env_info.CMAKE_FIND_ROOT_PATH_MODE_PACKAGE = "BOTH"
| 2.125
| 2
|
print_text/add.py
|
ErraticO/test_github_release_pypi
| 2
|
12777171
|
def make():
print("add")
| 1.375
| 1
|
multiformats/multicodec/__init__.py
|
hashberg-io/multiformats
| 1
|
12777172
|
"""
Implementation of the `multicodec spec <https://github.com/multiformats/multicodec>`_.
Suggested usage:
>>> from multiformats import multicodec
"""
import importlib.resources as importlib_resources
from io import BufferedIOBase
import json
import re
import sys
from typing import AbstractSet, Any, cast, Dict, Iterable, Iterator, Mapping, Optional, overload, Set, Sequence, Tuple, Type, TypeVar, Union
from typing_extensions import Literal
from typing_validation import validate
from multiformats import varint
from multiformats.varint import BytesLike
# from . import err
from .err import MulticodecKeyError, MulticodecValueError
def _hexcode(code: int) -> str:
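    # zero-pad so the hex string has an even number of digits (whole bytes)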
hexcode = hex(code)
if len(hexcode) % 2 != 0:
hexcode = "0x0"+hexcode[2:]
return hexcode
class Multicodec:
"""
Container class for a multicodec.
Example usage:
>>> Multicodec(**{
... 'name': 'cidv1', 'tag': 'cid', 'code': '0x01',
... 'status': 'permanent', 'description': 'CIDv1'})
Multicodec(name='cidv1', tag='cid', code=1,
status='permanent', description='CIDv1')
:param name: the multicodec name
:type name: :obj:`str`
:param tag: the multicodec tag
:type tag: :obj:`str`
:param code: the multicodec code, as integer or ``0xYZ`` hex-string
:type code: :obj:`int` or :obj:`str`
:param status: the multicodec status
:type status: ``'draft'`` or ``'permanent'``, *optional*
:param description: the multicodec description
:type description: :obj:`str`, *optional*
"""
_name: str
_tag: str
_code: int
_status: Literal["draft", "permanent"]
_description: str
__slots__ = ("__weakref__", "_name", "_tag", "_code", "_status", "_description")
def __init__(self, *,
name: str,
tag: str,
code: Union[int, str],
status: str = "draft",
description: str = ""
):
for arg in (name, tag, status, description):
validate(arg, str)
validate(code, Union[int, str])
name = Multicodec._validate_name(name)
code = Multicodec.validate_code(code)
status = Multicodec._validate_status(status)
self._name = name
self._tag = tag
self._code = code
self._status = status
self._description = description
@staticmethod
def _validate_name(name: str) -> str:
if not re.match(r"^[a-z][a-z0-9_-]+$", name):
raise MulticodecValueError(f"Invalid multicodec name {repr(name)}")
return name
@staticmethod
def validate_code(code: Union[int, str]) -> int:
"""
Validates a multicodec code and transforms it to unsigned integer format (if in hex format).
:param code: the multicodec code, as integer or `0xYZ` hex-string
:type code: :obj:`int` or :obj:`str`
:raises ValueError: if the code is invalid
"""
if isinstance(code, str):
if code.startswith("0x"):
code = code[2:]
code = int(code, base=16)
if code < 0:
raise MulticodecValueError(f"Invalid multicodec code {repr(code)}.")
return code
@staticmethod
def _validate_status(status: str) -> Literal["draft", "permanent"]:
if status not in ("draft", "permanent"):
raise MulticodecValueError(f"Invalid multicodec status {repr(status)}.")
return cast(Literal["draft", "permanent"], status)
@property
def name(self) -> str:
"""
Multicodec name. Must satisfy the following:
.. code-block:: python
re.match(r"^[a-z][a-z0-9_-]+$", name)
"""
return self._name
@property
def tag(self) -> str:
""" Multicodec tag. """
return self._tag
@property
def code(self) -> int:
""" Multicodec code. Must be a non-negative integer. """
return self._code
@property
def hexcode(self) -> str:
"""
Multicodec code as a hex string (with hex digits zero-padded to even length):
Example usage:
>>> m = multicodec.get(1)
>>> m.code
1
>>> m.hexcode
'0x01'
"""
return _hexcode(self._code)
@property
def status(self) -> Literal["draft", "permanent"]:
""" Multicodec status. """
return self._status
@property
def description(self) -> str:
""" Multicodec description. """
return self._description
@property
def is_private_use(self) -> bool:
"""
Whether this multicodec code is reserved for private use,
i.e. whether it is in ``range(0x300000, 0x400000)``.
"""
return self.code in range(0x300000, 0x400000)
def wrap(self, raw_data: BytesLike) -> bytes:
"""
Wraps raw binary data into multicodec data:
.. code-block:: console
<raw data> --> <code><raw data>
Example usage:
>>> ip4 = multicodec.get("ip4")
>>> ip4
Multicodec(name='ip4', tag='multiaddr', code='0x04', status='permanent', description='')
>>> raw_data = bytes([192, 168, 0, 254])
>>> multicodec_data = ip4.wrap(raw_data)
>>> raw_data.hex()
'c0a800fe'
>>> multicodec_data.hex()
'04c0a800fe'
>>> varint.encode(0x04).hex()
'04' # 0x04 ^^^^ is the multicodec code for 'ip4'
:param raw_data: the raw data to be wrapped
:type raw_data: :obj:`~multiformats.varint.BytesLike`
:raise ValueError: see :func:`~multiformats.varint.encode`
"""
return varint.encode(self.code)+raw_data
def unwrap(self, multicodec_data: BytesLike) -> bytes:
"""
Unwraps multicodec binary data to raw data:
.. code-block::
<code><raw data> --> <raw data>
Additionally checks that the code listed by the data
matches the code of this multicodec.
Example usage:
>>> multicodec_data = bytes.fromhex("c0a800fe")
>>> raw_data = ip4.unwrap(multicodec_data)
>>> multicodec_data.hex()
'04c0a800fe'
>>> raw_data.hex()
'c0a800fe'
>>> varint.encode(0x04).hex()
'04' # 0x04 ^^^^ is the multicodec code for 'ip4'
:param multicodec_data: the multicodec data to be unwrapped
:type multicodec_data: :obj:`~multiformats.varint.BytesLike`
:raise ValueError: if the unwrapped multicodec code does not match this multicodec's code
:raise ValueError: see :func:`multiformats.multicodec.unwrap_raw`
:raise KeyError: see :func:`multiformats.multicodec.unwrap_raw`
"""
code, _, raw_data = unwrap_raw(multicodec_data)
# code, _, raw_data = varint.decode_raw(multicodec_data)
if code != self.code:
hexcode = _hexcode(code)
raise MulticodecValueError(f"Found code {hexcode} when unwrapping data, expected code {self.hexcode}.")
return bytes(raw_data)
def to_json(self) -> Mapping[str, str]:
"""
Returns a JSON dictionary representation of this multicodec object.
Example usage:
>>> m = multicodec.get(1)
>>> m.to_json()
{'name': 'cidv1', 'tag': 'cid', 'code': '0x01',
'status': 'permanent', 'description': 'CIDv1'}
"""
return {
"name": self.name,
"tag": self.tag,
"code": self.hexcode,
"status": self.status,
"description": self.description
}
def __str__(self) -> str:
if exists(self.name) and get(self.name) == self:
return f"multicodec({repr(self.name)}, tag={repr(self.tag)})"
return repr(self)
def __repr__(self) -> str:
return f"Multicodec({', '.join(f'{k}={repr(v)}' for k, v in self.to_json().items())})"
@property
def _as_tuple(self) -> Tuple[Type["Multicodec"], str, str, int, Literal["draft", "permanent"]]:
return (Multicodec, self.name, self.tag, self.code, self.status)
def __hash__(self) -> int:
return hash(self._as_tuple)
def __eq__(self, other: Any) -> bool:
if self is other:
return True
if not isinstance(other, Multicodec):
return NotImplemented
return self._as_tuple == other._as_tuple
def get(name: Optional[str] = None, *, code: Optional[int] = None) -> Multicodec:
"""
Gets the multicodec with given name or code.
Example usage:
>>> multicodec.get("identity")
Multicodec(name='identity', tag='multihash', code=0,
status='permanent', description='raw binary')
>>> multicodec.get(code=0x01)
Multicodec(name='cidv1', tag='ipld', code=1,
status='permanent', description='CIDv1')
:param name: the multicodec name
:type name: :obj:`str` or :obj:`None`, *optional*
:param code: the multicodec code
:type code: :obj:`int` or :obj:`None`, *optional*
:raises KeyError: if no such multicodec exists
:raises ValueError: unless exactly one of ``name`` and ``code`` is specified
"""
validate(name, Optional[str])
validate(code, Optional[int])
if (name is None) == (code is None):
raise MulticodecValueError("Must specify exactly one between 'name' and 'code'.")
if name is not None:
if name not in _name_table:
raise MulticodecKeyError(f"No multicodec named {repr(name)}.")
return _name_table[name]
if code not in _code_table:
raise MulticodecKeyError(f"No multicodec with code {repr(code)}.")
return _code_table[code]
def multicodec(name: str, *, tag: Optional[str] = None) -> Multicodec:
"""
An alias for :func:`get`, for use with multicodec name only.
If a tag is passed, ensures that the multicodec tag matches the one given.
Example usage:
>>> from multiformats.multicodec import multicodec
>>> multicodec("identity")
Multicodec(name='identity', tag='multihash', code=0,
status='permanent', description='raw binary')
:param name: the multicodec name
:type name: :obj:`str`
:param tag: the optional multicodec tag
:type tag: :obj:`str` or :obj:`None`, *optional*
:raises KeyError: see :func:`get`
"""
codec = get(name)
if tag is not None and codec.tag != tag:
raise MulticodecKeyError(f"Multicodec {repr(name)} exists, but its tag is not {repr(tag)}.")
return codec
def exists(name: Union[None, str, Multicodec] = None, *, code: Optional[int] = None) -> bool:
"""
Checks whether there is a multicodec with the given name or code.
Example usage:
>>> multicodec.exists("identity")
True
>>> multicodec.exists(code=0x01)
True
:param name: the multicodec name
:type name: :obj:`str` or :obj:`None`, *optional*
:param code: the multicodec code
:type code: :obj:`int` or :obj:`None`, *optional*
:raises ValueError: unless exactly one of ``name`` and ``code`` is specified
"""
validate(name, Optional[str])
validate(code, Optional[int])
if (name is None) == (code is None):
raise MulticodecValueError("Must specify exactly one between 'name' and 'code'.")
if name is not None:
return name in _name_table
return code in _code_table
def wrap(codec: Union[str, int, Multicodec], raw_data: BytesLike) -> bytes:
"""
Wraps raw binary data into multicodec data:
.. code-block::
<raw data> --> <code><raw data>
Example usage:
>>> raw_data = bytes([192, 168, 0, 254])
>>> multicodec_data = multicodec.wrap("ip4", raw_data)
>>> raw_data.hex()
'c0a800fe'
>>> multicodec_data.hex()
'04c0a800fe'
>>> varint.encode(0x04).hex()
'04' # 0x04 ^^^^ is the multicodec code for 'ip4'
:param codec: the multicodec that the raw data refers to
:type codec: :obj:`str`, :obj:`int` or :class:`Multicodec`
:param raw_data: the raw binary data
:type raw_data: :obj:`~multiformats.varint.BytesLike`
:raises KeyError: see :func:`get`
"""
if isinstance(codec, str):
codec = get(codec)
elif isinstance(codec, int):
codec = get(code=codec)
else:
validate(codec, Multicodec)
return codec.wrap(raw_data)
def unwrap(multicodec_data: BytesLike) -> Tuple[Multicodec, bytes]:
"""
Unwraps multicodec binary data to multicodec and raw data:
Example usage:
>>> multicodec_data = bytes.fromhex("04c0a800fe")
>>> codec, raw_data = multicodec.unwrap(multicodec_data)
>>> multicodec_data.hex()
'04c0a800fe'
>>> raw_data.hex()
'c0a800fe'
>>> codec
Multicodec(name='ip4', tag='multiaddr', code='0x04', status='permanent', description='')
:param multicodec_data: the binary data prefixed with multicodec code
:type multicodec_data: :obj:`~multiformats.varint.BytesLike`
    :raises KeyError: if the code does not correspond to a known multicodec
"""
code, _, raw_data = unwrap_raw(multicodec_data)
return get(code=code), bytes(raw_data)
_BufferedIOT = TypeVar("_BufferedIOT", bound=BufferedIOBase)
@overload
def unwrap_raw(multicodec_data: BytesLike) -> Tuple[int, int, memoryview]:
...
@overload
def unwrap_raw(multicodec_data: _BufferedIOT) -> Tuple[int, int, _BufferedIOT]:
...
def unwrap_raw(multicodec_data: Union[BytesLike, BufferedIOBase]) -> Tuple[int, int, Union[memoryview, BufferedIOBase]]:
"""
Similar to :func:`unwrap`, but returns a triple of multicodec code, number of bytes read and remaining bytes.
Example usage:
>>> multicodec_data = bytes.fromhex("04c0a800fe")
>>> code, num_bytes_read, remaining_bytes = multicodec.unwrap_raw(multicodec_data)
>>> code
4
>>> num_bytes_read
1
>>> remaining_bytes
<memory at 0x000001BE46B17640>
>>> multicodec_data.hex()
'04c0a800fe'
>>> bytes(remaining_bytes).hex()
'c0a800fe'
:param multicodec_data: the binary data prefixed with multicodec code
:type multicodec_data: :obj:`~multiformats.varint.BytesLike`
    :raises KeyError: if the code does not correspond to a known multicodec
"""
code, n, raw_data = varint.decode_raw(multicodec_data)
if not exists(code=code):
raise MulticodecKeyError(f"No multicodec is known with unwrapped code {_hexcode(code)}.")
return code, n, raw_data
def validate_multicodec(codec: Multicodec) -> None:
"""
Validates an instance of :class:`Multicodec`.
If the multicodec is registered (i.e. valid), no error is raised.
:param codec: the instance to be validated
:type codec: :class:`Multicodec`
:raises KeyError: if no multicodec with the given name is registered
:raises ValueError: if a multicodec with the given name is registered, but is different from the one given
"""
validate(codec, Multicodec)
mc = get(codec.name)
if mc != codec:
raise MulticodecValueError(f"Multicodec named {codec.name} exists, but is not the one given.")
def register(codec: Multicodec, *, overwrite: bool = False) -> None:
"""
Registers a given multicodec.
Example usage:
>>> m = Multicodec("my-multicodec", "my-tag", 0x300001, "draft", "...")
>>> multicodec.register(m)
>>> multicodec.exists(code=0x300001)
True
>>> multicodec.get(code=0x300001).name
'my-multicodec'
>>> multicodec.get(code=0x300001).is_private_use
True
:param codec: the multicodec to register
:type codec: :class:`Multicodec`
:param overwrite: whether to overwrite a multicodec with existing code (optional, default :obj:`False`)
:type overwrite: :obj:`bool`, *optional*
:raises ValueError: if ``overwrite`` is :obj:`False` and a multicodec with the same name or code already exists
:raises ValueError: if ``overwrite`` is :obj:`True` and a multicodec with the same name but different code already exists
"""
validate(codec, Multicodec)
validate(overwrite, bool)
if not overwrite and codec.code in _code_table:
raise MulticodecValueError(f"Multicodec with code {repr(codec.code)} already exists: {_code_table[codec.code]}")
if codec.name in _name_table and _name_table[codec.name].code != codec.code:
raise MulticodecValueError(f"Multicodec with name {repr(codec.name)} already exists: {_name_table[codec.name]}")
_code_table[codec.code] = codec
_name_table[codec.name] = codec
def unregister(name: Optional[str] = None, *, code: Optional[int] = None) -> None:
"""
Unregisters the multicodec with given name or code.
Example usage:
>>> multicodec.unregister(code=0x01) # cidv1
    >>> multicodec.exists(code=0x01)
False
:param name: the multicodec name
:type name: :obj:`str` or :obj:`None`, *optional*
:param code: the multicodec code
:type code: :obj:`int` or :obj:`None`, *optional*
:raises KeyError: if no such multicodec exists
:raises ValueError: unless exactly one of ``name`` and ``code`` is specified
"""
m = get(name, code=code)
del _code_table[m.code]
del _name_table[m.name]
def table(*,
tag: Union[None, str, AbstractSet[str], Sequence[str]] = None,
status: Union[None, str, AbstractSet[str], Sequence[str]] = None) -> Iterator[Multicodec]:
"""
Iterates through the registered multicodecs, in order of ascending code.
Example usage:
>>> len(list(multicodec.table())) # multicodec.table() returns an iterator
482
>>> selected = multicodec.table(tag=["cid", "cid", "multiaddr"], status="permanent")
>>> [m.code for m in selected]
[1, 4, 6, 41, 53, 54, 55, 56, 81, 85, 112, 113, 114, 120,
144, 145, 146, 147, 148, 149, 150, 151, 152, 176, 177,
178, 192, 193, 290, 297, 400, 421, 460, 477, 478, 479, 512]
:param tag: one or more tags to be selected (if :obj:`None`, all tags are included)
:type tag: :obj:`None`, :obj:`str`, set or sequence of :obj:`str`, *optional*
:param status: one or more statuses to be selected (if :obj:`None`, all statuses are included)
:type status: :obj:`None`, :obj:`str`, set or sequence of :obj:`str`, *optional*
"""
validate(tag, Union[None, str, AbstractSet[str], Sequence[str]])
validate(status, Union[None, str, AbstractSet[str], Sequence[str]])
tags: Union[None, AbstractSet[str], Sequence[str]]
if tag is None:
tags = None
elif isinstance(tag, str):
tags = [tag]
else:
tags = tag
statuses: Union[None, AbstractSet[str], Sequence[str]]
if status is None:
statuses = None
elif isinstance(status, str):
statuses = [status]
else:
statuses = status
for code in sorted(_code_table.keys()):
m = _code_table[code]
if tags is not None and m.tag not in tags:
continue
if statuses is not None and m.status not in statuses:
continue
yield m
def build_multicodec_tables(codecs: Iterable[Multicodec], *,
allow_private_use: bool = False) -> Tuple[Dict[int, Multicodec], Dict[str, Multicodec]]:
"""
Creates code->multicodec and name->multicodec mappings from a finite iterable of multicodecs,
returning the mappings.
Example usage:
>>> code_table, name_table = build_multicodec_tables(codecs)
:param codecs: multicodecs to be registered
:type codecs: iterable of :class:`Multicodec`
:param allow_private_use: whether to allow multicodec entries with private use codes in ``range(0x300000, 0x400000)`` (default :obj:`False`)
:type allow_private_use: :obj:`bool`, *optional*
:raises ValueError: if ``allow_private_use`` and a multicodec with private use code is encountered
:raises ValueError: if the same multicodec code is encountered multiple times, unless exactly one of the multicodecs
has permanent status (in which case that codec is the one inserted in the table)
:raises ValueError: if the same name is encountered multiple times
"""
# validate(codecs, Iterable[Multicodec]) # TODO: not yet properly supported by typing-validation
validate(allow_private_use, bool)
code_table: Dict[int, Multicodec] = {}
name_table: Dict[str, Multicodec] = {}
overwritten_draft_codes: Set[int] = set()
for m in codecs:
if not allow_private_use and m.is_private_use:
raise MulticodecValueError(f"Private use multicodec not allowed: {m}")
if m.code in code_table:
if code_table[m.code].status == "permanent":
if m.status == "draft":
# this draft code has been superseded by a permanent one, skip it
continue
raise MulticodecValueError(f"Multicodec code {m.hexcode} appears multiple times in table.")
if m.status != "permanent":
# overwriting draft code with another draft code: dodgy, need to check at the end
overwritten_draft_codes.add(m.code)
code_table[m.code] = m
if m.name in name_table:
raise MulticodecValueError(f"Multicodec name {m.name} appears multiple times in table.")
name_table[m.name] = m
for code in overwritten_draft_codes:
m = code_table[code]
if m.status != "permanent":
raise MulticodecValueError(f"Code {m.code} appears multiple times in table, "
"but none of the associated multicodecs is permanent.")
return code_table, name_table
# Create the global code->multicodec and name->multicodec mappings.
_code_table: Dict[int, Multicodec]
_name_table: Dict[str, Multicodec]
with importlib_resources.open_text("multiformats.multicodec", "multicodec-table.json") as _table_f:
_table_json = json.load(_table_f)
_code_table, _name_table = build_multicodec_tables(Multicodec(**row) for row in _table_json)
| 2.875
| 3
|
k8svimdriver/service/k8s.py
|
accanto-systems/k8s-vim-driver
| 0
|
12777173
|
from ignition.service.config import ConfigurationPropertiesGroup
class K8sProperties(ConfigurationPropertiesGroup):
def __init__(self):
super().__init__('k8s')
self.tmpdir = "./"
| 1.6875
| 2
|
blogger_tests.py
|
cjhang/blogger
| 0
|
12777174
|
<gh_stars>0
# -*- coding: utf-8 -*-
import os
import blogger
import unittest
import tempfile
class FlaskTestCase(unittest.TestCase):
def setUp(self):
self.db_fd, blogger.app.config['DATABASE'] = tempfile.mkstemp()
blogger.app.config['TESTING'] = True
self.app = blogger.app.test_client()
with blogger.app.app_context():
blogger.init_db()
def tearDown(self):
os.close(self.db_fd)
os.unlink(blogger.app.config['DATABASE'])
def test_empty_db(self):
rv = self.app.get('/')
assert b'No blogs here so far' in rv.data
def test_displayblogs(self):
rv = self.app.post('/add', data = dict(
name = 'test1',
title_zh = '测试1',
author = 'test_author',
release = '2010-10-06',
tags = 'test',
abstract = 'Test1 abstract',
content = 'This is a test blog content.'))
rv = self.app.get('/')
assert b'No blogs here so far' not in rv.data
assert b'测试1' in rv.data
assert b'Test1 abstract' in rv.data
rv = self.app.get('/blogs/test1')
assert b'测试1' in rv.data
assert b'2010-10-06' in rv.data
assert b'This is a test blog content.' in rv.data
assert b'No comments so far' in rv.data
rv = self.app.post('/blogs/test1', data = dict(
user_name = 'tester',
comment_detail = 'What a good blog!'))
rv = self.app.get('/blogs/test1')
assert b'What a good blog!' in rv.data
if __name__ == '__main__':
unittest.main()
| 2.5
| 2
|
Test_h5.py
|
philippgualdi/PyQMRI
| 0
|
12777175
|
<filename>Test_h5.py
import h5py
filename = "../VFA_phantom_8.h5"
h5 = h5py.File(filename, 'a')
if 'Coils' in h5:
    Coils = h5['Coils'] # coil data, to be removed below
print(Coils)
del h5['Coils']
print(list(h5.keys()))
if 'flip_angle(s)' in h5:
print("Flip angle exists")
data = h5['flip_angle(s)']
del h5['flip_angle(s)']
h5['fa'] = data
h5.close()
| 2.359375
| 2
|
kendall.py
|
Sharingsky/FORMERAMC
| 0
|
12777176
|
<reponame>Sharingsky/FORMERAMC
from pandas import DataFrame
import pandas as pd
x = [10-i for i in range(10)]
y = [8,2,9,3,5,10,1,4,7,6]
data = DataFrame({'x':x,'y':y})
print(data.head())
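# Kendall's tau: rank correlation based on concordant vs. discordant pairs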
kend=data.corr(method='kendall')
print(kend)
| 2.734375
| 3
|
src/djangoreactredux/djrenv/lib/python3.5/site-packages/prospector/exceptions.py
|
m2jobe/c_x
| 1
|
12777177
|
# -*- coding: utf-8 -*-
# We are trying to handle pylint changes in their exception classes
try:
# pylint < 1.7
from pylint.utils import UnknownMessage as UnknownMessageError
except ImportError:
# pylint >= 1.7
from pylint.exceptions import UnknownMessageError
class FatalProspectorException(Exception):
"""
Exception used to indicate an internal prospector problem.
Problems in prospector itself should raise this to notify
    the user directly. Errors in dependent tools should be
    squashed and the user notified elegantly.
"""
def __init__(self, message):
self.message = message
| 2.546875
| 3
|
ejemplo13/main.py
|
JandroGC/curso_micropython
| 0
|
12777178
|
<gh_stars>0
# Example 13: lighting control over
# Bluetooth Low Energy (BLE)
# Author: <NAME>
# March 2022
# Import the required libraries
from machine import Pin, Timer
from time import sleep_ms, sleep
import ubluetooth
# Create a BLE class that manages the
# serial bluetooth communication with the
# mobile device, sending and receiving
# messages.
class BLE():
    '''
    BLE class that connects the ESP32 board
    to a mobile device over BLE.
    Attributes:
    ble = BLE object from the bluetooth
    library
    led = LED that blinks while no connection
    has been established and switches to
    steady on once connected
    Methods:
    ble_irq = contains the events used in the
    program. They are the four messages
    that, when received, change the
    outputs.
    connected = steady status LED when
    BLE is connected
    disconnected = blinking LED while BLE is disconnected
    send = sends data over bluetooth
    advertiser = makes the device name visible so
    it can be identified.
    register = handles the registrations needed for
    the communication
    '''
def __init__(self, name):
        print('BLE object created')
self.name = name
self.ble = ubluetooth.BLE()
self.ble.active(True)
self.led = Pin(2, Pin.OUT)
self.timer1 = Timer(0)
self.timer2 = Timer(1)
self.disconnected()
self.ble.irq(self.ble_irq)
self.register()
self.advertiser()
def connected(self):
self.timer1.deinit()
self.timer2.deinit()
def disconnected(self):
self.timer1.init(period=1000, mode=Timer.PERIODIC, callback=lambda t: self.led(1))
sleep_ms(200)
self.timer2.init(period=1000, mode=Timer.PERIODIC, callback=lambda t: self.led(0))
def ble_irq(self, event, data):
if event == 1:
            '''Central connected'''
self.connected()
self.led(1)
elif event == 2:
'''Central disconnected'''
self.advertiser()
self.disconnected()
elif event == 3:
            '''New message received'''
buffer = self.ble.gatts_read(self.rx)
message = buffer.decode('UTF-8').strip()
print(message)
if message == 'luz1':
luz1.value(not luz1.value())
print('Luz 1...', luz1.value())
ble.send('Luz 1...' + str(luz1.value()))
if message == 'luz2':
luz2.value(not luz2.value())
print('Luz 2...', luz2.value())
ble.send('Luz 2...' + str(luz2.value()))
if message == 'luz3':
luz3.value(not luz3.value())
print('Luz 3...', luz3.value())
ble.send('Luz 3...' + str(luz3.value()))
if message == 'luz4':
luz4.value(not luz4.value())
print('Luz 4...', luz4.value())
ble.send('Luz 4...' + str(luz4.value()))
def register(self):
# Nordic UART Service (NUS)
NUS_UUID = '6E400001-B5A3-F393-E0A9-E50E24DCCA9E'
RX_UUID = '6E400002-B5A3-F393-E0A9-E50E24DCCA9E'
TX_UUID = '6E400003-B5A3-F393-E0A9-E50E24DCCA9E'
BLE_NUS = ubluetooth.UUID(NUS_UUID)
BLE_RX = (ubluetooth.UUID(RX_UUID), ubluetooth.FLAG_WRITE)
BLE_TX = (ubluetooth.UUID(TX_UUID), ubluetooth.FLAG_NOTIFY)
BLE_UART = (BLE_NUS, (BLE_TX, BLE_RX,))
SERVICES = (BLE_UART, )
((self.tx, self.rx,), ) = self.ble.gatts_register_services(SERVICES)
def send(self, data):
self.ble.gatts_notify(0, self.tx, data + '\n')
def advertiser(self):
name = bytes(self.name, 'UTF-8')
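        # Advertising payload: a flags AD field (0x02 0x01 0x02 = LE general
        # discoverable) followed by a complete-local-name field (AD type 0x09)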
self.ble.gap_advertise(100, bytearray('\x02\x01\x02') + bytearray((len(name) + 1, 0x09)) + name)
# Test
# Define the output pins that the
# relays are connected to
luz1 = Pin(12, Pin.OUT)
luz2 = Pin(14, Pin.OUT)
luz3 = Pin(27, Pin.OUT)
luz4 = Pin(26, Pin.OUT)
# Create the BLE object; since it works
# via interrupts it will be continuously
# listening
ble = BLE("ESP32")
while True:
print ('Bluetooth')
sleep(5)
| 3.15625
| 3
|
classResults.py
|
Mulugruntz/Report-Tool
| 0
|
12777179
|
<reponame>Mulugruntz/Report-Tool
from decimal import Decimal, DivisionByZero
from typing import Dict, List, Tuple
import numpy as np
from collections import OrderedDict
import funcMisc
# TODO: is it needed to subclass dict? Especially for one huge method!
class TradesResults(dict):
"""
    Class with methods to calculate the
    summary and equity plots for trades
"""
def __init__(self):
        self.dict_results = {}
@staticmethod
def calculate_transactions_of_types(
transactions: Dict, types: List[str]
) -> Tuple[List[Decimal], Decimal]:
out_list = [
Decimal(trade["pnl"])
for trade in transactions.values()
if trade["type"] in types
]
out_total = round(Decimal(sum(out_list)), 2)
return out_list, out_total
def calculate_result(
self,
transactions: Dict,
start_capital: Decimal,
cash_available: Decimal,
screenshot: bool,
) -> Dict:
"""
        Calculate a summary about the trades. For the infos calculated
        see summary_headers. The summary can be in points,
        points/lot, % or currency. Returns a dict with formatted
        strings and a dict with np arrays to plot the equity curves.
        transactions and start_capital can be modified by
        the user's choices, so they are returned too.
        :kw param transactions: OrderedDict with transactions
        :kw param cash_available: string, cash
        available on the current account
        :kw param start_capital: float, initial capital set by the user
        :kw param screenshot: boolean, informs if a screenshot is being
        taken, to properly format the infos
"""
config = funcMisc.read_config() # load config file
currency_symbol = config["currency_symbol"]
result_in = config["result_in"]
auto_calculate = config["auto_calculate"]
include = config["include"]
state_filter = config["all"]
state_infos = config["what_to_show"]["state_infos"]
summary_headers = [
"Points won",
"Trades won",
"Points lost",
"Trades lost",
"Total points",
"Trades flat",
"Total trades",
"Avg trade",
"Profit Factor",
"Avg win",
"Capital growth",
"Avg loss",
"Max drawdown",
"Avg drawdown",
"Consec. wins",
"Consec. losses",
"Interests",
"Fees",
"Cash in/out",
"Transfers",
        ] # same list as the one used to create the dock
ig_config = funcMisc.read_ig_config()
"""
        IG sends keywords to identify transaction types; known
        keywords are stored in ig_config.json.
        If a transaction type is unknown, log it.
"""
kw_order = ig_config["keyword"]["ORDER"]
kw_fees = ig_config["keyword"]["FEES"]
kw_cashin = ig_config["keyword"]["CASH_IN"]
kw_cashout = ig_config["keyword"]["CASH_OUT"]
kw_transfer = ig_config["keyword"]["TRANSFER"]
summary_dict = OrderedDict()
# -------------------------start precalculation-------------------------
        # calculate all internal funds transfers
        # inter-account transfers
transfer_list, total_transfer = self.calculate_transactions_of_types(
transactions, ["TRANSFER"]
)
# user's deposit
cashin_list, total_cashin = self.calculate_transactions_of_types(
transactions, ["CASHIN"]
)
# user's withdrawal
cashout_list, total_cashout = self.calculate_transactions_of_types(
transactions, ["CASHOUT"]
)
# build list with interest pnl
interest_list, total_interest = self.calculate_transactions_of_types(
transactions, ["WITH", "DEPO", "DIVIDEND"]
)
# build list with fees pnl
fee_list, total_fee = self.calculate_transactions_of_types(
transactions, ["CHART"]
)
# calculate total pnl to determine start capital
if transactions:
total_pnl = sum([Decimal(t["pnl"]) for t in transactions.values()])
            # pnl minus funds transfers
if include == 2:
total_pnl = total_pnl - (total_cashin + total_cashout + total_transfer)
            # pnl minus funds transfers and fees/interest
else:
total_pnl = total_pnl - (
total_cashin
+ total_cashout
+ total_transfer
+ total_fee
+ total_interest
)
        else: # no data: return an empty dict
for count, header in enumerate(summary_headers):
summary_dict[header] = ""
curve_args = {
"transactions": transactions,
"start_capital": start_capital,
"config": config,
}
curves_dict = self.create_curves(**curve_args)
dict_results = {
"summary": summary_dict,
"start_capital": start_capital,
"transactions": transactions,
"curves_dict": curves_dict,
}
return dict_results
# determine start capital according to user's choice
if auto_calculate == 2:
start_capital = cash_available - total_pnl
else:
cash_available = start_capital + total_pnl
capital = start_capital
# calculate growth according to start capital
for count, deal_id in enumerate(transactions.keys()):
# add nothing if trade is fund transfer
if transactions[deal_id]["type"] not in ["TRANSFER", "CASHIN", "CASHOUT"]:
                # add each pnl to the running capital
capital += transactions[deal_id]["pnl"]
# recalculate growth
try:
growth = round(((capital - start_capital) / start_capital) * 100, 2)
except ZeroDivisionError:
growth = 0
# change growth key in transactions
transactions[deal_id]["growth"] = str(growth)
# build list with pnl in currency
pnl_currency_list = [
Decimal(transactions[trade]["pnl"])
for trade in transactions.keys()
if transactions[trade]["type"] in kw_order
]
# build list with pnl in points
points_list = [
Decimal(transactions[trade]["points"])
for trade in transactions.keys()
if transactions[trade]["type"] in kw_order
]
# build list with pnl in points/lot
points_lot_list = [
Decimal(transactions[trade]["points_lot"])
for trade in transactions.keys()
if transactions[trade]["type"] in kw_order
]
# lists with pnl in currency
pnl_won_list = [pnl for pnl in pnl_currency_list if pnl > 0]
pnl_loss_list = [pnl for pnl in pnl_currency_list if pnl < 0]
pnl_flat_list = [pnl for pnl in pnl_currency_list if pnl == 0]
# list with pnl in points
points_won_list = [points for points in points_list if points > 0]
points_loss_list = [points for points in points_list if points < 0]
# list with won in points/lot
points_lot_won_list = [
points_lot for points_lot in points_lot_list if points_lot > 0
]
points_lot_loss_list = [
points_lot for points_lot in points_lot_list if points_lot < 0
]
money_won = sum(pnl_won_list)
money_lost = sum(pnl_loss_list)
"""
        if the user wants to calculate the summary with
        all profit/loss including fees/interest
"""
if include == 2:
total_pnl_currency = round(
(money_won + money_lost + total_interest + total_fee), 2
)
if total_interest > 0: # TODO: Does this make sense?
money_won = round(money_won + total_interest, 2)
else:
money_lost = round(money_lost + total_interest + total_fee, 2)
else:
# calculate totals in currency
total_pnl_currency = round((money_won + money_lost), 2)
money_won = round(money_won, 2)
money_lost = round(money_lost, 2)
# stats in points
points_lost = sum(points_loss_list)
points_won = sum(points_won_list)
total_pnl = round((points_won + points_lost), 2)
# stats in points/lot
points_lot_lost = sum(points_lot_loss_list)
points_lot_won = sum(points_lot_won_list)
total_pnl_lot = round((points_lot_won + points_lot_lost), 2)
# stats about nb trades
nb_trades = Decimal(len(pnl_currency_list))
nb_trades_flat = Decimal(len(pnl_flat_list))
nb_trades_lost = Decimal(len(pnl_loss_list))
nb_trades_won = Decimal(len(pnl_won_list))
i = 0
j = 0
conseq_won_list = [0]
conseq_loss_list = [0]
        # stats about consecutive wins/losses: one pass counting same-sign runs (a flat trade ends a run)
for _ in pnl_currency_list:
conseq_loss = 0
conseq_won = 0
j = 0
try:
pnl = pnl_currency_list[i] # get pnl
except IndexError:
break
if pnl > 0:
try:
while pnl > 0:
conseq_won += 1 # increment conseq wons
j += 1
pnl = pnl_currency_list[i + j] # get next pnl
conseq_won_list.append(conseq_won)
i += j # get pnl after last won
except IndexError:
                    conseq_won_list.append(conseq_won) # reached end of list
i = j - 1
elif pnl < 0:
try:
while pnl < 0:
conseq_loss += 1 # increment conseq losses
j += 1
pnl = pnl_currency_list[i + j] # get next pnl
conseq_loss_list.append(conseq_loss)
i += j # get pnl after last loss
except IndexError:
conseq_loss_list.append(conseq_loss) # reached end of list
i = j - 1
elif pnl == 0:
i += 1 # trade flat get next one
conseq_won = max(conseq_won_list)
conseq_loss = max(conseq_loss_list)
# manage zero division error
try:
profit_factor = abs(round(money_won / money_lost, 2))
except DivisionByZero:
profit_factor = "N/A"
try:
per_cent_trades_won = round((nb_trades_won / nb_trades) * 100, 2)
except DivisionByZero:
per_cent_trades_won = "N/A"
try:
per_cent_trades_lost = round((nb_trades_lost / nb_trades) * 100, 2)
except DivisionByZero:
per_cent_trades_lost = "N/A"
try:
per_cent_trades_flat = round((nb_trades_flat / nb_trades) * 100, 2)
except DivisionByZero:
per_cent_trades_flat = "N/A"
try:
growth = round((cash_available - start_capital) / start_capital * 100, 2)
except DivisionByZero:
growth = "N/A"
# --------------------------end precalculation--------------------------
# ------------------------start main calculation------------------------
interest_text = f"xxx {currency_symbol}"
fee_text = f"xxx {currency_symbol}"
if result_in not in ["%", "Points", "Points/lot"]:
result_in = "currency"
if result_in == "Points":
total_in = total_pnl
won_in = points_won
loss_in = points_lost
# prepare dd calculation
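            # drawdown at each step = running peak of cumulative pnl minus current cumulative pnl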
pnl_array = np.insert(np.asfarray(points_list), 0, 0)
pnl_cumsum = np.cumsum(pnl_array)
dd_array = np.maximum.accumulate(pnl_cumsum) - pnl_cumsum
dd_to_list = [round(Decimal(f), 2) for f in np.ndarray.tolist(dd_array)]
dd_list = [dd for dd in dd_to_list if dd > 0]
"""
force interest and fees to be displayed
in currency as points are not available
"""
interest_text = f"{total_interest} {currency_symbol}"
fee_text = f"{total_fee} {currency_symbol}"
result_in = "pts" # prettier string for result_in
elif result_in == "Points/lot":
try:
total_in = round(total_pnl_lot / nb_trades, 2)
except DivisionByZero:
total_in = 0
try:
won_in = round(points_lot_won / nb_trades_won, 2)
except DivisionByZero:
won_in = 0
try:
loss_in = round(points_lot_lost / nb_trades_lost, 2)
except DivisionByZero:
loss_in = 0
# prepare dd calculation
pnl_array = np.insert(np.asfarray(points_lot_list), 0, 0)
pnl_cumsum = np.cumsum(pnl_array)
dd_array = np.maximum.accumulate(pnl_cumsum) - pnl_cumsum
dd_to_list = [round(Decimal(f), 2) for f in np.ndarray.tolist(dd_array)]
dd_list = [dd for dd in dd_to_list if dd > 0]
"""
force interest and fees to be displayed
in currency as points are not available
"""
interest_text = f"{total_interest} {currency_symbol}"
fee_text = f"{total_fee} {currency_symbol}"
result_in = "pts/lot" # prettier string for result_in
elif result_in == "currency":
total_in = total_pnl_currency
won_in = money_won
loss_in = money_lost
# prepare dd calculation
pnl_array = np.insert(np.asfarray(pnl_currency_list), 0, 0)
pnl_cumsum = np.cumsum(pnl_array)
dd_array = np.maximum.accumulate(pnl_cumsum) - pnl_cumsum
dd_to_list = [round(Decimal(f), 2) for f in np.ndarray.tolist(dd_array)]
dd_list = [dd for dd in dd_to_list if dd > 0]
interest_text = f"{total_interest} {currency_symbol}"
fee_text = f"{total_fee} {currency_symbol}"
result_in = currency_symbol # prettier string for result_in
elif result_in == "%":
# prepare dd calculation
pnl_array = np.insert(np.asfarray(pnl_currency_list), 0, 0)
pnl_cumsum = np.cumsum(pnl_array)
dd_array = np.maximum.accumulate(pnl_cumsum) - pnl_cumsum
dd_to_list = [round(Decimal(f), 2) for f in np.ndarray.tolist(dd_array)]
dd_list = [dd for dd in dd_to_list if dd > 0]
try:
# first calculate max_dd in money
max_dd = round(Decimal(np.amax(dd_array)), 2)
except ValueError:
max_dd = Decimal()
# calculate dd in %
try:
per_cent_avg_dd = round(
(Decimal(sum(dd_list)) / len(dd_list)) / start_capital * 100, 2,
)
per_cent_max_dd = round(max_dd / start_capital * 100, 2)
except (RuntimeWarning, DivisionByZero):
per_cent_max_dd = Decimal()
per_cent_avg_dd = Decimal()
try:
total_in = round(total_pnl_currency / start_capital * 100, 2)
won_in = round(money_won / start_capital * 100, 2)
loss_in = round(money_lost / start_capital * 100, 2)
total_interest = round((total_interest / start_capital) * 100, 2)
total_fee = round((total_fee / start_capital) * 100, 2)
except ZeroDivisionError:
total_in = Decimal()
won_in = Decimal()
loss_in = Decimal()
total_interest = Decimal()
total_fee = Decimal()
interest_text = f"{total_interest} %"
fee_text = f"{total_fee} %"
"""
            set a dummy array to force a TypeError. I prefer doing
            it this way to avoid too many if/else statements
"""
dd_array = ""
# calculate avg values
try:
avg_trade = round((total_in / nb_trades), 2)
except DivisionByZero:
avg_trade = Decimal()
try:
avg_won = round((won_in / nb_trades_won), 2)
except DivisionByZero:
avg_won = Decimal()
try:
avg_loss = round((loss_in / nb_trades_lost), 2)
except DivisionByZero:
avg_loss = Decimal()
try:
max_dd = round(Decimal(np.amax(dd_array)), 2)
try:
avg_dd = round(Decimal(sum(dd_list)) / len(dd_list), 2)
except DivisionByZero: # means 0 loss
avg_dd = Decimal()
except TypeError: # means result is in %
max_dd = per_cent_max_dd
avg_dd = per_cent_avg_dd
except ValueError:
max_dd = Decimal()
avg_dd = Decimal()
# -------------------------end main calculation-------------------------
# add result_in to strings
if max_dd != 0:
max_dd_text = f"{-max_dd} {result_in}"
avg_dd_text = f"{-avg_dd} {result_in}"
else:
max_dd_text = f"{max_dd} {result_in}"
avg_dd_text = f"{avg_dd} {result_in}"
avg_trade_text = f"{avg_trade} {result_in}"
avg_won_text = f"{avg_won} {result_in}"
avg_loss_text = f"{avg_loss} {result_in}"
# add % values in parenthesis
nb_trades_won_text = f"{nb_trades_won} ({per_cent_trades_won}%)"
nb_trades_loss_text = f"{nb_trades_lost} ({per_cent_trades_lost}%)"
nb_trades_flat_text = f"{nb_trades_flat} ({per_cent_trades_flat}%)"
# for important values add a color scheme
profit_color = config["profit_color"]
flat_color = config["flat_color"]
loss_color = config["loss_color"]
total_color = (
profit_color
if total_in > 0
else flat_color
if total_in == 0
else loss_color
)
total_in_text = (
f"""<span style="color:{total_color}">{total_in} {result_in}</span>"""
)
won_in_text = f"{won_in} {result_in}"
loss_in_text = f"{loss_in} {result_in}"
growth_text = "N/A"
if not growth == "N/A":
growth_color = (
profit_color
if growth > 0
else flat_color
if growth == 0
else loss_color
)
growth_text = f"""<span style="color:{growth_color}">{growth} %</span>"""
"""
        Hide infos about money if the user wants to. If a screenshot
        is being taken, hide them according to the user's choice
"""
if (
state_infos == "Always"
and result_in != currency_symbol
or state_infos == "Only for screenshot"
and screenshot == True
and result_in != currency_symbol
):
growth_text = "xx%"
# format funds transfer strings
total_cash_text = f"xxxx{currency_symbol}/xxxx{currency_symbol}"
total_transfer_text = f"xxxx{currency_symbol}"
if "pts" in result_in:
interest_text = f"xxxx{currency_symbol}"
fee_text = f"xxxx{currency_symbol}"
else:
total_cash_text = (
f"{total_cashin}{currency_symbol}/{total_cashout}{currency_symbol}"
)
total_transfer_text = f"{total_transfer}{currency_symbol}"
curve_args = {
"transactions": transactions,
"start_capital": start_capital,
"config": config,
}
# creates curves for equity plot
scatter_curves = self.create_curves(**curve_args)
# list with all infos calculated
summary_list = [
won_in_text,
nb_trades_won_text,
loss_in_text,
nb_trades_loss_text,
total_in_text,
nb_trades_flat_text,
nb_trades,
avg_trade_text,
profit_factor,
avg_won_text,
growth_text,
avg_loss_text,
max_dd_text,
avg_dd_text,
conseq_won,
conseq_loss,
interest_text,
fee_text,
total_cash_text,
total_transfer_text,
]
summary_dict = OrderedDict()
for count, header in enumerate(summary_headers):
summary_dict[header] = summary_list[count] # populate summary dict
dict_results = {
"summary": summary_dict,
"start_capital": start_capital,
"transactions": transactions,
"curves_dict": scatter_curves,
}
return dict_results
def create_curves(*args, **kwargs):
"""
        Function to build the scatter plots representing
        max dd, depths and highs, plus the equity curves.
        Returns a dict built to match the 'curve'
subkey in graph_dict (see classIGReport)
{nameofthegraph: {equity_curve: np.array,
high: np.array,
depth: np.array,
maxdd: np.array;
}
}
:kw param transactions: OrderedDict() of all trades
:kw param start_capital: Decimal
:kw param config: dict with config saved
"""
transactions = kwargs["transactions"]
start_capital = Decimal(kwargs["start_capital"])
config = kwargs["config"]
include = config["include"]
result_in = config["result_in"]
ig_config = funcMisc.read_ig_config()
"""
        IG sends keywords to identify transaction types; known
        keywords are stored in ig_config.json.
        If a transaction type is unknown, log it.
"""
kw_order = ig_config["keyword"]["ORDER"]
kw_fees = ig_config["keyword"]["FEES"]
kw_cashin = ig_config["keyword"]["CASH_IN"]
kw_cashout = ig_config["keyword"]["CASH_OUT"]
kw_transfer = ig_config["keyword"]["TRANSFER"]
plot_available = ["high", "depth", "maxdd"] # type of scatter
states_plot = []
        # list with the state of each scatter plot (show it or not)
for plot in plot_available:
state = config["what_to_show"][plot]
states_plot.append(state)
scatter_dict = {}
"""
        keys correspond to those in transactions; the lists are
        used to access the correct data in transactions
"""
if result_in == "Points/lot":
scatter_type = ["points_lot", "pnl", "growth"]
else:
scatter_type = ["points", "pnl", "growth"]
graph_name = ["Points", "Capital", "Growth"] # tab names
for index, scatter in enumerate(scatter_type):
if not transactions: # returns empty curves if no data
scatter_data = {
"equity_curve": np.array([]),
"maxdd": (np.array([]), np.array([])),
"depth": (np.array([]), np.array([])),
"high": (np.array([]), np.array([])),
}
scatter_dict[graph_name[index]] = scatter_data
else:
# means we have to care about fees/interest
if graph_name[index] != "Points":
if include == 2:
pnl_list = [
Decimal(transactions[trade][scatter])
for trade in transactions.keys()
if transactions[trade]["type"] in kw_order
or transactions[trade]["type"] in kw_fees
]
# exclude interest/fees
else:
pnl_list = [
Decimal(transactions[trade][scatter])
for trade in transactions.keys()
if transactions[trade]["type"] in kw_order
]
# pnl_list = [0,-10, 5,2,3,8,-25,54] #dummy curves to test
if graph_name[index] == "Capital":
# insert start capital
if len(pnl_list) != 0:
pnl_list.insert(0, start_capital)
pnl_array = np.asfarray(pnl_list)
pnl_cumsum = np.cumsum(pnl_array)
else:
if len(pnl_list) != 0:
pnl_list.insert(0, 0)
pnl_array = np.asfarray(pnl_list)
                            pnl_cumsum = pnl_array # don't cumsum if growth
                else: # we don't care about fees/interest
pnl_list = [
Decimal(transactions[trade][scatter])
for trade in transactions.keys()
if transactions[trade]["type"] in kw_order
]
# pnl_list = [0,-10, 5,2,3,8,-25,54] #dummy curves to test
if len(pnl_list) != 0:
pnl_list.insert(0, 0)
pnl_array = np.asfarray(pnl_list)
pnl_cumsum = np.cumsum(pnl_array)
"""
                find new highs in pnl_list. A new high is when
                a value is greater than all the previous ones
"""
dd_array = np.maximum.accumulate(pnl_cumsum) - pnl_cumsum
idx_high = [
count for count, high in enumerate(dd_array) if dd_array[count] == 0
]
                # list of all highs on the equity curve
list_high = [pnl_cumsum[idx] for idx in idx_high]
# del the first idx, not a trade
if 0 in idx_high:
del idx_high[0]
del list_high[0]
list_depth = []
idx_depth = []
i = 0
j = 0
"""
                Find depths in pnl_list. A depth is the
                smallest value between two highs
"""
for dd in dd_array:
try:
dd = dd_array[j]
except IndexError: # reach end of the array
break
                    if dd == 0.0: # means new high, equity curve ascending
j += 1 # continue iteration
                    # when equity curve is descending (dd != 0) find the
                    # next dd == 0, meaning a new high has been made
else:
try:
i = 1
while dd != 0:
# get next dd starting from last high
dd = dd_array[j + i]
i += 1
# isolate the depth in equity curve
current_depth = pnl_cumsum[j : j + i - 1]
list_depth.append(min(current_depth))
# get index of min
idx_min = np.argmin(current_depth) + j
idx_depth.append(idx_min)
# we continue iteration from the end of depth
j = j + i
except IndexError: # reach end of the array
break
try:
idx_max_dd = [np.argmax(dd_array)]
if graph_name[index] == "Capital":
max_dd = [pnl_cumsum[idx_max_dd[0]]]
else:
max_dd = pnl_cumsum[idx_max_dd]
except ValueError:
idx_max_dd = np.array([])
max_dd = np.array([])
                # when growth is not relevant send empty curves
if start_capital == 0 and graph_name[index] == "Growth":
scatter_data = {
"equity_curve": np.array([]),
"maxdd": (np.array([]), np.array([])),
"depth": (np.array([]), np.array([])),
"high": (np.array([]), np.array([])),
}
else:
# order matters to keep max dd visible
scatter_data = OrderedDict(
{
"equity_curve": pnl_cumsum,
"high": (idx_high, list_high),
"depth": (idx_depth, list_depth),
"maxdd": (idx_max_dd, max_dd),
}
)
scatter_dict[graph_name[index]] = scatter_data
return scatter_dict
| 2.96875
| 3
|
src/xbase/layout/fill/tech.py
|
skyworksinc/xbase
| 3
|
12777180
|
<reponame>skyworksinc/xbase
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 Blue Cheetah Analog Design Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module defines abstract analog mosfet template classes.
"""
from __future__ import annotations
import abc
from bag.util.immutable import Param
from bag.layout.tech import TechInfo
from ..data import LayoutInfo
class FillTech(abc.ABC):
"""An abstract class for drawing transistor related layout.
    This class defines various methods used to draw layouts used by MOSBase.
Parameters
----------
tech_info : TechInfo
the TechInfo object.
"""
def __init__(self, tech_info: TechInfo) -> None:
self._tech_info = tech_info
self._fill_config = tech_info.config['fill']
@abc.abstractmethod
def get_fill_info(self, mos_type: str, threshold: str, w: int, h: int,
el: Param, eb: Param, er: Param, et: Param) -> LayoutInfo:
raise NotImplementedError('Not implemented')
@property
def tech_info(self) -> TechInfo:
return self._tech_info
@property
def mos_type_default(self) -> str:
return self._fill_config['mos_type_default']
@property
def threshold_default(self) -> str:
return self._fill_config['threshold_default']
| 1.835938
| 2
|
hangman.py
|
swatisrs/Hangman
| 4
|
12777181
|
<filename>hangman.py
import random
def display_hangman(tries):
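    # ASCII-art stages indexed by tries remaining: stages[0] = fully drawn (lost), stages[6] = empty gallows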
stages = [
"""
--------
| |
| O
| \\|/
| |
| / \\
-
""",
"""
--------
| |
| O
| \\|/
| |
| /
-
""",
"""
--------
| |
| O
| \\|/
| |
|
-
""",
"""
--------
| |
| O
| \\|
| |
|
-
""",
"""
--------
| |
| O
| |
| |
|
-
""",
"""
--------
| |
| O
|
|
|
-
""",
"""
--------
| |
|
|
|
|
-
"""
]
return stages[tries]
def hangman(tally):
words=['malayalam','tomato','pumpkin','moon','goat']
    a=random.choice(words)
a=a.lower()
b=str()
print('Your word is: ', end= ' ')
for i in range (0, len(a)):
b= b+'_'
print(b[i] ,end= ' ')
guess=[]
wrong=[]
points=tally[1]
tries=tally[0]
while(tries>0):
print("\nYour guesses so far:",guess)
print('\n\nYou have ' +str(tries)+' chances to guess the word')
x=input("Enter 0 to stop this game or Enter an alphabet: ")
if(x=='0'):
break
if(len(x)!=1):
print("Enter only a single alphabet")
continue
if(x.isalpha()==False):
print("Enter a valid alphabet")
continue
x= x.lower()
guess.append(x)
buffer=0
buffer1=0
found=0
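        # buffer: letter matched this turn; buffer1: letter was already a wrong guess; found: any match in the word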
for i in range (0,len(a)):
if(a[i]==x):
found=1
buffer=1
b=list(b)
b[i]=x
b="".join(b)
points=points+1
if(buffer==1):
print("Correct guess :)")
for j in range (0, len(b)):
print(b[j] ,end= ' ')
if(found==0):
print("Incorrect guess, try harder!!")
for j in range (0, len(b)):
print(b[j] ,end= ' ')
if(buffer==0):
if x in wrong:
buffer1=1
if(buffer1==0):
print(display_hangman(tries-1))
tries=tries-1
wrong.append(x)
buf=0
for i in range(0, len(b)):
if(b[i]=='_'):
buf=1
if(buf==0):
print('\nYou have guessed the word with ' + str(tries) +' chances left \n'+ str(points)+' points\n\n')
tally[0]=tries
tally[1]=points
return tally
if(tries==0):
print('You lost. Better luck next time! \n Your word was: '+a+'\nYour points are: '+str(points))
print(a)
tally[0]=0
tally[1]=points
return tally
print('HANGMAN')
tally=[0,0]
while True:
x=input('Press:\n 1.To play a new game \n 2. Continue existing game \n 3. Exit\n')
if(x=='1'):
tally=[7,0]
tally= hangman(tally)
elif(x=='2'):
if(tally[0]==0):
print('There is no saved game, here is a new one\n')
tally[0]=7
tally[1]=0
tally=hangman(tally)
elif(x=='3'):
exit()
else:
print("Enter a valid response ")
| 3.65625
| 4
|
connection_plugin/macros/__init__.py
|
bakdata/connection_plugin
| 1
|
12777182
|
from airflow.hooks.base_hook import BaseHook
def get_conn(conn_id):
# get connection by name from BaseHook
conn = BaseHook.get_connection(conn_id)
return conn
| 1.703125
| 2
|
FWCore/GuiBrowsers/python/JSONExport.py
|
NTrevisani/cmssw
| 3
|
12777183
|
from __future__ import absolute_import
import sys
import os.path
import logging
import random
import FWCore.ParameterSet.SequenceTypes as sqt
import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.Modules as mod
import FWCore.ParameterSet.Types as typ
import FWCore.ParameterSet.Mixins as mix
from .Vispa.Plugins.ConfigEditor.ConfigDataAccessor import ConfigDataAccessor
from FWCore.GuiBrowsers.FileExportPlugin import FileExportPlugin
class JsonExport(FileExportPlugin):
option_types={}
plugin_name='JSON Export'
file_types=('html','json')
def __init__(self):
FileExportPlugin.__init__(self)
def produce(self,data):
#pset = lambda pdict: [[k,repr(v).split('(',1)[0],(repr(v).split('(',1)[1][:-1])] for k,v in pdict.items()]
def pset(pdict):
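            # recursively flatten a parameter-set dict into [name, pythonType, kind, value] entries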
result = []
for k,v in pdict.items():
if v.pythonTypeName()=='cms.PSet' or v.pythonTypeName()=='cms.untracked.PSet':
result.append([k,v.pythonTypeName(),'pset',pset(v.parameters_())])
elif v.pythonTypeName()=='cms.VPSet' or v.pythonTypeName()=='cms.untracked.VPSet':
result.append([k,v.pythonTypeName(),'vpset',[pset(a.parameters_()) for a in v]])
elif v.pythonTypeName().lower().startswith('cms.v') or v.pythonTypeName().lower().startswith('cms.untracked.v'):
result.append([k,v.pythonTypeName(),'list',[repr(a) for a in v]])
else:
result.append([k,v.pythonTypeName(),'single',repr(v.pythonValue())])
return result
#allObjects = [d for d in data._allObjects if (data.type(d) in ("EDProducer","EDFilter","EDAnalyzer","OutputModule"))]
#data.readConnections(allObjects)
def moduledict(mod,prefix,links=False):
result={}
result['label']=data.label(mod)
result['class']=data.classname(mod)
result['file']=data.pypath(mod)
result['line']=data.lineNumber(mod)
result['package']=data.pypackage(mod)
result['pset']=pset(mod.parameters_())
result['type']=data.type(mod)
if links:
result['uses']=[data.uses(mod)]
result['usedby']=[data.usedBy(mod)]
result['id']='%s_%s'%(prefix,data.label(mod))
return result
all={}
for tlo in data.topLevelObjects():
children=data.children(tlo)
if children:
all[tlo._label]=children
process = {'name':data.process().name_(),'src':data._filename}
#now unavailable
#schedule = []
#if 'Schedule' in all:
# for s in all['Schedule']:
# schedule.append(data.label(s))
source={}
if 'source' in all:
s = all['source'][0]
source['class']=data.classname(s)
source['pset']=pset(s.parameters_())
essources=[]
if 'essources' in all:
for e in all['essources']:
essources.append(moduledict(e,'essource'))
esproducers=[]
if 'esproducers' in all:
for e in all['esproducers']:
essources.append(moduledict(e,'esproducer'))
esprefers=[]
if 'esprefers' in all:
for e in all['esprefers']:
essources.append(moduledict(e,'esprefers'))
services=[]
if 'services' in all:
for s in all['services']:
services.append({'class':data.classname(s),'pset':pset(s.parameters_())})
def jsonPathRecursive(p,prefix):
#print "At:",self.label(p),self.type(p)
children = data.children(p)
if children:
children = [jsonPathRecursive(c,prefix) for c in children]
return {'type':'Sequence','label':'Sequence %s'%(data.label(p)),'id':'seq_%s' % data.label(p),'children':children}
else:
return moduledict(p,prefix,True)
paths=[]
if 'paths' in all:
for p in all['paths']:
path=jsonPathRecursive(p,data.label(p))
if path:
if not isinstance(path, type([])):
if path['type']=='Sequence':
path = path['children']
else:
path = [path]
else:
path=[]
paths.append({'label':data.label(p),'path':path})
endpaths=[]
if 'endpaths' in all:
for p in all['endpaths']:
path=jsonPathRecursive(p,data.label(p))
if path:
if not isinstance(path, type([])):
if path['type']=='Sequence':
path = path['children']
else:
path = [path]
else:
path=[]
endpaths.append({'label':data.label(p),'path':path})
#json={'process':process,'schedule':schedule,'source':source,'essources':essources,'esproducers':esproducers,'esprefers':esprefers,'services':services,'paths':paths,'endpaths':endpaths}
json={'process':process,'source':source,'essources':essources,'esproducers':esproducers,'esprefers':esprefers,'services':services,'paths':paths,'endpaths':endpaths}
return repr(json)
def export(self,data,filename,filetype):
if not data.process():
raise "JSONExport requires a cms.Process object"
json = self.produce(data)
if filetype=='json':
jsonfile = open(filename,'w')
jsonfile.write(json)
jsonfile.close()
if filetype=='html':
#open the HTML template and inject the JSON...
pass
| 2.015625
| 2
|
gdocorg/__init__.py
|
tgbugs/gdocorgpy
| 5
|
12777184
|
<filename>gdocorg/__init__.py
#!/usr/bin/env python3.6
import io
from pathlib import Path
from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseDownload
from httplib2 import Http
from oauth2client import file, client, tools
from pyontutils.config import devconfig
from IPython import embed
spath = Path(devconfig.secrets_file).parent
# derp
# https://developers.google.com/drive/api/v3/integrate-open#open_and_convert_google_docs_in_your_app
class UnhandledElementError(Exception):
""" Don't know what to do with this fellow ... """
def get_oauth_service(store_file,
creds_file=None, # if store_file exists don't need this
# if creds_file is None it will fail loudly before SCOPES
# drop .readonly to get rw
# WARNING be sure to also change your store_file name if you do this
SCOPES='https://www.googleapis.com/auth/drive.readonly'):
# https://developers.google.com/drive/api/v3/about-auth
store = file.Storage((spath / store_file).as_posix())
creds = store.get()
if not creds or creds.invalid:
if creds_file is None:
etype = 'no creds' if not creds else 'bad creds'
msg = f'{etype} and store-file not set. Run main to set up.'
raise FileNotFoundError(msg)
flow = client.flow_from_clientsecrets((spath / creds_file).as_posix(), SCOPES)
creds = tools.run_flow(flow, store)
service = build('drive', 'v3', http=creds.authorize(Http()))
return service
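# Minimal usage sketch (the file names below are hypothetical examples; both are
# resolved relative to the secrets directory):
#   service = get_oauth_service('drive-ro-store.json', creds_file='client-creds.json')
#   files = service.files().list(pageSize=10).execute()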
def get_docs_service(store_file):
DISCOVERY_DOC = 'https://docs.googleapis.com/$discovery/rest?version=v1'
store = file.Storage((spath / store_file).as_posix())
creds = store.get()
service = build('docs', 'v1', http=creds.authorize(Http()),
discoveryServiceUrl=DISCOVERY_DOC)
return service
class Convert:
    # Stub for a future paragraph-level mapping; note that ('paragraph') is just
    # a parenthesized string, so each entry is a one-element tuple of 'paragraph'.
    mapping = (
        (('paragraph'),),
        (('paragraph'),),
    )
class OrgDoc:
def __init__(self, org):
self.o = org
class DocOrg:
def __init__(self, docs):
self.docs = docs
def json(self, doc_name):
return self.docs.get_doc_json(doc_name)
def __call__(self, doc_name, start_heading=''):
self.j = self.json(doc_name)
self.start_heading = start_heading
org = ''
self.indent = ''
self.in_table = False
self.stack = []
        elements = self.j['body']['content']
        org += self.content(elements)
self.j = None
self.start_heading = ''
return org
def content(self, content):
out = ''
elements = content
nexts = elements[1:] + [None]
for element, nxt in zip(elements, nexts):
e = self.element(element)
if nxt is not None:
# peeking to see if we need to strip trailing whitespace
if 'paragraph' in nxt and nxt['paragraph']['paragraphStyle']['namedStyleType'] != 'NORMAL_TEXT':
e = e.rstrip(' ') # we have a header!
out += e
return out
def element(self, element):
# start index and end index are probably useful ...
# esp for crdt type updates
types = 'table', 'paragraph', 'sectionBreak', 'tableOfContents'
for t in types:
if t in element:
return getattr(self, t)(element[t])
        else:
            # No handler matched; log the unknown keys and contribute nothing
            # so the caller's string concatenation does not fail on None.
            print(element.keys())
            return ''
def table(self, table):
sep = '|'
out = ''
self.in_table = True
for row in table['tableRows']:
for cell in row['tableCells']:
out += sep + ' '
for element in cell['content']:
content = self.element(element)
content = content.replace('\n', '') # FIXME can newline but hard
out += content + ' '
out += sep + '\n' + self.indent
self.in_table = False
return out
def paragraph(self, paragraph):
#print(self.stack)
        mapping = {
            'NORMAL_TEXT': '',
            'TITLE': '* ',
            'HEADING_1': '** ',
            'HEADING_2': '*** ',
            'HEADING_3': '**** ',
            'HEADING_4': '***** ',
            # Docs also defines HEADING_5/6; map them so deep headings don't KeyError.
            'HEADING_5': '****** ',
            'HEADING_6': '******* ',
        }
out = ''
style = paragraph['paragraphStyle']['namedStyleType']
head = mapping[style]
lh = len(head)
if lh:
head = self.start_heading + head
lh = len(head)
lsh = len(self.start_heading)
self.indent = ' ' * lh
            while self.stack and lh <= len(mapping[self.stack[-1]]) + lsh:
                self.stack.pop(-1)
self.stack.append(style)
out += head
if 'bullet' in paragraph:
bullet = paragraph['bullet']
listId = bullet['listId']
lst = self.j['lists'][listId]
nls = lst['listProperties']['nestingLevels']
            if 'glyphType' in nls[0]:
                symbol = '1. '
            elif 'glyphSymbol' in nls[0]:
                symbol = '- '
            else:
                # Fall back to a plain bullet so `symbol` is always bound.
                symbol = '- '
if 'nestingLevel' in bullet:
nestingLevel = bullet['nestingLevel']
else:
nestingLevel = 0
bhead = (nestingLevel * 2 * ' ') + symbol
else:
bhead = ''
for i, element in enumerate(paragraph['elements']):
e = self.paragraph_element(element)
if not i and e.strip(): # issue where bhead appended when last element is empty!?
e = bhead + e
out += e
return out
def paragraph_element(self, element):
types = 'textRun', 'inlineObjectElement', 'pageBreak', 'footnoteReference'
for t in types:
if t in element:
return getattr(self, t)(element[t])
else:
raise UnhandledElementError(str(element))
def pageBreak(self, v):
return '\n'
def inlineObjectElement(self, ioe):
oid = ioe['inlineObjectId']
iobj = self.j['inlineObjects'][oid]
eobj = iobj['inlineObjectProperties']['embeddedObject']
types = 'imageProperties', 'embeddedDrawingProperties'
for t in types:
if t in eobj:
obj = eobj[t]
if obj:
uri = obj['contentUri']
return f'[[{uri}]]'
else:
return f'>>>Missing embedded object {oid}!<<<'
else:
raise TypeError(f'Unknown type in {list(eobj.keys())}')
def textRun(self, tr):
ts = tr['textStyle']
styles = 'underline', 'bold', 'italic', 'strikethrough'
lt = '' if self.in_table else '\n' + self.indent
mapping = {
'underline': '_',
'bold': '*',
'italic': '/',
'strikethrough': '+', # TODO haven't seen this yet
'line-terminator': lt,
'trailing-whitespace': ' ',
}
stack = []
out = '' # FIXME reverse whitespace ...
content = tr['content']
content = self.textRun_content_normalize(content, lt)
# don't style trailing whitespace
# it is too hard to fix this stuff in the wysiwyg so just fix it here
while content.endswith(' '):
stack.append('trailing-whitespace')
content = content[:-1]
while content.endswith('\n'):
stack.append('line-terminator')
content = content[:-1]
# don't style leading whitespace
# it is too hard to fix this stuff in the wysiwyg so just fix it here
while content.startswith(' '):
out += ' '
content = content[1:]
if content: # only style if there is content
for style in styles:
if style in ts:
if style == 'underline' and 'link' in ts:
style = 'link'
href = ts['link']['url']
mapping['link'] = f'[[{href}]['
out += mapping[style]
stack.append(style)
out += content
while stack:
style = stack.pop(-1)
if style == 'link':
out += ']]'
else:
out += mapping[style]
return out
@staticmethod
def textRun_content_normalize(content, lt):
vertical_tab = '\x0b' # apparently C-<enter> in docs produces this madness
return content.replace(vertical_tab, lt)
def footnoteReference(self, value):
footnoteId = value['footnoteId']
fobj = self.j['footnotes'][footnoteId]
out = '[fn::'
fn = self.content(fobj['content'])
out += fn.strip() # FIXME pass a skip leading whitespace argument?
out += ']'
return out
def sectionBreak(self, value):
return '\n'
def tableOfContents(self, value):
return ''
class Docs:
def __init__(self, store_file, converter=DocOrg):
self.service = get_docs_service(store_file)
self.converter = converter(self)
def get_doc_json(self, doc_name):
file_id = devconfig.secrets('google', 'docs', doc_name)
return self.service.documents().get(documentId=file_id,
suggestionsViewMode='SUGGESTIONS_INLINE').execute()
def get_doc_org(self, doc_name, start_heading='**'):
return self.converter(doc_name, start_heading=start_heading)
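# Usage sketch: convert a named doc (looked up in the devconfig secrets) to org
# text. The store file and doc name below are hypothetical examples.
#   docs = Docs('docs-ro-store.json')
#   org_text = docs.get_doc_org('my-doc', start_heading='**')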
class Drive:
def __init__(self, store_file):
self.service = get_oauth_service(store_file)
def get_doc(self, doc_name, mimeType='text/plain'):
file_id = devconfig.secrets('google', 'docs', doc_name)
request = self.service.files().export_media(fileId=file_id,
mimeType=mimeType)
fh = io.BytesIO()
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("Download %d%%." % int(status.progress() * 100))
fh.seek(0)
return fh.read()
def main():
# setup
store_file = devconfig.secrets('google', 'api', 'drive', 'store-file')
    if not (spath / store_file).exists():  # store files live in the secrets directory
SCOPES = 'https://www.googleapis.com/auth/drive.readonly' # FIXME config this
creds_file = devconfig.secrets('google', 'api', 'creds-file')
get_oauth_service(store_file, creds_file, SCOPES)
if __name__ == '__main__':
main()
| 2.46875
| 2
|
fairseq/criterions/label_smoothed_cross_entropy_with_regularization.py
|
raphaelmerx/fairseq_extension
| 2
|
12777185
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch.nn.functional as F
from fairseq import metrics, utils
from fairseq.criterions import register_criterion
from .label_smoothed_cross_entropy import LabelSmoothedCrossEntropyCriterion
@register_criterion('label_smoothed_cross_entropy_with_regularization')
class LabelSmoothedCrossEntropyCriterionWithRegularization(LabelSmoothedCrossEntropyCriterion):
def __init__(self, task, sentence_avg, label_smoothing, regularization_weight):
super().__init__(task, sentence_avg, label_smoothing)
self.regularization_weight = regularization_weight
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
LabelSmoothedCrossEntropyCriterion.add_args(parser)
parser.add_argument('--regularization_weight', default=1.0, type=float, metavar='D',
help='weight for the regularization loss')
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
if 'primary' not in sample or 'secondary' not in sample:
return super().forward(model, sample, reduce=reduce)
primary_net_output = model(**sample['primary']['net_input'])
primary_loss, primary_nll_loss = self.compute_loss(model, primary_net_output, sample['primary'], reduce=reduce)
primary_sample_size = sample['primary']['target'].size(0) if self.sentence_avg else sample['primary']['ntokens']
secondary_net_output = model(**sample['secondary']['net_input'])
secondary_loss, secondary_nll_loss = self.compute_loss(model, secondary_net_output, sample['secondary'], reduce=reduce)
secondary_sample_size = sample['secondary']['target'].size(0) if self.sentence_avg else sample['secondary']['ntokens']
primary_targets = model.get_targets(sample['primary'], primary_net_output).unsqueeze(-1)
secondary_targets = model.get_targets(sample['secondary'], secondary_net_output).unsqueeze(-1)
pad_mask = primary_targets.eq(self.padding_idx) | secondary_targets.eq(self.padding_idx)
regularization_loss = self.compute_regularization_loss(model, primary_net_output, secondary_net_output, pad_mask=pad_mask, reduce=reduce)
loss = primary_loss + secondary_loss + self.regularization_weight * regularization_loss
nll_loss = primary_nll_loss + secondary_nll_loss
ntokens = sample['primary']['ntokens'] + sample['secondary']['ntokens']
nsentences = sample['primary']['target'].size(0) + sample['secondary']['target'].size(0)
sample_size = primary_sample_size + secondary_sample_size
logging_output = {
'loss': utils.item(loss.data) if reduce else loss.data,
'nll_loss': utils.item(nll_loss.data) if reduce else nll_loss.data,
'regularization_loss': utils.item(regularization_loss.data) if reduce else regularization_loss.data,
'ntokens': ntokens,
'nsentences': nsentences,
'sample_size': sample_size,
}
return loss, sample_size, logging_output
def compute_regularization_loss(self, model, primary_net_output, secondary_net_output, pad_mask=None, reduce=True):
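        # Symmetric consistency term between the two forward passes: with m the
        # distribution of the averaged logits, this computes
        # (KL(m || p) + KL(m || q)) / 2, since F.kl_div(log_input, target)
        # evaluates KL(target || input) pointwise.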
mean_net_output = (primary_net_output[0] + secondary_net_output[0]) / 2
m = model.get_normalized_probs((mean_net_output,), log_probs=False)
p = model.get_normalized_probs(primary_net_output, log_probs=True)
q = model.get_normalized_probs(secondary_net_output, log_probs=True)
primary_loss = F.kl_div(p, m, reduction='none')
secondary_loss = F.kl_div(q, m, reduction='none')
if pad_mask is not None:
primary_loss.masked_fill_(pad_mask, 0.)
secondary_loss.masked_fill_(pad_mask, 0.)
if reduce:
primary_loss = primary_loss.sum()
secondary_loss = secondary_loss.sum()
loss = (primary_loss + secondary_loss) / 2
return loss
@staticmethod
def reduce_metrics(logging_outputs) -> None:
"""Aggregate logging outputs from data parallel training."""
loss_sum = utils.item(sum(log.get('loss', 0) for log in logging_outputs))
nll_loss_sum = utils.item(sum(log.get('nll_loss', 0) for log in logging_outputs))
regularization_loss_sum = utils.item(sum(log.get('regularization_loss', 0) for log in logging_outputs))
ntokens = utils.item(sum(log.get('ntokens', 0) for log in logging_outputs))
sample_size = utils.item(sum(log.get('sample_size', 0) for log in logging_outputs))
metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), sample_size, round=3)
metrics.log_scalar('nll_loss', nll_loss_sum / ntokens / math.log(2), ntokens, round=3)
metrics.log_scalar('regularization_loss', regularization_loss_sum / sample_size, sample_size, round=3)
metrics.log_derived('ppl', lambda meters: utils.get_perplexity(meters['nll_loss'].avg))
@staticmethod
def logging_outputs_can_be_summed() -> bool:
"""
Whether the logging outputs returned by `forward` can be summed
across workers prior to calling `reduce_metrics`. Setting this
        to True will improve distributed training speed.
"""
return True
| 2.40625
| 2
|
tests/real/test_real_proportional.py
|
simberaj/votelib
| 13
|
12777186
|
import sys
import os
import csv
import decimal
import pytest
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
import votelib.candidate
import votelib.convert
import votelib.evaluate.threshold
import votelib.evaluate.proportional
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
@pytest.fixture(scope='module')
def sk_nr_2020_data():
fpath = os.path.join(DATA_DIR, 'sk_nr_2020.csv')
with open(fpath, encoding='utf8') as infile:
rows = list(csv.reader(infile, delimiter=';'))
party_names, coalflags, votes, seats = [list(x) for x in zip(*rows)]
parties = [
votelib.candidate.Coalition(name=name, parties=[
votelib.candidate.PoliticalParty(pname)
for pname in name.split('-')
])
if int(coalflag) else votelib.candidate.PoliticalParty(name)
for name, coalflag in zip(party_names, coalflags)
]
return dict(zip(parties, [int(v) for v in votes])), {
party: int(n_seats)
for party, n_seats in zip(parties, seats) if int(n_seats) > 0
}
def get_sk_nr_evaluator():
standard_elim = votelib.evaluate.threshold.RelativeThreshold(
decimal.Decimal('.05'), accept_equal=True
)
mem_2_3_elim = votelib.evaluate.threshold.RelativeThreshold(
decimal.Decimal('.07'), accept_equal=True
)
mem_4plus_elim = votelib.evaluate.threshold.RelativeThreshold(
decimal.Decimal('.1'), accept_equal=True
)
eliminator = votelib.evaluate.threshold.CoalitionMemberBracketer(
{1: standard_elim, 2: mem_2_3_elim, 3: mem_2_3_elim},
mem_4plus_elim
)
# main evaluator
evaluator = votelib.evaluate.proportional.LargestRemainder(
'hagenbach_bischoff_rounded'
)
# TODO: missing provisions for tie handling and low amount of candidates
return votelib.evaluate.core.Conditioned(eliminator, evaluator)
def test_sk_nr_2020(sk_nr_2020_data):
votes, results = sk_nr_2020_data
nominator = votelib.candidate.PartyNominator()
for cand in votes.keys():
nominator.validate(cand)
assert get_sk_nr_evaluator().evaluate(votes, 150) == results
CZ_EP_EVALUATOR = votelib.evaluate.core.FixedSeatCount(
votelib.evaluate.core.Conditioned(
votelib.evaluate.threshold.RelativeThreshold(
decimal.Decimal('.05'), accept_equal=True
),
votelib.evaluate.proportional.HighestAverages('d_hondt')
),
21
)
def test_cz_ep_2019():
votes = {
'Klub angažovaných nestraníků': 2580,
'Strana nezávislosti ČR': 9676,
'Cesta odpovědné společnosti': 7890,
'Národní socialisté': 1312,
'Občanská demokratická strana': 344885,
'ANO, vytrollíme europarlament': 37046,
'Česká strana sociálně demokratická': 93664,
'Romská demokratická strana': 1651,
'KSČM': 164624,
'Koalice DSSS a NF': 4363,
'SPR-RSČ': 4284,
'<NAME>, ND': 18715,
'Pravý Blok': 4752,
'NE-VOLIM.CZ': 2221,
'Pro Česko': 2760,
'Vědci pro Českou republiku': 19492,
'Koalice ČSNS, Patrioti ČR': 1289,
'JSI PRO?Jist.Solid.In.pro bud.': 836,
'PRO Zdraví a Sport': 7868,
'Moravské zemské hnutí': 3195,
'Česká Suverenita': 2609,
'TVŮJ KANDIDÁT': 1653,
'HLAS': 56449,
'<NAME>, RČ': 15492,
'<NAME>AN, TOP 09': 276220,
'Česká pirátská strana': 330844,
'Svoboda a přímá demokracie': 216718,
'Aliance národních sil': 1971,
'ANO 2011': 502343,
'Agrární demokratická strana': 4004,
'Moravané': 6599,
'První Republika': 844,
'Demokratická strana zelených': 14339,
'Bezpečnost,Odpovědnost,Solid.': 2583,
'<NAME>kromníci, NEZ': 8720,
'Evropa společně': 12587,
'Konzervativní Alternativa': 235,
'KDU-ČSL': 171723,
'Alternativa pro Česk. rep.2017': 11729,
}
results = {
'ANO 2011': 6,
'Občanská demokratická strana': 4,
'Česká pirátská strana': 3,
'Koalice STAN, TOP 09': 3,
'Svoboda a přímá demokracie': 2,
'KDU-ČSL': 2,
'KSČM': 1,
}
assert CZ_EP_EVALUATOR.evaluate(votes) == results
CZ_PSP_EVALUATOR = votelib.evaluate.core.ByConstituency(
votelib.evaluate.proportional.HighestAverages('d_hondt'),
votelib.evaluate.proportional.LargestRemainder('hare'),
preselector=votelib.evaluate.threshold.RelativeThreshold(
decimal.Decimal('.05'), accept_equal=True
)
)
@pytest.fixture(scope='module')
def cz_psp_2017_votes():
fpath = os.path.join(DATA_DIR, 'cz_psp_2017.csv')
with open(fpath, encoding='utf8') as infile:
rows = list(csv.reader(infile, delimiter=';'))
region_names = rows[0][1:]
votes = {region: {} for region in region_names}
for row in rows[1:]:
party = row[0]
for regname, n_votes in zip(region_names, row[1:]):
votes[regname][party] = int(n_votes)
return votes
def test_cz_psp_2017(cz_psp_2017_votes):
reg_results = CZ_PSP_EVALUATOR.evaluate(cz_psp_2017_votes, 200)
nat_agg = votelib.convert.VoteTotals()
assert nat_agg.convert(reg_results) == {
'ANO': 78,
'ODS': 25,
'Piráti': 22,
'SPD': 22,
'ČSSD': 15,
'KSČM': 15,
'KDU-ČSL': 10,
'TOP 09': 7,
'STAN': 6,
}
assert reg_results['Hlavní město Praha'] == {
'ANO': 6,
'ODS': 5,
'Piráti': 5,
'SPD': 1,
'ČSSD': 1,
'KSČM': 1,
'KDU-ČSL': 1,
'TOP 09': 3,
'STAN': 1,
}
assert reg_results['Karlovarský kraj'] == {
'ANO': 3,
'Piráti': 1,
'SPD': 1,
}
def get_evaluators():
return [
CZ_EP_EVALUATOR,
CZ_PSP_EVALUATOR,
get_sk_nr_evaluator(),
]
| 2.28125
| 2
|
deadtrees/network/extra/efficientunetplusplus/model.py
|
cwerner/deadtrees
| 1
|
12777187
|
<reponame>cwerner/deadtrees
from typing import List, Optional, Union
from segmentation_models_pytorch.base import (
ClassificationHead,
SegmentationHead,
SegmentationModel,
)
from segmentation_models_pytorch.encoders import get_encoder
import torch
from torchvision import transforms
from .decoder import EfficientUnetPlusPlusDecoder
class EfficientUnetPlusPlus(SegmentationModel):
"""The EfficientUNet++ is a fully convolutional neural network for ordinary and medical image semantic segmentation.
Consists of an *encoder* and a *decoder*, connected by *skip connections*. The encoder extracts features of
different spatial resolutions, which are fed to the decoder through skip connections. The decoder combines its
own feature maps with the ones from skip connections to produce accurate segmentations masks. The EfficientUNet++
decoder architecture is based on the UNet++, a model composed of nested U-Net-like decoder sub-networks. To
increase performance and computational efficiency, the EfficientUNet++ replaces the UNet++'s blocks with
inverted residual blocks with depthwise convolutions and embedded spatial and channel attention mechanisms.
Synergizes well with EfficientNet encoders. Due to their efficient visual representations (i.e., using few channels
to represent extracted features), EfficientNet encoders require few computation from the decoder.
Args:
encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) to extract features
encoder_depth: Number of stages of the encoder, in range [3 ,5]. Each stage generate features two times smaller,
in spatial dimensions, than the previous one (e.g., for depth=0 features will haves shapes [(N, C, H, W)]),
for depth 1 features will have shapes [(N, C, H, W), (N, C, H // 2, W // 2)] and so on).
Default is 5
encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and
other pretrained weights (see table with available weights for each encoder_name)
decoder_channels: List of integers which specify **in_channels** parameter for convolutions used in the decoder.
Length of the list should be the same as **encoder_depth**
in_channels: The number of input channels of the model, default is 3 (RGB images)
classes: The number of classes of the output mask. Can be thought of as the number of channels of the mask
activation: An activation function to apply after the final convolution layer.
Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**.
Default is **None**
aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is built
on top of encoder if **aux_params** is not **None** (default). Supported params:
- classes (int): A number of classes
- pooling (str): One of "max", "avg". Default is "avg"
- dropout (float): Dropout factor in [0, 1)
- activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits)
Returns:
``torch.nn.Module``: **EfficientUnet++**
Reference:
https://arxiv.org/abs/2106.11447
"""
def __init__(
self,
encoder_name: str = "timm-efficientnet-b5",
encoder_depth: int = 5,
encoder_weights: Optional[str] = "imagenet",
decoder_channels: List[int] = (256, 128, 64, 32, 16),
squeeze_ratio: int = 1,
expansion_ratio: int = 1,
in_channels: int = 3,
classes: int = 1,
activation: Optional[Union[str, callable]] = None,
aux_params: Optional[dict] = None,
):
super().__init__()
self.classes = classes
self.encoder = get_encoder(
encoder_name,
in_channels=in_channels,
depth=encoder_depth,
weights=encoder_weights,
)
self.decoder = EfficientUnetPlusPlusDecoder(
encoder_channels=self.encoder.out_channels,
decoder_channels=decoder_channels,
n_blocks=encoder_depth,
squeeze_ratio=squeeze_ratio,
expansion_ratio=expansion_ratio,
)
self.segmentation_head = SegmentationHead(
in_channels=decoder_channels[-1],
out_channels=classes,
activation=activation,
kernel_size=3,
)
if aux_params is not None:
self.classification_head = ClassificationHead(
in_channels=self.encoder.out_channels[-1], **aux_params
)
else:
self.classification_head = None
self.name = "EfficientUNet++-{}".format(encoder_name)
self.initialize()
def predict(self, x):
"""Inference method. Switch model to `eval` mode, call `.forward(x)` with `torch.no_grad()`
Args:
x: 4D torch tensor with shape (batch_size, channels, height, width)
Return:
prediction: 4D torch tensor with shape (batch_size, classes, height, width)
"""
if self.training:
self.eval()
with torch.no_grad():
output = self.forward(x)
if self.classes > 1:
probs = torch.softmax(output, dim=1)
else:
probs = torch.sigmoid(output)
probs = probs.squeeze(0)
tf = transforms.Compose(
[
transforms.ToPILImage(),
                transforms.Resize(x.shape[2]),  # resize back to the height of the NCHW input tensor
transforms.ToTensor(),
]
)
full_mask = tf(probs.cpu())
return full_mask
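# Minimal usage sketch (encoder name and tensor shape are illustrative only):
#   import torch
#   model = EfficientUnetPlusPlus(encoder_name="timm-efficientnet-b0", classes=2)
#   x = torch.rand(1, 3, 256, 256)
#   mask = model.predict(x)  # per-class probabilities resized to the input size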
| 2.640625
| 3
|
02_app/utils.py
|
FelipeTe/DS4A
| 0
|
12777188
|
import os
import requests
from shapely.geometry import Point
import geopandas as gpd
def geo_code(address, city):
    """
    Geocode an address using the MapQuest geocoding API
    Parameters
    ------------
    address: str
        Address, as precise as possible; better to check first whether it can be found by the geocoding service
    city: str
        Name of the city
    Returns
    ---------
    results: dict
        dictionary with latitude, longitude and state name information
    """
parameters = {'key': os.environ.get("CON_KEY"),
'location': '{0:s}, {1:s}, Brazil'.format(address, city),
'thumbMaps': False,
'maxResults': 1
}
response = requests.get('http://www.mapquestapi.com/geocoding/v1/address', params=parameters)
assert response.status_code==200, 'Review address or internet connection'
results = response.json()['results'][0]['locations'][0]['latLng']
results['state_name'] = response.json()['results'][0]['locations'][0]['adminArea3']
results['street_name'] = response.json()['results'][0]['locations'][0]['street']
    assert results['lat']!=39.78373, 'Review address or internet connection'  # MapQuest appears to return this central-US fallback latitude when it cannot match the address
return results
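# Usage sketch (requires the CON_KEY environment variable to hold a MapQuest API
# key; the address below is an illustrative example):
#   result = geo_code('Avenida Paulista 1000', 'Sao Paulo')
#   print(result['lat'], result['lng'], result['state_name'])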
def convert_geo_to_sector_code(geo_code_output, states_dict, path_to_shapes):
    """
    Convert latitude, longitude and state reference to a census sector code
    Parameters
    ------------
    geo_code_output: dict
        output of geo_code function
    states_dict: dict
        correspondence of state names
    path_to_shapes: str
        path to folder containing shapes
    Returns
    ---------
    sector code: str
    """
coordinate_point = Point(geo_code_output['lng'], geo_code_output['lat'])
state_in_response = geo_code_output['state_name']
state_name = states_dict[state_in_response]
assert state_name in os.listdir(path_to_shapes), 'There is no shape available to reference this address'
file_name = [file for file in os.listdir(path_to_shapes+'/'+state_name) if file.find('.shp')>0][0]
census_sector = gpd.read_file(path_to_shapes+'/{0:s}/{1:s}'.format(state_name, file_name), encoding='latin1')
sector_code = census_sector.loc[census_sector.contains(coordinate_point), 'CD_GEOCODI'].values[0]
return sector_code
def flat_cell(cell):
    """
    Flatten dictionary cells to their first value
    """
if isinstance(cell, dict):
value_cell = list(cell.values())[0]
else:
value_cell = cell
return value_cell
| 3.265625
| 3
|
zephyr/zmake/zmake/zmake.py
|
sjg20/ec
| 0
|
12777189
|
<filename>zephyr/zmake/zmake/zmake.py
# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module encapsulating Zmake wrapper object."""
import logging
import os
import pathlib
import shutil
import subprocess
import tempfile
import zmake.build_config
import zmake.modules
import zmake.jobserver
import zmake.multiproc
import zmake.project
import zmake.toolchains as toolchains
import zmake.util as util
class Zmake:
"""Wrapper class encapsulating zmake's supported operations."""
def __init__(self, checkout=None, jobserver=None, jobs=0):
if checkout:
self.checkout = pathlib.Path(checkout)
else:
self.checkout = util.locate_cros_checkout()
assert self.checkout.exists()
if jobserver:
self.jobserver = jobserver
else:
try:
self.jobserver = zmake.jobserver.GNUMakeJobClient.from_environ()
except OSError:
self.jobserver = zmake.jobserver.GNUMakeJobServer(jobs=jobs)
self.logger = logging.getLogger(self.__class__.__name__)
def configure(self, project_dir, build_dir,
version=None, zephyr_base=None, module_paths=None,
toolchain=None, ignore_unsupported_zephyr_version=False,
build_after_configure=False, test_after_configure=False):
"""Set up a build directory to later be built by "zmake build"."""
# Make sure the build directory is clean.
if os.path.exists(build_dir):
self.logger.info("Clearing old build directory %s", build_dir)
shutil.rmtree(build_dir)
project = zmake.project.Project(project_dir)
if version:
# Ignore the patchset.
version = version[:2]
if (not ignore_unsupported_zephyr_version
and version not in project.config.supported_zephyr_versions):
raise ValueError(
'Requested version (v{}.{}) is not supported by the '
'project. You may wish to either configure zmake.yaml to '
'support this version, or pass '
'--ignore-unsupported-zephyr-version.'.format(*version))
else:
# Assume the highest supported version by default.
version = max(project.config.supported_zephyr_versions)
if not zephyr_base:
zephyr_base = util.locate_zephyr_base(self.checkout, version)
zephyr_base = zephyr_base.resolve()
if not module_paths:
module_paths = zmake.modules.locate_modules(self.checkout, version)
if not module_paths['zephyr-chrome']:
raise OSError("Missing zephyr-chrome module")
base_config = zmake.build_config.BuildConfig(
environ_defs={'ZEPHYR_BASE': str(zephyr_base),
'PATH': '/usr/bin'},
cmake_defs={'DTS_ROOT': module_paths['zephyr-chrome']})
module_config = zmake.modules.setup_module_symlinks(
build_dir / 'modules', module_paths)
if not toolchain:
toolchain = project.config.toolchain
if project.config.zephyr_sdk_is_preferred:
try:
toolchains.find_zephyr_sdk()
except OSError:
self.logger.warning(
'Unable to find the Zephyr SDK, which is the preferred '
'toolchain for this project (however, unavailable in '
'the chroot by default). Using %r instead, which '
'will probably compile but may not actually work at '
'all. See go/zephyr-care for more info.', toolchain)
else:
self.logger.info(
'Zephyr SDK is available. Using it instead of %r.',
toolchain)
toolchain = 'zephyr'
toolchain_config = toolchains.get_toolchain(toolchain, module_paths)
if not build_dir.exists():
            build_dir.mkdir()  # Path.mkdir() returns None, so don't reassign build_dir
processes = []
self.logger.info('Building %s in %s.', project_dir, build_dir)
for build_name, build_config in project.iter_builds():
self.logger.info('Configuring %s:%s.', project_dir, build_name)
config = (base_config
| toolchain_config
| module_config
| build_config)
output_dir = build_dir / 'build-{}'.format(build_name)
kconfig_file = build_dir / 'kconfig-{}.conf'.format(build_name)
proc = config.popen_cmake(self.jobserver, project_dir, output_dir,
kconfig_file, stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='utf-8',
errors='replace')
zmake.multiproc.log_output(self.logger, logging.DEBUG, proc.stdout)
zmake.multiproc.log_output(self.logger, logging.ERROR, proc.stderr)
processes.append(proc)
for proc in processes:
if proc.wait():
raise OSError(
"Execution of {} failed (return code={})!\n".format(
util.repr_command(proc.args), proc.returncode))
# Create symlink to project
util.update_symlink(project_dir, build_dir / 'project')
if test_after_configure:
return self.test(build_dir=build_dir)
elif build_after_configure:
return self.build(build_dir=build_dir)
def build(self, build_dir, output_files_out=None):
"""Build a pre-configured build directory."""
project = zmake.project.Project(build_dir / 'project')
procs = []
dirs = {}
for build_name, build_config in project.iter_builds():
self.logger.info('Building %s:%s.', build_dir, build_name)
dirs[build_name] = build_dir / 'build-{}'.format(build_name)
proc = self.jobserver.popen(
['/usr/bin/ninja', '-C', dirs[build_name]],
# Ninja will connect as a job client instead and claim
# many jobs.
claim_job=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='utf-8',
errors='replace')
zmake.multiproc.log_output(self.logger, logging.DEBUG, proc.stdout)
zmake.multiproc.log_output(self.logger, logging.ERROR, proc.stderr)
procs.append(proc)
for proc in procs:
if proc.wait():
raise OSError(
"Execution of {} failed (return code={})!\n".format(
util.repr_command(proc.args), proc.returncode))
# Run the packer.
packer_work_dir = build_dir / 'packer'
output_dir = build_dir / 'output'
for d in output_dir, packer_work_dir:
if not d.exists():
d.mkdir()
if output_files_out is None:
output_files_out = []
for output_file, output_name in project.packer.pack_firmware(
packer_work_dir, self.jobserver, **dirs):
shutil.copy2(output_file, output_dir / output_name)
            self.logger.info('Output file %r created.', output_file)
output_files_out.append(output_file)
return 0
def test(self, build_dir):
"""Test a build directory."""
procs = []
output_files = []
self.build(build_dir, output_files_out=output_files)
# If the project built but isn't a test, just bail.
project = zmake.project.Project(build_dir / 'project')
if not project.config.is_test:
return 0
for output_file in output_files:
self.logger.info('Running tests in %s.', output_file)
proc = self.jobserver.popen(
[output_file],
claim_job=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='utf-8',
errors='replace')
zmake.multiproc.log_output(self.logger, logging.DEBUG, proc.stdout)
zmake.multiproc.log_output(self.logger, logging.ERROR, proc.stderr)
procs.append(proc)
        for proc in procs:
if proc.wait():
raise OSError(
"Execution of {} failed (return code={})!\n".format(
util.repr_command(proc.args), proc.returncode))
return 0
def _run_pytest(self, directory):
"""Run pytest on a given directory.
This is a utility function to help parallelize running pytest on
multiple directories.
Args:
directory: The directory that we should run pytest on.
Returns:
The status code of pytest.
"""
self.logger.info('Running python test %s', directory)
proc = self.jobserver.popen(
['pytest', directory],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='utf-8',
errors='replace')
# Log stdout as DEBUG log messages.
zmake.multiproc.log_output(self.logger, logging.DEBUG, proc.stdout)
# Log stderr as ERROR log messages
zmake.multiproc.log_output(self.logger, logging.ERROR, proc.stderr)
return proc.wait()
def testall(self, fail_fast=False):
"""Test all the valid test targets"""
modules = zmake.modules.locate_modules(self.checkout, version=None)
root_dirs = [modules['zephyr-chrome'] / 'projects',
modules['zephyr-chrome'] / 'tests',
modules['ec-shim'] / 'zephyr/test']
project_dirs = []
for root_dir in root_dirs:
            self.logger.info('Finding zmake targets under \'%s\'.', root_dir)
for path in pathlib.Path(root_dir).rglob('zmake.yaml'):
project_dirs.append(path.parent)
executor = zmake.multiproc.Executor(fail_fast=fail_fast)
tmp_dirs = []
for project_dir in project_dirs:
is_test = zmake.project.Project(project_dir).config.is_test
temp_build_dir = tempfile.mkdtemp(
suffix='-{}'.format(os.path.basename(project_dir.as_posix())),
prefix='zbuild-')
tmp_dirs.append(temp_build_dir)
# Configure and run the test.
            # Bind the loop variables as lambda defaults so each deferred call
            # captures its own project/build dir rather than the last iteration's.
            executor.append(
                func=lambda project_dir=project_dir,
                            temp_build_dir=temp_build_dir,
                            is_test=is_test: self.configure(
                    project_dir=pathlib.Path(project_dir),
                    build_dir=pathlib.Path(temp_build_dir),
                    build_after_configure=True,
                    test_after_configure=is_test))
# Run pytest on zephyr-chrome/tests and platform/ec/zephyr/zmake.
executor.append(func=lambda: self._run_pytest(
directory=modules['ec-shim'] / 'zephyr'))
rv = executor.wait()
for tmpdir in tmp_dirs:
shutil.rmtree(tmpdir)
return rv
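# Minimal usage sketch (the paths below are illustrative examples):
#   import pathlib
#   zmake = Zmake()
#   zmake.configure(pathlib.Path('projects/foo'),
#                   pathlib.Path('/tmp/zbuild-foo'),
#                   build_after_configure=True)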
| 1.945313
| 2
|
Utilities.py
|
haroldport/portfolio-scripts
| 0
|
12777190
|
<reponame>haroldport/portfolio-scripts
from os import system, name
class Utilities:
@staticmethod
def clear():
if name == 'nt':
_ = system('cls')
else:
_ = system('clear')
@staticmethod
def create_ticker(stocks):
while True:
ticker = input('Enter ticker: ')
stock_filtered = list(filter(lambda x: x.ticker == ticker, stocks))
if ticker.strip() and len(stock_filtered) == 0:
break
print('Empty or repeated value. Try again')
return ticker
@staticmethod
def create_percentage():
while True:
try:
percentage = int(input('Enter percentage: '))
if 0 <= percentage <= 100:
break
print('Try again')
except ValueError:
print('Try again')
return percentage
@staticmethod
def create_number_of_actions():
while True:
try:
actions = int(input('Enter number of actions: '))
break
except ValueError:
print('Try again')
return actions
| 3.046875
| 3
|
src/PaperCrawler.py
|
coutyou/THU-IR-BIG-HW-3
| 0
|
12777191
|
class Url(object):
def __init__(self, url, title, ref_num, depth):
self.url = url
self.title = title
self.ref_num = ref_num
self.depth = depth
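    # Comparisons are inverted on purpose: heapq is a min-heap, so defining
    # "less than" as "more citations" makes the pool pop the most-cited URL first.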
def __lt__(self, other):
return self.ref_num > other.ref_num
def __gt__(self, other):
return self.ref_num < other.ref_num
def __eq__(self, other):
return self.ref_num == other.ref_num
class Paper(Url):
def __init__(self, url, title, ref_num, abstract, depth=-1):
super(Paper, self).__init__(url, title, ref_num, depth)
self.abstract = abstract
class Url_pool(object):
def __init__(self):
from heapq import heapify
self.url_his = set()
self.title_his = set()
self.urls = []
heapify(self.urls)
def append_url(self, url):
from heapq import heappush
import re
pun = "[\s+\.\!\/_,$%^*(+\"\']+|[+——!:‐-,。?、~@#¥%……&*()]+"
if re.sub(pun, "", url.title) in self.title_his:
pass
elif url.url in self.url_his:
pass
else:
self.url_his.add(url.url)
self.title_his.add(re.sub(pun, "", url.title))
heappush(self.urls, url)
def get_url(self):
from heapq import heappop
if len(self.urls) > 0:
return heappop(self.urls)
else:
return None
class PaperCrawler(object):
def __init__(self, init_url="https://xueshu.baidu.com/usercenter/paper/show?paperid=3821a90f58762386e257eb4e6fa11f79",
basic_url="https://xueshu.baidu.com", max_depth=5, tot_papers=10, wait_time=2):
self.init_url = init_url
self.basic_url = basic_url
self.max_depth = max_depth
self.tot_papers = tot_papers
self.wait_time = wait_time
self.url_pool = Url_pool()
self.papers = []
def crawl(self):
cur_depth = 0
self.papers.append(self.parse_url(self.init_url, cur_depth))
while len(self.papers) < self.tot_papers:
url = self.url_pool.get_url()
cur_depth = url.depth
self.papers.append(self.parse_url(url.url, cur_depth))
self.store()
def parse_url(self, url, depth):
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
options = Options()
options.add_argument('--headless')
options.add_experimental_option('excludeSwitches', ['enable-logging'])
driver = webdriver.Chrome(options=options)
driver.implicitly_wait(self.wait_time)
driver.get(url)
soup = BeautifulSoup(driver.page_source, 'html.parser')
main_info = soup.find(name='div', attrs={"class":"main-info"})
title = main_info.find(name='h3').text.strip()
print(f"Crawling {len(self.papers)+1}/{self.tot_papers}----------Title: {title}")
try:
abstract = main_info.find(name='p', attrs={"class":"abstract"}).text.strip()
except Exception as e:
abstract = "No Abstract"
ref_num = main_info.find(name='p', attrs={"class":"ref-wr-num"}).text.strip()
if ref_num.endswith("万"):
ref_num = int(float(ref_num[:-1])*10000)
else:
ref_num = int(ref_num)
paper = Paper(url, title, ref_num, abstract)
rel_lists = soup.find(name='ul', attrs={"class":"related_lists"})
if rel_lists and depth < self.max_depth:
rel_urls = rel_lists.find_all(name='li')
for rel_url in rel_urls:
url = self.basic_url + rel_url.find(name='p', attrs={"class":"rel_title"}).find(name="a").get('href')
title = rel_url.find(name='p', attrs={"class":"rel_title"}).find(name="a").text.strip()
try:
ref_num = rel_url.find(name='div', attrs={"class":"sc_info"}).find(name="a").text.strip()
if ref_num.endswith("万"):
ref_num = int(float(ref_num[:-1])*10000)
else:
ref_num = int(ref_num)
except Exception as e:
ref_num = 0
self.url_pool.append_url(Url(url, title, ref_num, depth+1))
driver.quit()
return paper
def store(self, filename='result.txt', encoding='utf-8'):
self.papers.sort()
        with open(filename, 'w', encoding=encoding, errors="ignore") as f:
for paper in self.papers:
f.write(f"Title: {paper.title}\n")
f.write(f"Abstract: {paper.abstract}\n")
f.write(f"Ref_num: {paper.ref_num}\n")
f.write(f"URL: {paper.url}\n")
f.write("\n")
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--max-depth", type=int, default=5, help="max_depth")
parser.add_argument("-t", "--tot-papers", type=int, default=10, help="tot_papers")
parser.add_argument("-w", "--wait-time", type=int, default=2, help="wait_time")
parser.add_argument("-i", "--init-url", type=str, default="https://xueshu.baidu.com/usercenter/paper/show?paperid=3821a90f58762386e257eb4e6fa11f79"
, help="init_url")
args = parser.parse_args()
crawler = PaperCrawler(init_url=args.init_url, max_depth=args.max_depth, tot_papers=args.tot_papers, wait_time=args.wait_time)
crawler.crawl()
| 3.453125
| 3
|
dialogs/top_level_dialog.py
|
Maxwingber/corobot
| 0
|
12777192
|
<gh_stars>0
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import base64
from datetime import date, time
from botbuilder.core import MessageFactory
from botbuilder.dialogs import (
WaterfallDialog,
DialogTurnResult,
WaterfallStepContext,
ComponentDialog,
ConfirmPrompt, Choice, ChoicePrompt, ChoiceFactory, FindChoicesOptions, ListStyle, DialogTurnStatus)
from botbuilder.dialogs.prompts import PromptOptions, TextPrompt, NumberPrompt
from data_models import UserProfile
from data_models import PersonalData
from dialogs.contact_to_infected import ContactsSelectionDialog
from dialogs.symptoms_selection_dialog import SymptomsSelectionDialog
from dialogs.riskcountry_selection_dialog import RiskCountrySelectionDialog
from dialogs.personaldata import PersonalDataDialog
class TopLevelDialog(ComponentDialog):
def __init__(self, dialog_id: str = None):
super(TopLevelDialog, self).__init__(dialog_id or TopLevelDialog.__name__)
# Key name to store this dialogs state info in the StepContext
self.USER_INFO = "value-userInfo"
self.add_dialog(TextPrompt(TextPrompt.__name__))
self.add_dialog(NumberPrompt(NumberPrompt.__name__))
choice = ChoicePrompt(ChoicePrompt.__name__)
choice.recognizer_options = FindChoicesOptions(allow_partial_matches=True)
self.add_dialog(choice)
self.add_dialog(SymptomsSelectionDialog(SymptomsSelectionDialog.__name__))
self.add_dialog(ContactsSelectionDialog(ContactsSelectionDialog.__name__))
self.add_dialog(PersonalDataDialog(PersonalDataDialog.__name__))
self.add_dialog(RiskCountrySelectionDialog(RiskCountrySelectionDialog.__name__))
self.add_dialog(
WaterfallDialog(
"WFDialog",
[
self.name_step,
self.age_step,
self.confirm_riskcountry_step,
self.start_riskcountry_selection_step,
self.start_symptom_selection_step,
self.temparature_step,
self.start_contacts_step,
self.job_claim_step,
self.job_type_step,
self.personal_data_step,
self.acknowledgement_step,
],
)
)
self.initial_dialog_id = "WFDialog"
async def name_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
# Create an object in which to collect the user's information within the dialog.
step_context.values[self.USER_INFO] = UserProfile()
# Ask the user to enter their name.
prompt_options = PromptOptions(
prompt=MessageFactory.text("Wie heißen Sie denn?")
)
return await step_context.prompt(TextPrompt.__name__, prompt_options)
async def age_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
# Set the user's name to what they entered in response to the name prompt.
user_profile = step_context.values[self.USER_INFO]
user_profile.name = step_context.result
# Ask the user to enter their age.
prompt_options = PromptOptions(
prompt=MessageFactory.text("Hallo " + user_profile.name + "! Wie alt sind Sie?"),
retry_prompt=MessageFactory.text("Bitte geben Sie Ihr Alter als Zahl an.")
)
return await step_context.prompt(NumberPrompt.__name__, prompt_options)
async def confirm_riskcountry_step(
self, step_context: WaterfallStepContext
) -> DialogTurnResult:
user_profile: UserProfile = step_context.values[self.USER_INFO]
user_profile.age = int(step_context.result)
prompt_options = PromptOptions(
choices = [Choice("Ja"), Choice("Nein")],
prompt = MessageFactory.text("Waren Sie dieses Jahr bereits im Ausland?")
)
return await step_context.begin_dialog(ChoicePrompt.__name__, prompt_options)
async def start_riskcountry_selection_step(
self, step_context: WaterfallStepContext
) -> DialogTurnResult:
print("[DEBUG] Received by German choice prompt: " + step_context.result.value)
riskcountry_true = step_context.result.value == "Ja"
if not riskcountry_true:
print("[DEBUG] Skipping risk country selection")
return await step_context.next([[],[]])
else:
print("[DEBUG] Entering risk country selection")
return await step_context.begin_dialog(RiskCountrySelectionDialog.__name__)
async def start_symptom_selection_step(
self, step_context: WaterfallStepContext
) -> DialogTurnResult:
# Set the user's age to what they entered in response to the age prompt.
print("[DEBUG] Arrived in symptom selection")
print("[DEBUG] Risk countries dialog result is " + str(step_context.result))
user_profile: UserProfile = step_context.values[self.USER_INFO]
user_profile.risk_countries = step_context.result[0]
user_profile.risk_country_returndates = step_context.result[1]
if user_profile.risk_countries is not None and len(user_profile.risk_countries) > 0:
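            # NOTE: comparing YYYYMMDD values as integers only approximates a
            # 14-day window (exact within a month, drifting across month
            # boundaries); subtracting datetime.date objects would be precise.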
for single_date in user_profile.risk_country_returndates:
print("[DEBUG] Looking at return date " + single_date)
print("[DEBUG] Time diff return date: " + str(
int(date.today().strftime("%Y%m%d")) - int(single_date.replace("-", ""))))
if int(date.today().strftime("%Y%m%d")) - int(single_date.replace("-", "")) <= 14:
print("[DEBUG] Set risk country bool to True")
user_profile.risk_countries_bool = True
print("[DEBUG] Risk countries and returndates are\n" + str(user_profile.risk_countries) + "\n" + str(user_profile.risk_country_returndates))
# Otherwise, start the review selection dialog.
return await step_context.begin_dialog(SymptomsSelectionDialog.__name__)
async def temparature_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
# Set the user's name to what they entered in response to the name prompt.
user_profile: UserProfile = step_context.values[self.USER_INFO]
user_profile.symptoms = step_context.result[0]
user_profile.symptoms_dates = step_context.result[1]
print("[DEBUG] Symptoms are " + str(user_profile.symptoms))
print("[DEBUG] Corresponding dates are " + str(user_profile.symptoms))
        if user_profile.symptoms is not None and len(user_profile.symptoms) > 0 and any(x in user_profile.symptoms for x in ['Husten', 'Lungenentzündung', 'Fieber']):
            print("[DEBUG] Setting critical symptoms bool to true with symptoms " + str(user_profile.symptoms))
user_profile.critical_symptoms_bool = True
if "Fieber" in user_profile.symptoms:
prompt_options = PromptOptions(
prompt=MessageFactory.text("Wie hoch ist Ihr Fieber in Grad Celsius (°C)?")
)
return await step_context.prompt(TextPrompt.__name__, prompt_options)
else:
print("[DEBUG] Skipping fever temparature input")
return await step_context.next("0")
async def start_contacts_step(
self, step_context: WaterfallStepContext
) -> DialogTurnResult:
# Set the user's age to what they entered in response to the age prompt.
user_profile: UserProfile = step_context.values[self.USER_INFO]
user_profile.fever_temp = float(step_context.result.replace(",", "."))
        # Start the contacts dialog.
return await step_context.begin_dialog(ContactsSelectionDialog.__name__)
async def job_claim_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
user_profile: UserProfile = step_context.values[self.USER_INFO]
# Storing contacts and setting bools
contact_dates = step_context.result
user_profile.contact_risk_1_date = contact_dates[0]
user_profile.contact_risk_2_date = contact_dates[1]
print("[DEBUG] Current date " + date.today().strftime("%Y%m%d"))
if contact_dates[0] is not None:
print("[DEBUG] " + contact_dates[0])
print("[DEBUG] Time diff risk contact 1: " + str(int(date.today().strftime("%Y%m%d")) - int(user_profile.contact_risk_1_date.replace("-", ""))))
if int(date.today().strftime("%Y%m%d")) - int(user_profile.contact_risk_1_date.replace("-", "")) <= 14:
user_profile.contact_risk_1_bool = True
if contact_dates[1] is not None:
print("[DEBUG] " + contact_dates[1])
print("[DEBUG] Time diff risk contact 2: " + str(int(date.today().strftime("%Y%m%d")) - int(user_profile.contact_risk_2_date.replace("-", ""))))
if int(date.today().strftime("%Y%m%d")) - int(user_profile.contact_risk_2_date.replace("-", "")) <= 14:
user_profile.contact_risk_2_bool = True
return await step_context.begin_dialog(ChoicePrompt.__name__, PromptOptions(
prompt=MessageFactory.text("Arbeiten Sie in einem systemkritischen Bereich?"),
choices=[Choice("Ja"), Choice("Nein")]
))
async def job_type_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
if step_context.result.value == "Ja":
print("[DEBUG] Recognized system cricital job claim")
return await step_context.begin_dialog(ChoicePrompt.__name__, PromptOptions(
prompt=MessageFactory.text("Zu welcher systemkritischen Gruppe gehören Sie?"),
choices=["Polizei", "Feuerwehr", "RichterIn", "Staatsanwälte", "Justizvollzug", "Rettungsdienst", "THW",
"Katastrophenschutz", "Mediziner", "Pfleger", "Apotheher", "**Keine**"],
style=ListStyle.list_style
))
else:
return await step_context.next(Choice("**Keine**"))
async def personal_data_step(self, step_context: WaterfallStepContext) -> DialogTurnResult:
# Set the user's company selection to what they entered in the review-selection dialog.
user_profile: UserProfile = step_context.values[self.USER_INFO]
if step_context.result.value != "**Keine**":
user_profile.critical_job = step_context.result.value
# If the user was in contact with a confirmed case in the past 14 days, he needs to add his personal data and contact the GA
if user_profile.contact_risk_1_bool is True:
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Da Sie als Kontaktperson der Kategorie 1 eingestuft werden, **melden Sie sich bitte sofort bei Ihrem zuständigen Gesundheitsamt**. Außerdem bitten wir Sie noch einige persönliche Daten für die Übermittlung an das Gesundheitsamt bereitzustellen. **Überwachen Sie bitte zudem Ihre Symptome**, **verlassen Sie Ihre Wohnung so wenig wie möglich** und **reduzieren Sie Ihren Kontakt zu anderen Personen auf das Nötigste**. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# Start the personal data dialog.
return await step_context.begin_dialog(PersonalDataDialog.__name__)
if user_profile.risk_countries_bool is True and user_profile.critical_symptoms_bool is True:
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Da Sie sich in den letzten 14 Tagen in einer Risikoregion aufgehalten haben und für Covid-19-typische Symptome zeigen, **melden Sie sich bitte bei Ihrem zuständigen Gesundheitsamt**. Außerdem bitten wir Sie noch einige persönliche Daten für die Übermittlung an das Gesundheitsamt bereitzustellen. **Überwachen Sie bitte zudem Ihre Symptome**, **verlassen Sie Ihre Wohnung so wenig wie möglich** und **reduzieren Sie Ihren Kontakt zu anderen Personen auf das Nötigste**. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# Start the personal data dialog.
return await step_context.begin_dialog(PersonalDataDialog.__name__)
if user_profile.contact_risk_2_bool is True:
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Bitte warten Sie ab, ob sich Ihre Kontaktperson als bestätigter Fall herausstellt. Sollte sich der Fall bestätigen, melden Sie sich bitte bei Ihrem zuständigen Gesundheitsamt. Für diesen Fall bitten wir Sie noch einige persönliche Daten für die Übermittlung an das Gesundheitsamt bereitzustellen. **Überwachen Sie zudem bitte Ihre Symptome**, **verlassen Sie Ihre Wohnung so wenig wie möglich** und **reduzieren Sie Ihren Kontakt zu anderen Personen auf das Nötigste**. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# Start the personal data dialog.
return await step_context.begin_dialog(PersonalDataDialog.__name__)
        if user_profile.critical_symptoms_bool is True and user_profile.critical_job is not None:  # critical_job holds the job name string, so test for presence
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Sie gelten nicht als Kontaktperson, arbeiten jedoch in einem systemkritischen Beruf. Bitte **melden Sie sich bei Ihrem zuständigen Gesundheitsamt**. Außerdem bitten wir Sie noch einige persönliche Daten für die Übermittlung an das Gesundheitsamt bereitzustellen. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# Start the personal data dialog.
return await step_context.begin_dialog(PersonalDataDialog.__name__)
if user_profile.risk_countries_bool is True:
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Da Sie sich in den letzten 14 Tagen in einer Risikoregion aufgehalten haben, **überwachen Sie bitte ob Sie Covid-19 typische Symptome entwickeln**, **verlassen Sie Ihre Wohnung so wenig wie möglich** und **reduzieren Sie Ihren Kontakt zu anderen Personen auf das Nötigste**. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# Start the personal data dialog.
return await step_context.next(PersonalData())
if user_profile.critical_symptoms_bool is True and user_profile.age > 59:
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Sie gelten nicht als Kontaktperson, gehören jedoch zu einer erhöhten Risikogruppe. Bitte **überwachen Sie Ihre Symptome**, **verlassen Sie Ihre Wohnung so wenig wie möglich** und **reduzieren Sie Ihren Kontakt zu anderen Personen auf das Nötigste**. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# No personal data required. Return empty personal data.
return await step_context.next(PersonalData())
if user_profile.critical_symptoms_bool is True:
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(
f"Sie gelten nicht als Kontaktperson. Bitte **überwachen Sie Ihre Symptome**, **verlassen Sie Ihre Wohnung so wenig wie möglich** und **reduzieren Sie Ihren Kontakt zu anderen Personen auf das Nötigste**. Empfehlungen zu Ihrem weiteren Handeln finden Sie auf den Seiten des Robert Koch-Instituts (rki.de)")
)
# No personal data required. Return empty personal data.
return await step_context.next(PersonalData())
# No personal data required. Return empty personal data.
else:
return await step_context.next(PersonalData())
async def acknowledgement_step(
self, step_context: WaterfallStepContext
) -> DialogTurnResult:
# Set the user's personal data to what they entered in the personal data dialog.
user_profile: UserProfile = step_context.values[self.USER_INFO]
        user_profile.personal_data = None  # TODO: check whether this reset is necessary
user_profile.personal_data = step_context.result
#time.sleep(1)
# Thank them for participating.
await step_context.context.send_activity(
MessageFactory.text(f"Danke für Ihre Mithilfe und das Beantworten der Fragen, {user_profile.name}. Bitte halten Sie sich an die aktuell geltenden Regelungen und Empfehlungen der Behörden und des Robert Koch-Instituts (rki.de).")
)
#time.sleep(1)
await step_context.context.send_activity(
MessageFactory.text(f"Bei weiterer Kommunikation mit Behörden können Sie folgende Zusammenfassung anhängen und sparen "
f"sich lästige erneute Nachfragen.")
)
ausgabe = "**Wichtige Daten für Ihr Gesundheitsamt**\n\n"
#ausgabe = "Ihre Angaben:"
try:
ausgabe += "\n\nName, Vorname: " + user_profile.personal_data.family_name + ", " + user_profile.personal_data.first_name
ausgabe += "\n\nGeburtsdatum: " + user_profile.personal_data.birthday
ausgabe += "\n\nGeschlecht: " + user_profile.personal_data.gender
ausgabe += "\n\nAdresse: " + user_profile.personal_data.street + ", " + user_profile.personal_data.zipcode + " " + user_profile.personal_data.city
ausgabe += "\n\nTelefonnr.: " + user_profile.personal_data.telephone
ausgabe += "\n\nEmail: " + user_profile.personal_data.email
except:
print("[DEBUG] no personal_data")
take_out = ""
take_out += "\n\nSymptome: "
if (len(user_profile.symptoms) > 0):
for i in range(0,len(user_profile.symptoms)):
            take_out += user_profile.symptoms[i] + " seit " + user_profile.symptoms_dates[i] + ", "
take_out = take_out[0:len(take_out)-2]
else:
take_out += "keine"
if (user_profile.fever_temp != 0.0):
take_out += "\n\nFiebertemperatur: " + str(user_profile.fever_temp).replace(".", ",") + "°C"
take_out += "\n\nBesuchte Risikogebiete: "
if (user_profile.risk_countries_bool):
for i in range(0, len(user_profile.risk_countries)):
take_out += user_profile.risk_countries[i] + " bis " + user_profile.risk_country_returndates[i] + ", "
take_out = take_out[0:len(take_out)-2]
else:
take_out += "keine"
ausgabe += take_out
ausgabe += "\n\nKontakt mit infizierter Person: "
if user_profile.contact_risk_1_date is not None:
ausgabe += "ja, am " + str(user_profile.contact_risk_1_date)
else:
ausgabe += "nein"
ausgabe += "\n\nKontakt mit Verdachtsperson: "
if user_profile.contact_risk_2_date is not None:
ausgabe += "ja, am " + str(user_profile.contact_risk_2_date)
else:
ausgabe += "nein"
ausgabe += "\n\nFunktionsträger: "
if user_profile.critical_job is not None:
ausgabe += user_profile.critical_job
else:
ausgabe += "nein"
#time.sleep(1)
await step_context.context.send_activity(
MessageFactory.text(ausgabe)
)
print("[DEBUG] Final user object created:\n" + str(user_profile.__dict__))
# Exit the dialog, returning the collected user information.
return await step_context.end_dialog(user_profile)
| 1.976563
| 2
|
applications/utopianIdentificationNumber.py
|
silvioedu/HackerRank-Regex-Practice
| 0
|
12777193
|
<filename>applications/utopianIdentificationNumber.py
import re
if __name__ == '__main__':
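    # 0-3 lowercase letters, then 2-8 digits, then 3 or more uppercase letters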
    regex = r'^[a-z]{0,3}\d{2,8}[A-Z]{3,}$'
    labels = {True: "VALID", False: "INVALID"}  # avoid shadowing the builtin dict
    for _ in range(int(input())):
        print(labels[bool(re.search(regex, input()))])
| 3.5
| 4
|
benchmark/csv/pandas_read_all.py
|
tgcandido/time-series-with-arctic
| 0
|
12777194
|
<reponame>tgcandido/time-series-with-arctic<filename>benchmark/csv/pandas_read_all.py<gh_stars>0
import pandas as pd
import time
start = time.time()
df = pd.read_csv('finance.csv')
df['unix'] = pd.to_datetime(df['unix'])
df.set_index('unix', inplace=True)
elapsed = time.time() - start
print(f'read_csv took {elapsed} seconds')
| 2.40625
| 2
|
src/utils/__init__.py
|
SgtMilk/stock-prediction
| 0
|
12777195
|
<filename>src/utils/__init__.py<gh_stars>0
# Copyright (c) 2021 <NAME>. Licence included in root of package.
from .print_colors import Colors
from .get_base_path import get_base_path
| 1.289063
| 1
|
libapparmor/utils/test/test-dbus.py
|
pyronia-sys/libpyronia
| 0
|
12777196
|
<reponame>pyronia-sys/libpyronia<gh_stars>0
#!/usr/bin/python3
# ----------------------------------------------------------------------
# Copyright (C) 2015 <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# ----------------------------------------------------------------------
import unittest
from collections import namedtuple
from common_test import AATest, setup_all_loops
from apparmor.rule.dbus import DbusRule, DbusRuleset
from apparmor.rule import BaseRule
from apparmor.common import AppArmorException, AppArmorBug
from apparmor.logparser import ReadLog
from apparmor.translations import init_translation
_ = init_translation()
exp = namedtuple('exp', ['audit', 'allow_keyword', 'deny', 'comment',
'access', 'all_access', 'bus', 'all_buses', 'path', 'all_paths', 'name', 'all_names', 'interface', 'all_interfaces', 'member', 'all_members', 'peername', 'all_peernames', 'peerlabel', 'all_peerlabels'])
# --- tests for single DbusRule --- #
class DbusTest(AATest):
def _compare_obj(self, obj, expected):
self.assertEqual(obj.allow_keyword, expected.allow_keyword)
self.assertEqual(obj.audit, expected.audit)
self.assertEqual(obj.deny, expected.deny)
self.assertEqual(obj.comment, expected.comment)
self.assertEqual(obj.access, expected.access)
self._assertEqual_aare(obj.bus, expected.bus)
self._assertEqual_aare(obj.path, expected.path)
self._assertEqual_aare(obj.name, expected.name)
self._assertEqual_aare(obj.interface, expected.interface)
self._assertEqual_aare(obj.member, expected.member)
self._assertEqual_aare(obj.peername, expected.peername)
self._assertEqual_aare(obj.peerlabel, expected.peerlabel)
self.assertEqual(obj.all_access, expected.all_access)
self.assertEqual(obj.all_buses, expected.all_buses)
self.assertEqual(obj.all_paths, expected.all_paths)
self.assertEqual(obj.all_names, expected.all_names)
self.assertEqual(obj.all_interfaces, expected.all_interfaces)
self.assertEqual(obj.all_members, expected.all_members)
self.assertEqual(obj.all_peernames, expected.all_peernames)
self.assertEqual(obj.all_peerlabels, expected.all_peerlabels)
def _assertEqual_aare(self, obj, expected):
if obj:
self.assertEqual(obj.regex, expected)
else:
self.assertEqual(obj, expected)
class DbusTestParse(DbusTest):
tests = [
# DbusRule object audit allow deny comment access all? bus all? path all? name all? interface all? member all? peername all? peerlabel all?
('dbus,' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus ( ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus ( , ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus send,' , exp(False, False, False, '', {'send'}, False, None, True, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus (send, receive),' , exp(False, False, False, '', {'send', 'receive'}, False, None, True, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus send bus=session,' , exp(False, False, False, '', {'send'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
('deny dbus send bus="session", # cmt' , exp(False, False, True , ' # cmt', {'send'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
('audit allow dbus peer=(label=foo),' , exp(True , True , False, '', None , True , None, True, None, True, None, True, None, True, None, True, None, True, 'foo', False)),
('dbus bus=session path=/foo/bar,' , exp(False, False, False, '', None , True , 'session', False, '/foo/bar', False, None, True, None, True, None, True, None, True, None, True)),
('dbus send bus=(session),' , exp(False, False, False, '', {'send'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus name=(SomeService),' , exp(False, False, False, '', None, True, None, True, None, True, 'SomeService',False, None, True, None, True, None, True, None, True)),
('dbus send bus=session peer=(label="foo"),' , exp(False, False, False, '', {'send'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, 'foo', False)),
('dbus send bus = ( session ) , ' , exp(False, False, False, '', {'send'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus path=/foo,' , exp(False, False, False, '', None , True , None, True, '/foo', False, None, True, None, True, None, True, None, True, None, True)),
('dbus eavesdrop bus=session,' , exp(False, False, False, '', {'eavesdrop'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
('dbus peer=(name=foo label=bar),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=( name = foo label = bar ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=( name = foo , label = bar ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=(, name = foo , label = bar ,),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=( name = foo, label = bar ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo,', False, 'bar', False)), # XXX peername includes the comma
('dbus peer=(label=bar name=foo),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=( label = bar name = foo ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=(, label = bar , name = foo ,),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=(, label = bar, name = foo ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar,', False)), # XXX peerlabel includes the comma
('dbus peer=( label = bar , name = foo ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
('dbus peer=( label = "bar" name = "foo" ),' , exp(False, False, False, '', None , True , None, True, None, True, None, True, None, True, None, True, 'foo', False, 'bar', False)),
]
def _run_test(self, rawrule, expected):
self.assertTrue(DbusRule.match(rawrule))
obj = DbusRule.parse(rawrule)
self.assertEqual(rawrule.strip(), obj.raw_rule)
self._compare_obj(obj, expected)
class DbusTestParseInvalid(DbusTest):
tests = [
('dbus foo,' , AppArmorException),
('dbus foo bar,' , AppArmorException),
('dbus foo int,' , AppArmorException),
('dbus send bar,' , AppArmorException),
('dbus send receive,' , AppArmorException),
('dbus peer=,' , AppArmorException),
('dbus peer=(label=foo) path=,' , AppArmorException),
('dbus (invalid),' , AppArmorException),
]
def _run_test(self, rawrule, expected):
self.assertTrue(DbusRule.match(rawrule)) # the above invalid rules still match the main regex!
with self.assertRaises(expected):
DbusRule.parse(rawrule)
class DbusTestParseFromLog(DbusTest):
def test_dbus_from_log(self):
parser = ReadLog('', '', '', '', '')
event = 'type=USER_AVC msg=audit(1375323372.644:157): pid=363 uid=102 auid=4294967295 ses=4294967295 msg=\'apparmor="DENIED" operation="dbus_method_call" bus="system" name="org.freedesktop.DBus" path="/org/freedesktop/DBus" interface="org.freedesktop.DBus" member="Hello" mask="send" pid=2833 profile="/tmp/apparmor-2.8.0/tests/regression/apparmor/dbus_service" peer_profile="unconfined" exe="/bin/dbus-daemon" sauid=102 hostname=? addr=? terminal=?\''
parsed_event = parser.parse_event(event)
self.assertEqual(parsed_event, {
'request_mask': None,
'denied_mask': 'send',
'error_code': 0,
'magic_token': 0,
'parent': 0,
'profile': '/tmp/apparmor-2.8.0/tests/regression/apparmor/dbus_service',
'bus': 'system',
'peer_profile': 'unconfined',
'operation': 'dbus_method_call',
'resource': None,
'info': None,
'aamode': 'REJECTING',
'time': 1375323372,
'active_hat': None,
'pid': 2833,
'task': 0,
'attr': None,
'name2': None,
'name': 'org.freedesktop.DBus',
'path': '/org/freedesktop/DBus',
'interface': 'org.freedesktop.DBus',
'member': 'Hello',
'family': None,
'protocol': None,
'sock_type': None,
})
# XXX send rules must not contain name conditional, but the log event includes it - how should we handle this in logparser.py?
# # access bus path name interface member peername peerlabel
# obj = DbusRule(parsed_event['denied_mask'], parsed_event['bus'], parsed_event['path'], parsed_event['name'], parsed_event['interface'], parsed_event['member'], parsed_event['peer_profile'], DbusRule.ALL, log_event=parsed_event)
# # DbusRule audit allow deny comment access all? bus all? path all? name all? interface all? member all? peername all? peerlabel all?
# expected = exp( False, False, False, '', {'send'}, False, 'system', False, '/org/freedesktop/DBus', False, 'org.freedesktop.DBus', False, 'org.freedesktop.DBus', False,
# 'Hello', False, 'unconfined', False, None, True)
# self._compare_obj(obj, expected)
# self.assertEqual(obj.get_raw(1), ' dbus send bus=system path=/org/freedesktop/DBus name=org.freedesktop.DBus member=Hello peer=(name=unconfined),')
class DbusFromInit(DbusTest):
tests = [
#DbusRule# access bus path name interface member peername peerlabel audit=, deny=, allow_keyword, comment=, log_event)
(DbusRule( 'send' , 'session', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL),
#exp# audit allow deny comment access all? bus all? path all? name all? interface all? member all? peername all? peerlabel all?
exp( False, False, False, '', {'send'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
#DbusRule# access bus path name interface member peername peerlabel audit=, deny=, allow_keyword, comment=, log_event)
(DbusRule(('send', 'receive'), 'session', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL),
#exp# audit allow deny comment access all? bus all? path all? name all? interface all? member all? peername all? peerlabel all?
exp( False, False, False, '', {'send', 'receive'}, False, 'session', False, None, True, None, True, None, True, None, True, None, True, None, True)),
#DbusRule# access bus path name interface member peername peerlabel audit=, deny=, allow_keyword, comment=, log_event)
(DbusRule(DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label'),
#exp# audit allow deny comment access all? bus all? path all? name all? interface all? member all? peername all? peerlabel all?
exp( False, False, False, '', None , True , None, True, None, True, None, True, '/int/face',False, '/mem/ber', False, '/peer/name', False, '/peer/label', False)),
]
def _run_test(self, obj, expected):
self._compare_obj(obj, expected)
class InvalidDbusInit(AATest):
tests = [
# access bus path name interface member peername peerlabel expected exception
# empty fields
( ('', 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( ((), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, '', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', '', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', '', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '' ), AppArmorBug),
# whitespace fields
( (' ', 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, ' ', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', ' ', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', ' ', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', ' ', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', ' ', '/peer/name', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', ' ', '/peer/label'), AppArmorBug),
( (DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', ' ' ), AppArmorBug),
# wrong type - dict()
( (dict(), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), dict(), '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', dict(), 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', dict(), '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', dict(), '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', '/int/face', dict(), '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', dict(), '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', dict() ), AppArmorBug),
# wrong type - None
( (None, 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( ((None), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), None, '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', None, 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', None, '/int/face', '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', None, '/mem/ber', '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', '/int/face', None, '/peer/name', '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', None, '/peer/label'), AppArmorBug),
( (('send'), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', None ), AppArmorBug),
# bind conflicts with path, interface, member, peer name and peer label
( (('bind'), DbusRule.ALL, '/org/test', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('bind'), DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/int/face', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('bind'), DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/mem/ber', DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('bind'), DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/peer/name', DbusRule.ALL ), AppArmorException),
( (('bind'), DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/peer/label'), AppArmorException),
        # eavesdrop conflicts with path, name, interface, member, peer name and peer label
( (('eavesdrop'),DbusRule.ALL, '/org/test', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('eavesdrop'),DbusRule.ALL, DbusRule.ALL, 'com.aa.test', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('eavesdrop'),DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/int/face', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('eavesdrop'),DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/mem/ber', DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('eavesdrop'),DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/peer/name', DbusRule.ALL ), AppArmorException),
( (('eavesdrop'),DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, '/peer/label'), AppArmorException),
# send and receive conflict with name
( (('send'), DbusRule.ALL, DbusRule.ALL, 'com.aa.test', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
( (('receive'), DbusRule.ALL, DbusRule.ALL, 'com.aa.test', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException),
# misc
( (DbusRule.ALL, DbusRule.ALL, 'foo/bar', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException), # path doesn't start with /
( (('foo'), DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException), # invalid access keyword
( (('foo', 'send'), DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL ), AppArmorException), # valid + invalid access keyword
]
def _run_test(self, params, expected):
with self.assertRaises(expected):
DbusRule(params[0], params[1], params[2], params[3], params[4], params[5], params[6], params[7])
def test_missing_params_1(self):
with self.assertRaises(TypeError):
DbusRule('send')
def test_missing_params_2(self):
with self.assertRaises(TypeError):
DbusRule(('send'), 'session')
def test_missing_params_3(self):
with self.assertRaises(TypeError):
DbusRule(('send'), 'session', '/org/test')
def test_missing_params_4(self):
with self.assertRaises(TypeError):
DbusRule(('send'), 'session', '/org/test', 'com.aa.test')
def test_missing_params_5(self):
with self.assertRaises(TypeError):
DbusRule(('send'), 'session', '/org/test', 'com.aa.test', '/int/face')
def test_missing_params_6(self):
with self.assertRaises(TypeError):
DbusRule(('send'), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber')
def test_missing_params_7(self):
with self.assertRaises(TypeError):
DbusRule(('send'), 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name')
class InvalidDbusTest(AATest):
def _check_invalid_rawrule(self, rawrule):
obj = None
self.assertFalse(DbusRule.match(rawrule))
with self.assertRaises(AppArmorException):
obj = DbusRule(DbusRule.parse(rawrule))
self.assertIsNone(obj, 'DbusRule handed back an object unexpectedly')
def test_invalid_dbus_missing_comma(self):
self._check_invalid_rawrule('dbus') # missing comma
def test_invalid_non_DbusRule(self):
self._check_invalid_rawrule('signal,') # not a dbus rule
def test_empty_data_1(self):
# access bus path name interface member peername peerlabel expected exception
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.access = ''
# no access set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_2(self):
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.bus = ''
# no bus set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_3(self):
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.path = ''
# no path set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_4(self):
obj = DbusRule(DbusRule.ALL, 'session', '/org/test', 'com.aa.test', '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.name = ''
# no name set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_5(self):
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.interface = ''
# no interface set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_6(self):
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.member = ''
# no member set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_7(self):
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.peername = ''
# no peername set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
def test_empty_data_8(self):
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label')
obj.peerlabel = ''
# no peerlabel set, and ALL not set
with self.assertRaises(AppArmorBug):
obj.get_clean(1)
class WriteDbusTest(AATest):
def _run_test(self, rawrule, expected):
self.assertTrue(DbusRule.match(rawrule), 'DbusRule.match() failed')
obj = DbusRule.parse(rawrule)
clean = obj.get_clean()
raw = obj.get_raw()
self.assertEqual(expected.strip(), clean, 'unexpected clean rule')
self.assertEqual(rawrule.strip(), raw, 'unexpected raw rule')
tests = [
# raw rule clean rule
(' dbus , # foo ' , 'dbus, # foo'),
(' audit dbus send,' , 'audit dbus send,'),
(' audit dbus (send ),' , 'audit dbus send,'),
(' audit dbus (send , receive ),' , 'audit dbus (receive send),'),
(' deny dbus send bus=session,# foo bar' , 'deny dbus send bus=session, # foo bar'),
(' deny dbus send bus=(session), ' , 'deny dbus send bus=session,'),
(' deny dbus send peer=(name=unconfined label=foo),' , 'deny dbus send peer=(name=unconfined label=foo),'),
(' deny dbus send interface = ( foo ),' , 'deny dbus send interface=foo,'),
(' deny dbus send ,# foo bar' , 'deny dbus send, # foo bar'),
(' allow dbus peer=(label=foo) ,# foo bar' , 'allow dbus peer=(label=foo), # foo bar'),
('dbus,' , 'dbus,'),
('dbus (receive),' , 'dbus receive,'),
('dbus (send),' , 'dbus send,'),
('dbus (send receive),' , 'dbus (receive send),'),
('dbus receive,' , 'dbus receive,'),
('dbus eavesdrop,' , 'dbus eavesdrop,'),
('dbus bind bus = foo name = bar,' , 'dbus bind bus=foo name=bar,'),
('dbus send peer=( label = /foo ) ,' , 'dbus send peer=(label=/foo),'),
('dbus (receive) member=baz,' , 'dbus receive member=baz,'),
('dbus send path = /foo,' , 'dbus send path=/foo,'),
('dbus receive peer=(label=foo),' , 'dbus receive peer=(label=foo),'),
('dbus (send receive) peer=(name=/usr/bin/bar),' , 'dbus (receive send) peer=(name=/usr/bin/bar),'),
('dbus (, receive ,,, send ,) interface=/sbin/baz,' , 'dbus (receive send) interface=/sbin/baz,'), # XXX leading and trailing ',' inside (...) causes error
# XXX add more complex rules
]
def test_write_manually_1(self):
# access bus path name interface member peername peerlabel expected exception
obj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label', allow_keyword=True)
expected = ' allow dbus send bus=session path=/org/test interface=/int/face member=/mem/ber peer=(name=/peer/name label=/peer/label),'
self.assertEqual(expected, obj.get_clean(2), 'unexpected clean rule')
self.assertEqual(expected, obj.get_raw(2), 'unexpected raw rule')
def test_write_manually_2(self):
# access bus path name interface member peername peerlabel expected exception
obj = DbusRule(('send', 'receive'), DbusRule.ALL, '/org/test', DbusRule.ALL, DbusRule.ALL, '/mem/ber', '/peer/name', DbusRule.ALL, allow_keyword=True)
expected = ' allow dbus (receive send) path=/org/test member=/mem/ber peer=(name=/peer/name),'
self.assertEqual(expected, obj.get_clean(2), 'unexpected clean rule')
self.assertEqual(expected, obj.get_raw(2), 'unexpected raw rule')
class DbusCoveredTest(AATest):
def _run_test(self, param, expected):
obj = DbusRule.parse(self.rule)
check_obj = DbusRule.parse(param)
self.assertTrue(DbusRule.match(param))
self.assertEqual(obj.is_equal(check_obj), expected[0], 'Mismatch in is_equal, expected %s' % expected[0])
self.assertEqual(obj.is_equal(check_obj, True), expected[1], 'Mismatch in is_equal/strict, expected %s' % expected[1])
self.assertEqual(obj.is_covered(check_obj), expected[2], 'Mismatch in is_covered, expected %s' % expected[2])
self.assertEqual(obj.is_covered(check_obj, True, True), expected[3], 'Mismatch in is_covered/exact, expected %s' % expected[3])
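# Hedged reading of the expected lists in the subclasses below (inferred from
# _run_test above): [is_equal, is_equal(strict), is_covered, is_covered(exact)].
# For example, a rule that adds a conditional is covered by, but not equal to,
# the base rule.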
class DbusCoveredTest_01(DbusCoveredTest):
rule = 'dbus send,'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus send,' , [ True , True , True , True ]),
('dbus send member=unconfined,' , [ False , False , True , True ]),
('dbus send, # comment' , [ True , False , True , True ]),
('allow dbus send,' , [ True , False , True , True ]),
('dbus send,' , [ True , False , True , True ]),
('dbus send bus=session,' , [ False , False , True , True ]),
('dbus send member=(label=foo),' , [ False , False , True , True ]),
('audit dbus send,' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus member=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus send,' , [ False , False , False , False ]),
('deny dbus send,' , [ False , False , False , False ]),
]
class DbusCoveredTest_02(DbusCoveredTest):
rule = 'audit dbus send,'
tests = [
# rule equal strict equal covered covered exact
( 'dbus send,' , [ False , False , True , False ]),
('audit dbus send,' , [ True , True , True , True ]),
( 'dbus send bus=session,' , [ False , False , True , False ]),
('audit dbus send bus=session,' , [ False , False , True , True ]),
( 'dbus,' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
]
class DbusCoveredTest_03(DbusCoveredTest):
rule = 'dbus send bus=session,'
tests = [
# rule equal strict equal covered covered exact
( 'dbus send bus=session,' , [ True , True , True , True ]),
('allow dbus send bus=session,' , [ True , False , True , True ]),
( 'dbus send,' , [ False , False , False , False ]),
( 'dbus,' , [ False , False , False , False ]),
( 'dbus send member=(label=foo),' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('audit dbus send bus=session,' , [ False , False , False , False ]),
('audit dbus bus=session,' , [ False , False , False , False ]),
( 'dbus send,' , [ False , False , False , False ]),
( 'dbus,' , [ False , False , False , False ]),
]
class DbusCoveredTest_04(DbusCoveredTest):
rule = 'dbus,'
tests = [
# rule equal strict equal covered covered exact
( 'dbus,' , [ True , True , True , True ]),
('allow dbus,' , [ True , False , True , True ]),
( 'dbus send,' , [ False , False , True , True ]),
( 'dbus receive bus=session,' , [ False , False , True , True ]),
( 'dbus member=(label=foo),' , [ False , False , True , True ]),
( 'dbus send bus=session,' , [ False , False , True , True ]),
('audit dbus,' , [ False , False , False , False ]),
('deny dbus,' , [ False , False , False , False ]),
]
class DbusCoveredTest_05(DbusCoveredTest):
rule = 'deny dbus send,'
tests = [
# rule equal strict equal covered covered exact
( 'deny dbus send,' , [ True , True , True , True ]),
('audit deny dbus send,' , [ False , False , False , False ]),
( 'dbus send,' , [ False , False , False , False ]), # XXX should covered be true here?
( 'deny dbus receive,' , [ False , False , False , False ]),
( 'deny dbus,' , [ False , False , False , False ]),
]
class DbusCoveredTest_06(DbusCoveredTest):
rule = 'dbus send peer=(name=unconfined),'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send peer=(name=unconfined),' , [ True , True , True , True ]),
('dbus peer=(name=unconfined),' , [ False , False , False , False ]),
('dbus send, # comment' , [ False , False , False , False ]),
('allow dbus send,' , [ False , False , False , False ]),
('allow dbus send peer=(name=unconfined),' , [ True , False , True , True ]),
('allow dbus send peer=(name=/foo/bar),' , [ False , False , False , False ]),
('allow dbus send peer=(name=/**),' , [ False , False , False , False ]),
('allow dbus send peer=(name=**),' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send peer=(name=unconfined),' , [ True , False , True , True ]),
('dbus send bus=session,' , [ False , False , False , False ]),
('dbus send peer=(name=unconfined label=foo),' , [ False , False , True , True ]),
('audit dbus send peer=(name=unconfined),' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus peer=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus send,' , [ False , False , False , False ]),
('deny dbus send,' , [ False , False , False , False ]),
]
class DbusCoveredTest_07(DbusCoveredTest):
rule = 'dbus send peer=(label=unconfined),'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send peer=(label=unconfined),' , [ True , True , True , True ]),
('dbus peer=(label=unconfined),' , [ False , False , False , False ]),
('dbus send, # comment' , [ False , False , False , False ]),
('allow dbus send,' , [ False , False , False , False ]),
('allow dbus send peer=(label=unconfined),' , [ True , False , True , True ]),
('allow dbus send peer=(label=/foo/bar),' , [ False , False , False , False ]),
('allow dbus send peer=(label=/**),' , [ False , False , False , False ]),
('allow dbus send peer=(label=**),' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send peer=(label=unconfined),' , [ True , False , True , True ]),
('dbus send bus=session,' , [ False , False , False , False ]),
('dbus send peer=(label=unconfined name=foo),' , [ False , False , True , True ]),
('audit dbus send peer=(label=unconfined),' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus peer=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus send,' , [ False , False , False , False ]),
('deny dbus send,' , [ False , False , False , False ]),
]
class DbusCoveredTest_08(DbusCoveredTest):
rule = 'dbus send path=/foo/bar,'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send path=/foo/bar,' , [ True , True , True , True ]),
('dbus send path=/foo/*,' , [ False , False , False , False ]),
('dbus send path=/**,' , [ False , False , False , False ]),
('dbus send path=/what/*,' , [ False , False , False , False ]),
('dbus path=/foo/bar,' , [ False , False , False , False ]),
('dbus send, # comment' , [ False , False , False , False ]),
('allow dbus send,' , [ False , False , False , False ]),
('allow dbus send path=/foo/bar,' , [ True , False , True , True ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send path=/foo/bar,' , [ True , False , True , True ]),
('dbus send path=/what/ever,' , [ False , False , False , False ]),
('dbus send bus=session,' , [ False , False , False , False ]),
('dbus send path=/foo/bar peer=(label=foo),' , [ False , False , True , True ]),
('audit dbus send path=/foo/bar,' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus peer=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus send,' , [ False , False , False , False ]),
('deny dbus send,' , [ False , False , False , False ]),
]
class DbusCoveredTest_09(DbusCoveredTest):
rule = 'dbus send member=**,'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send member=/foo/bar,' , [ False , False , True , True ]),
('dbus send member=/foo/*,' , [ False , False , True , True ]),
('dbus send member=/**,' , [ False , False , True , True ]),
('dbus send member=/what/*,' , [ False , False , True , True ]),
('dbus member=/foo/bar,' , [ False , False , False , False ]),
('dbus send, # comment' , [ False , False , False , False ]),
('allow dbus send,' , [ False , False , False , False ]),
('allow dbus send member=/foo/bar,' , [ False , False , True , True ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send member=/foo/bar,' , [ False , False , True , True ]),
('dbus send member=/what/ever,' , [ False , False , True , True ]),
('dbus send bus=session,' , [ False , False , False , False ]),
('dbus send member=/foo/bar peer=(label=foo),' , [ False , False , True , True ]),
('audit dbus send member=/foo/bar,' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus member=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus send,' , [ False , False , False , False ]),
('deny dbus send,' , [ False , False , False , False ]),
]
class DbusCoveredTest_10(DbusCoveredTest):
rule = 'dbus (send, receive) interface=foo,'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send interface=foo,' , [ False , False , True , True ]),
('dbus receive bus=session interface=foo,' , [ False , False , True , True ]),
('dbus (receive,send) interface=foo,' , [ True , False , True , True ]),
('dbus (receive,send),' , [ False , False , False , False ]),
('dbus send bus=session,' , [ False , False , False , False ]),
('dbus send member=/foo/bar,' , [ False , False , False , False ]),
('dbus send member=/foo/*,' , [ False , False , False , False ]),
('dbus send member=/**,' , [ False , False , False , False ]),
('dbus send member=/what/*,' , [ False , False , False , False ]),
('dbus member=/foo/bar,' , [ False , False , False , False ]),
('dbus send, # comment' , [ False , False , False , False ]),
('allow dbus send,' , [ False , False , False , False ]),
('allow dbus send member=/foo/bar,' , [ False , False , False , False ]),
('dbus send,' , [ False , False , False , False ]),
('dbus send member=/foo/bar,' , [ False , False , False , False ]),
('dbus send member=/what/ever,' , [ False , False , False , False ]),
('dbus send bus=session,' , [ False , False , False , False ]),
('dbus send bus=session interface=foo,' , [ False , False , True , True ]),
('dbus send member=/foo/bar peer=(label=foo),' , [ False , False , False , False ]),
('audit dbus send member=/foo/bar,' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus peer=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus send,' , [ False , False , False , False ]),
('deny dbus send,' , [ False , False , False , False ]),
]
class DbusCoveredTest_11(DbusCoveredTest):
rule = 'dbus name=/foo/bar,'
tests = [
# rule equal strict equal covered covered exact
('dbus,' , [ False , False , False , False ]),
('dbus name=/foo/bar,' , [ True , True , True , True ]),
('dbus name=/foo/*,' , [ False , False , False , False ]),
('dbus name=/**,' , [ False , False , False , False ]),
('dbus name=/what/*,' , [ False , False , False , False ]),
('dbus, # comment' , [ False , False , False , False ]),
('allow dbus,' , [ False , False , False , False ]),
('allow dbus name=/foo/bar,' , [ True , False , True , True ]),
('dbus ,' , [ False , False , False , False ]),
('dbus name=/foo/bar,' , [ True , False , True , True ]),
('dbus name=/what/ever,' , [ False , False , False , False ]),
('dbus bus=session,' , [ False , False , False , False ]),
('dbus name=/foo/bar peer=(label=foo),' , [ False , False , True , True ]),
('audit dbus name=/foo/bar,' , [ False , False , False , False ]),
('audit dbus,' , [ False , False , False , False ]),
('dbus receive,' , [ False , False , False , False ]),
('dbus peer=(label=foo),' , [ False , False , False , False ]),
('audit deny dbus,' , [ False , False , False , False ]),
('deny dbus,' , [ False , False , False , False ]),
]
class DbusCoveredTest_Invalid(AATest):
def AASetup(self):
# access bus path name interface member peername peerlabel expected exception
self.obj = DbusRule(('send', 'receive'), 'session', '/org/test', DbusRule.ALL, '/int/face', DbusRule.ALL, '/peer/name', '/peer/label', allow_keyword=True)
self.testobj = DbusRule(('send'), 'session', '/org/test', DbusRule.ALL, '/int/face', '/mem/ber', '/peer/name', '/peer/label', allow_keyword=True)
def test_borked_obj_is_covered_1(self):
self.testobj.access = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_2(self):
self.testobj.bus = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_3(self):
self.testobj.path = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_4(self):
# we need a different 'victim' because dbus send doesn't allow the name conditional we want to test here
self.obj = DbusRule(('bind'), 'session', DbusRule.ALL, '/name', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, allow_keyword=True)
self.testobj = DbusRule(('bind'), 'session', DbusRule.ALL, '/name', DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, DbusRule.ALL, allow_keyword=True)
self.testobj.name = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_5(self):
self.testobj.interface = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_6(self):
self.testobj.member = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_7(self):
self.testobj.peername = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_borked_obj_is_covered_8(self):
self.testobj.peerlabel = ''
with self.assertRaises(AppArmorBug):
self.obj.is_covered(self.testobj)
def test_invalid_is_covered(self):
obj = DbusRule.parse('dbus send,')
testobj = BaseRule() # different type
with self.assertRaises(AppArmorBug):
obj.is_covered(testobj)
def test_invalid_is_equal(self):
obj = DbusRule.parse('dbus send,')
testobj = BaseRule() # different type
with self.assertRaises(AppArmorBug):
obj.is_equal(testobj)
class DbusLogprofHeaderTest(AATest):
tests = [
('dbus,', [ _('Access mode'), _('ALL'), _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('dbus (send receive),', [ _('Access mode'), 'receive send', _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('dbus send bus=session,', [ _('Access mode'), 'send', _('Bus'), 'session', _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('deny dbus,', [_('Qualifier'), 'deny', _('Access mode'), _('ALL'), _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('allow dbus send,', [_('Qualifier'), 'allow', _('Access mode'), 'send', _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('audit dbus send bus=session,', [_('Qualifier'), 'audit', _('Access mode'), 'send', _('Bus'), 'session', _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('audit deny dbus send,', [_('Qualifier'), 'audit deny', _('Access mode'), 'send', _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('dbus bind name=bind.name,', [ _('Access mode'), 'bind', _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), 'bind.name', _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), _('ALL')]),
('dbus send bus=session path=/path interface=aa.test member=ExMbr peer=(name=(peer.name)),',
[ _('Access mode'), 'send', _('Bus'), 'session', _('Path'), '/path', _('Name'), _('ALL'), _('Interface'), 'aa.test', _('Member'), 'ExMbr', _('Peer name'), 'peer.name',_('Peer label'), _('ALL')]),
('dbus send peer=(label=foo),', [ _('Access mode'), 'send', _('Bus'), _('ALL'), _('Path'), _('ALL'), _('Name'), _('ALL'), _('Interface'), _('ALL'), _('Member'), _('ALL'), _('Peer name'), _('ALL'), _('Peer label'), 'foo' ]),
]
def _run_test(self, params, expected):
obj = DbusRule._parse(params)
self.assertEqual(obj.logprof_header(), expected)
# --- tests for DbusRuleset --- #
class DbusRulesTest(AATest):
def test_empty_ruleset(self):
ruleset = DbusRuleset()
ruleset_2 = DbusRuleset()
self.assertEqual([], ruleset.get_raw(2))
self.assertEqual([], ruleset.get_clean(2))
self.assertEqual([], ruleset_2.get_raw(2))
self.assertEqual([], ruleset_2.get_clean(2))
def test_ruleset_1(self):
ruleset = DbusRuleset()
rules = [
'dbus peer=(label=foo),',
'dbus send,',
]
expected_raw = [
'dbus peer=(label=foo),',
'dbus send,',
'',
]
expected_clean = [
'dbus peer=(label=foo),',
'dbus send,',
'',
]
for rule in rules:
ruleset.add(DbusRule.parse(rule))
self.assertEqual(expected_raw, ruleset.get_raw())
self.assertEqual(expected_clean, ruleset.get_clean())
def test_ruleset_2(self):
ruleset = DbusRuleset()
rules = [
'dbus send peer=(label=foo),',
'allow dbus send,',
'deny dbus bus=session, # example comment',
]
expected_raw = [
' dbus send peer=(label=foo),',
' allow dbus send,',
' deny dbus bus=session, # example comment',
'',
]
expected_clean = [
' deny dbus bus=session, # example comment',
'',
' allow dbus send,',
' dbus send peer=(label=foo),',
'',
]
for rule in rules:
ruleset.add(DbusRule.parse(rule))
self.assertEqual(expected_raw, ruleset.get_raw(1))
self.assertEqual(expected_clean, ruleset.get_clean(1))
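        # Hedged observation (from expected_clean above): get_clean() emits deny
        # rules before allow rules, separated by a blank line, whereas get_raw()
        # preserves the original insertion order and whitespace.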
class DbusGlobTest(AATest):
def setUp(self):
self.maxDiff = None
self.ruleset = DbusRuleset()
def test_glob_1(self):
self.assertEqual(self.ruleset.get_glob('dbus send,'), 'dbus,')
# not supported or used yet
# def test_glob_2(self):
# self.assertEqual(self.ruleset.get_glob('dbus send raw,'), 'dbus send,')
def test_glob_ext(self):
with self.assertRaises(NotImplementedError):
# get_glob_ext is not available for dbus rules
self.ruleset.get_glob_ext('dbus send peer=(label=foo),')
#class DbusDeleteTest(AATest):
# pass
setup_all_loops(__name__)
if __name__ == '__main__':
unittest.main(verbosity=2)
| 1.585938
| 2
|
optimade-python-tools/tests/validator/test_utils.py
|
attlevafritt/tfya92-groupa-optimade-python-tools
| 0
|
12777197
|
<reponame>attlevafritt/tfya92-groupa-optimade-python-tools
import pytest
from optimade.validator.utils import test_case as validator_test_case
from optimade.validator.utils import ResponseError
from optimade.validator.validator import ImplementationValidator
try:
import simplejson as json
except ImportError:
import json
@validator_test_case
def dummy_test_case(_, returns, raise_exception=None):
"""Dummy function that returns what is passed it,
optionally raising an exception.
"""
if raise_exception:
raise raise_exception
return returns
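# Hedged note on the test_case contract (inferred from the assertions below):
# the wrapped function returns a (result, message) tuple. A None result is
# ignored, a raised ResponseError is recorded as a regular failure, and any
# other exception is logged as an internal failure.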
def test_normal_test_case():
"""Check test_case under normal conditions."""
validator = ImplementationValidator(base_url="http://example.org", verbosity=0)
output = dummy_test_case(validator, ([1, 2, 3], "message"), request="test_request")
assert validator.results.success_count == 1
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] == [1, 2, 3]
assert output[1] == "message"
def test_optional_test_case():
"""Check test_case for optional case."""
validator = ImplementationValidator(base_url="http://example.org", verbosity=0)
output = dummy_test_case(
validator, ("string response", "message"), request="test_request", optional=True
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 1
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] == "string response"
assert output[1] == "message"
def test_ignored_test_case():
"""Check test_case is ignored when receiving `None`."""
validator = ImplementationValidator(base_url="http://example.org", verbosity=0)
# Test returns None, so should not increment success/failure
output = dummy_test_case(validator, (None, "message"), request="test_request")
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert output[1] == "message"
def test_skip_optional_test_case():
"""Check test_case skips a test when optional."""
validator = ImplementationValidator(
base_url="http://example.org", verbosity=0, run_optional_tests=False
)
# Test is optional and validator should not be running optional tests, so it should
# return hardcoded (None, "skipping optional").
output = dummy_test_case(
validator, ({"test": "dict"}, "message"), request="test_request", optional=True
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert output[1] == "skipping optional"
# Now check that the same test returns the correct values when not marked as optional
output = dummy_test_case(
validator, ({"test": "dict"}, "message"), request="test_request", optional=False
)
assert validator.results.success_count == 1
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] == {"test": "dict"}
assert output[1] == "message"
def test_expected_failure_test_case():
"""Check test_case reports a "failure" when `ResponseError` is raised."""
validator = ImplementationValidator(base_url="http://example.org", verbosity=0)
    # Raise a ResponseError inside the test and check that it is recorded as a
    # regular (non-internal) failure with the expected message.
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=ResponseError("Dummy error"),
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 1
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert output[1] == "ResponseError: Dummy error"
assert (
validator.results.failure_messages[-1][0]
== "http://example.org/test_request - dummy_test_case - failed with error"
)
assert validator.results.failure_messages[-1][1] == "ResponseError: Dummy error"
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=ResponseError("Dummy error"),
optional=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 1
assert validator.results.optional_failure_count == 1
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert output[1] == "ResponseError: Dummy error"
assert (
validator.results.optional_failure_messages[-1][0]
== "http://example.org/test_request - dummy_test_case - failed with error"
)
assert (
validator.results.optional_failure_messages[-1][1]
== "ResponseError: Dummy error"
)
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=json.JSONDecodeError("Dummy JSON error", "{}", 0),
optional=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 1
assert validator.results.optional_failure_count == 2
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert (
output[1]
== "Critical: unable to parse server response as JSON. JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)"
)
assert (
validator.results.optional_failure_messages[-1][0]
== "http://example.org/test_request - dummy_test_case - failed with error"
)
assert (
validator.results.optional_failure_messages[-1][1]
== "Critical: unable to parse server response as JSON. JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)"
)
def test_unexpected_failure_test_case():
"""Check test_case catches unexpected errors as internal failures."""
validator = ImplementationValidator(base_url="http://example.org", verbosity=0)
# Raise some unexpected exception and make sure it is logged as an internal error
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=FileNotFoundError("Unexpected error"),
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 1
assert output[0] is None
assert output[1] == "FileNotFoundError: Unexpected error"
assert (
validator.results.internal_failure_messages[-1][0]
== "http://example.org/test_request - dummy_test_case - failed with internal error"
)
assert (
validator.results.internal_failure_messages[-1][1]
== "FileNotFoundError: Unexpected error"
)
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=FileNotFoundError("Unexpected error"),
optional=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 2
assert output[0] is None
assert output[1] == "FileNotFoundError: Unexpected error"
assert (
validator.results.internal_failure_messages[-1][0]
== "http://example.org/test_request - dummy_test_case - failed with internal error"
)
assert (
validator.results.internal_failure_messages[-1][1]
== "FileNotFoundError: Unexpected error"
)
def test_multistage_test_case():
"""Check test_case's `multistage` functionality works as expected."""
validator = ImplementationValidator(base_url="http://example.org", verbosity=0)
# Test that multistage requests do nothing but return unless they've failed
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
multistage=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] == {"test": "dict"}
assert output[1] == "message"
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=ResponseError("Stage of test failed"),
multistage=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 1
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert output[1] == "ResponseError: Stage of test failed"
assert (
validator.results.failure_messages[-1][0]
== "http://example.org/test_request - dummy_test_case - failed with error"
)
assert (
validator.results.failure_messages[-1][1]
== "ResponseError: Stage of test failed"
)
def test_fail_fast_test_case():
"""Check test_case's `fail_fast` feature works as intended."""
validator = ImplementationValidator(
base_url="http://example.org", verbosity=0, fail_fast=True
)
# Check that optional failures do not trigger fail fast
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=ResponseError("Optional test failed"),
optional=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 1
assert validator.results.internal_failure_count == 0
assert output[0] is None
assert output[1] == "ResponseError: Optional test failed"
assert validator.results.optional_failure_messages[-1][0] == (
"http://example.org/test_request - dummy_test_case - failed with error"
)
assert (
validator.results.optional_failure_messages[-1][1]
== "ResponseError: Optional test failed"
)
# Check that the same non-optional failures do trigger fail fast
with pytest.raises(SystemExit):
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=ResponseError("Non-optional test failed"),
optional=False,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 1
assert validator.results.optional_failure_count == 1
assert validator.results.internal_failure_count == 0
assert validator.results.failure_messages[-1][0] == (
"http://example.org/test_request - dummy_test_case - failed with error"
)
assert (
validator.results.failure_messages[-1][1]
== "ResponseError: Non-optional test failed"
)
    # Check that an internal error also triggers fail fast
with pytest.raises(SystemExit):
output = dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=FileNotFoundError("Internal error"),
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 1
assert validator.results.optional_failure_count == 1
assert validator.results.internal_failure_count == 1
assert validator.results.internal_failure_messages[-1][0] == (
"http://example.org/test_request - dummy_test_case - failed with internal error"
)
assert (
validator.results.internal_failure_messages[-1][1]
== "FileNotFoundError: Internal error"
)
def test_that_system_exit_is_fatal_in_test_case():
"""Check that test_case treats `SystemExit` as fatal."""
validator = ImplementationValidator(
base_url="http://example.org", verbosity=0, fail_fast=False
)
with pytest.raises(SystemExit, match="Fatal error"):
dummy_test_case(
validator,
({"test": "dict"}, "message"),
request="test_request",
raise_exception=SystemExit("Fatal error"),
optional=True,
)
assert validator.results.success_count == 0
assert validator.results.optional_success_count == 0
assert validator.results.failure_count == 0
assert validator.results.optional_failure_count == 0
assert validator.results.internal_failure_count == 0
| 2.5625
| 3
|
pitop/miniscreen/oled/core/__init__.py
|
pi-top/pi-top-Python-SDK
| 28
|
12777198
|
<reponame>pi-top/pi-top-Python-SDK
from .device_controller import OledDeviceController
from .fps_regulator import FPS_Regulator
from .lock import MiniscreenLockFileMonitor
| 1.039063
| 1
|
ssh_interface/ssh.py
|
ilya-rarov/scylladb_installer
| 0
|
12777199
|
<filename>ssh_interface/ssh.py
import paramiko
from base64 import b64decode
class MissingAuthInformation(Exception):
pass
class MissingSudoPassword(Exception):
pass
class SSHConnection:
def __init__(self, host, port, user, password=None):
self._host = host
self._port = port
self._user = user
if password:
self._password = b64decode(password).decode('utf-8')
else:
self._password = password
if self._user == 'root':
self._sudo_mode = False
else:
self._sudo_mode = True
def execute_command(self, command):
with paramiko.SSHClient() as client:
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
if self._sudo_mode:
if self._password:
command = f"echo {self._password} | sudo -S --prompt='' " + command
else:
command = "sudo " + command
client.connect(hostname=self._host,
port=self._port,
username=self._user,
password=self._password)
standard_input, standard_output, standard_error = client.exec_command(command)
stdout = standard_output.readlines()
stderr = standard_error.readlines()
exit_code = standard_output.channel.recv_exit_status()
return stdout, stderr, exit_code
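# Illustrative usage (added sketch): the host, user and base64-encoded
# password below are hypothetical placeholders, not values from this repo.
if __name__ == '__main__':
    conn = SSHConnection('192.0.2.10', 22, 'admin', password='c2VjcmV0')  # b64 of "secret"
    out, err, code = conn.execute_command('uname -a')
    print(out, err, code)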
| 2.71875
| 3
|
seminars/04.15.2022/try_pymorphy.py
|
veronicamanukyan/2021-2-level-ctlr
| 0
|
12777200
|
<gh_stars>0
import time
from pathlib import Path
import pymorphy2
def main():
morph_analyzer = pymorphy2.MorphAnalyzer()
all_parses = morph_analyzer.parse('стали')
print(f'Analyzer found {len(all_parses)} different options of what this word means')
    # Usually we should take the first one - it is correct in most cases
    parsing_result = morph_analyzer.parse('стали')[0]
    # Parsing result has a Tag object; to write it to a file it must first be converted to a string
print(parsing_result.tag)
# Inspect Tag object as it has many important attributes
print(parsing_result.tag.POS)
# If you do not understand English terms of morphological analysis, use Russian translation
print(parsing_result.tag.cyr_repr)
# To get just a normal form, use an attribute `normal_form`
print(parsing_result.normal_form)
# To get full Parse object for a normal form use another property: `normalized`
print(parsing_result.normalized)
# news from https://www.nn.ru/text/education/2022/04/07/71171432/
plain_text_path = Path(__file__).parent / 'test.txt'
with plain_text_path.open(encoding='utf-8') as f:
plain_text = f.read()
all_words = plain_text.split()
start = time.time()
for word in all_words:
print(f'{word}: {pymorphy2.MorphAnalyzer().parse(word)[0].tag}')
many_instances_time = time.time() - start
start = time.time()
morph_analyzer = pymorphy2.MorphAnalyzer()
for word in all_words:
print(f'{word}: {morph_analyzer.parse(word)[0].tag}')
single_instance_time = time.time() - start
    print(f'Time spent (seconds) with a MorphAnalyzer instance per word: {many_instances_time}')  # 4.3 sec
    print(f'Time spent (seconds) with a single MorphAnalyzer instance: {single_instance_time}')  # 0.1 sec
print(f'Single instance is quicker in {many_instances_time / single_instance_time: .2f}x') # 41x
# Very interesting to read: https://pymorphy2.readthedocs.io/en/stable/internals/dict.html#id13
if __name__ == '__main__':
main()
| 3.21875
| 3
|
crack-data-structures-and-algorithms/leetcode/find_minimum_in_rotated_sorted_array_II_q154.py
|
Watch-Later/Eureka
| 20
|
12777201
|
# -*- coding: utf-8 -*-
# 0xCCCCCCCC
# Like Q153 but with possible duplicates.
def find_min(nums):
"""
:type nums: List[int]
:rtype: int
"""
l, r = 0, len(nums) - 1
while l < r and nums[l] >= nums[r]:
m = (l + r) // 2
if nums[m] > nums[r]:
l = m + 1
elif nums[m] < nums[r]:
r = m
else:
            # When nums[m] == nums[r], only a few cases remain.
            # Prune as many of them as possible.
if nums[m] < nums[l]:
l += 1
r = m
# nums[l] = nums[m] = nums[r]
# Rules out one of same elements, and properties of array are preserved.
else:
r -= 1
return nums[l]
nums = [4,5,6,7,0,1,2]
print(find_min(nums))
nums = [3,4,5,1,2]
print(find_min(nums))
nums = [5,1,2]
print(find_min(nums))
nums = [5,2]
print(find_min(nums))
nums = [2,3,4,5,1]
print(find_min(nums))
nums = [1, 3, 5]
print(find_min(nums))
nums = [2,2,2,0,1]
print(find_min(nums))
nums = [3,3,1,3]
print(find_min(nums))
nums = [3,1,3,3,3]
print(find_min(nums))
nums = [4,4,4,4,4,4]
print(find_min(nums))
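# Added note: when duplicates dominate, the r -= 1 branch may fire on every
# step, so the worst case is O(n) (vs O(log n) for Q153 without duplicates).
nums = [1,1,1,1,0,1,1]
print(find_min(nums))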
| 3.65625
| 4
|
test/CSRF-server.py
|
DanNegrea/PyRules
| 9
|
12777202
|
#!/usr/bin/env python
# Example server used to test Simple-CSRF-script.py and Advanced-CSRF-script.py
# GET creates the token
# POST verifies it and creates a new one
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from optparse import OptionParser
import string, random, re
html = """
<!DOCTYPE html>
<html>
<body>
<form action="/" method="POST">
Change address<br>
<input type="text" name="street" placeholder="Street">
<br>
<input type="text" name="city" placeholder="City">
<br>
<input type="text" name="zip" placeholder="ZIP">
<br>
<small>
<br>New Token<br>
<input type="text" name="CSRFtoken" value="$token">
<small>
<br><br>
<input type="submit" value="Submit">
<br>Message<br>
<textarea>$message</textarea>
</form>
</body>
</html>
"""
class RequestHandler(BaseHTTPRequestHandler):
token=""
def do_GET(self):
        new_token = ''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(32)])
        # BaseHTTPServer creates a fresh handler instance per request, so the
        # token must be stored on the class to survive until the next POST.
        RequestHandler.token = new_token
print "token new "+self.token
response = string.Template(html)
response = response.substitute(token=new_token, message="")
self.send_response(200)
#self.send_header("Set-Cookie", "foo=bar")
self.end_headers()
self.wfile.write(response)
def do_POST(self):
request_path = self.path
request_headers = self.headers
content_length = request_headers.getheaders('content-length')
length = int(content_length[0]) if content_length else 0
post_body = self.rfile.read(length)
print "token searched "+self.token
search = re.compile("CSRFtoken=\"("+self.token+")\"")
match = search.findall(post_body)
self.send_response(200)
if match:
expired_token = match[0]
print "token found "+expired_token
message="Token was OK "+ expired_token
else:
message="No valid token found"
        new_token = ''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(32)])
        RequestHandler.token = new_token  # persist on the class (see do_GET)
response = string.Template(html)
response = response.substitute(token=new_token, message=message)
print "token rereshed "+self.token
self.end_headers()
self.wfile.write(response)
do_PUT = do_POST
do_DELETE = do_GET
def main():
port = 9090
print('Listening on :%s' % port)
server = HTTPServer(('', port), RequestHandler)
server.serve_forever()
if __name__ == "__main__":
print("Main")
main()
| 3.109375
| 3
|
user/mixins.py
|
calumlim/talentalps
| 0
|
12777203
|
from django.contrib.auth.mixins import AccessMixin
class StaffAccessMixin(AccessMixin):
def dispatch(self, request, *args, **kwargs):
if not (request.user.is_authenticated and request.user.is_staff):
return self.handle_no_permission()
return super().dispatch(request, *args, **kwargs)
class EmployerAccessMixin(AccessMixin):
def dispatch(self, request, *args, **kwargs):
if not (request.user.is_authenticated and request.user.userprofile.is_employer):
return self.handle_no_permission()
return super().dispatch(request, *args, **kwargs)
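# Illustrative usage (added sketch; the view class and template name are
# hypothetical): list the mixin before the view base so dispatch() runs first.
from django.views.generic import TemplateView
class StaffDashboardView(StaffAccessMixin, TemplateView):
    template_name = 'dashboard.html'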
| 2.015625
| 2
|
src/cli.py
|
prettyirrelevant/flask-cookiecutter
| 2
|
12777204
|
from pathlib import Path
import click
from flask import current_app as app
from flask.cli import AppGroup, with_appcontext
blueprints_cli = AppGroup(
"blueprints", short_help="Creation and listing of blueprints."
)
@blueprints_cli.command("create")
@click.argument("name")
@click.option(
"-f",
"--full",
default=False,
show_default=True,
type=bool,
help="Whether the blueprint creation should be minimal",
)
@with_appcontext
def create_bp(name, full):
"""Creates a blueprint with the specified name"""
directory = Path(f"{app.config['BASE_DIR']}/src/{name}")
if not directory.exists():
directory.mkdir(parents=True, exist_ok=True)
click.echo("Created blueprint in {}".format(directory))
init_file = Path(f"{directory}/__init__.py")
with open(init_file, "a") as f:
if full:
lines = [
"from flask import Blueprint \n\n",
f"{name}_bp = Blueprint('{name}',__name__, template_folder='templates', static_folder='static', static_url_path='/static/{name}') \n\n",
"from . import views",
]
f.writelines(lines)
else:
lines = [
"from flask import Blueprint \n\n",
f"{name}_bp = Blueprint('{name}',__name__) \n\n",
"from . import views",
]
f.writelines(lines)
click.echo("Created __init__.py in {}".format(init_file))
if full:
templates_directory = Path(f"{directory}/templates/{name}")
templates_directory.mkdir(parents=True, exist_ok=True)
click.echo("Created templates directory in {}".format(templates_directory))
static_directory = Path(f"{directory}/static")
static_directory.mkdir()
click.echo("Created static directory in {}".format(static_directory))
views_file = Path(f"{directory}/views.py")
with open(views_file, "a") as f:
lines = [f"from . import {name}_bp"]
f.writelines(lines)
click.echo("Created views.py.py in {}".format(views_file))
else:
click.echo("Blueprint/directory exists already", err=True)
@blueprints_cli.command("list")
@with_appcontext
def list_blueprints():
    """List registered blueprints in the Flask app."""
    # A distinct function name avoids shadowing the builtin list().
    click.echo(list(app.blueprints.keys()))
@blueprints_cli.command("delete")
@click.argument("name")
@with_appcontext
def delete(name):
"""Deletes a blueprint folder"""
directory = Path(f"{app.config['BASE_DIR']}/src/{name}")
if directory.exists():
rmdir_recursive(directory)
click.echo(f"Blueprint deleted in {directory}!")
else:
click.echo("Directory does not exist!", err=True)
def rmdir_recursive(directory):
for i in directory.iterdir():
if i.is_dir():
rmdir_recursive(i)
else:
i.unlink()
directory.rmdir()
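# Illustrative wiring (added sketch; the blueprint name "shop" is a placeholder):
# register the group in your app factory, then drive it from the shell.
#   from src.cli import blueprints_cli
#   app.cli.add_command(blueprints_cli)
#
#   $ flask blueprints create shop -f true
#   $ flask blueprints list
#   $ flask blueprints delete shop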
| 2.78125
| 3
|
tests/test_Services.py
|
cossio/ProteoPy
| 0
|
12777205
|
<gh_stars>0
"""
Tests for the Services class
"""
from unittest import TestCase
import ProteoPy
class TestServices(TestCase):
"""
Contains tests for the Services class
"""
def setUp(self):
self.services = ProteoPy.Services()
def test_uniprot_id(self):
'''
Tests Services.uniprot_id method.
'''
self.assertEqual(self.services.uniprot_id('G6PD'), 'P11413')
self.assertEqual(self.services.uniprot_id('PGD'), 'P52209')
def test_uniprot_data(self):
'''
Tests Services.uniprot_data method.
'''
self.assertEqual(self.services.uniprot_data('O95336'), (27547.0, 258.0))
self.assertEqual(self.services.uniprot_data('P29401'), (67878.0, 623.0))
def test_goproteins(self):
'''
Tests Services.goproteins method.
'''
self.assertEqual(self.services.goproteins('GO:0009051'),
['O95336', 'P52209', 'P11413'])
self.assertEqual(self.services.goproteins('GO:0009052'),
['P49247', 'Q2QD12', 'P29401', 'P37837', 'Q96AT9', 'Q9UHJ6'])
def test_gogenes(self):
'''
Tests Services.gogenes method.
'''
self.assertEqual(self.services.gogenes('GO:0009051'),
['PGLS', '6PGL', 'HEL-S-304', 'PGD', '6PGD', 'G6PD', 'G6PD1'])
self.assertEqual(self.services.gogenes('GO:0009052'),
['RPIA', 'RPI', 'RPIAD', 'RPEL1', 'TKT', 'HEL-S-48', 'HEL107',
'SDDHD', 'TK', 'TKT1', 'TALDO1', 'TAL', 'TAL-H', 'TALDOR',
'TALH', 'RPE', 'RPE2-1', 'SHPK', 'CARKL', 'SHK'])
def test_uniprotToEC(self):
'''
Tests Services.uniprotToEC(...)
'''
self.assertEqual(self.services.uniprotToEC('P52790'), '2.7.1.1')
| 2.453125
| 2
|
w2v_setup/jsontest.py
|
derdav3/tf-sparql
| 5
|
12777206
|
def something(a, b):
    if a > 11:
        print(a, b)
        return True
    else:
        return False
for a in range(10):
    for b in range(20):
        print(a, b)
if something(a, b):
# Break the inner loop...
break
else:
# Continue if the inner loop wasn't broken.
continue
# Inner loop was broken, break the outer.
break
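# Equivalent without for/else (added sketch): factoring the search into a
# function lets a plain return replace the double break.
def find_pair():
    for a in range(10):
        for b in range(20):
            if something(a, b):
                return a, b  # unwinds both loops at once
    return None
print(find_pair())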
| 3.9375
| 4
|
training.py
|
zeta1999/adversarial-robustness-by-design
| 8
|
12777207
|
<filename>training.py
import argparse
import torch
from utils import get_model, compute_score, cifar10, cifar100, return_path_to_folder
def training(net, train_dl, test_dl, device, n_epochs, optimizer, is_scheduler, milestones):
if is_scheduler:
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', patience=2, factor=0.5, threshold=0.01)
if milestones:
scheduler2 = torch.optim.lr_scheduler.MultiStepLR(optimizer, [45, 60], gamma=0.5)
print(f'- Total number of epochs = {n_epochs}')
for e in range(n_epochs):
loss_stat = 0
for i, (x, y) in enumerate(train_dl):
x, y = x.to(device), y.to(device)
optimizer.zero_grad()
y_hat = net(x)
loss = torch.nn.functional.cross_entropy(y_hat, y)
loss.backward()
optimizer.step()
loss_stat += loss.item()
loss_stat = loss_stat / len(train_dl)
if is_scheduler:
scheduler.step(loss_stat)
if milestones:
scheduler2.step()
train_score = compute_score(net, train_dl, device)
test_score = compute_score(net, test_dl, device)
print(
f'Epoch {e + 1}, loss = {loss_stat:.3f}, train = {train_score:.2f}%, '
f'test = {test_score:.2f}% lr = {optimizer.param_groups[0]["lr"]}')
return net
def features_loading(net, dataset, device, features_model):
" loads the features of a given model, it freezes them during training time to fine-tune the classifier"
conv_features, _ = get_model(model='Ablation', binary_layer=False, opu=False, n_epochs='_', dataset=dataset,
opu_output=8000, opu_input=1024, sign_back=False, device=device)
conv_features.load_state_dict(torch.load(path_to_folder + 'models/' + features_model + '.pt'))
conv_features.eval()
conv_features = conv_features.state_dict()
for name, param in net.named_parameters():
if name.split('.')[0] == 'features':
param.data = conv_features[name]
param.requires_grad = False
print('- Robust features loaded!')
return net
def exp(model, n_epochs, opu, batch_size, binary_layer, lr, optimizer, weight_decay, smoke_test,
opu_output, opu_input, is_scheduler, dataset, features_model, milestones):
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(f'- Current torch.device is: {device}')
net, file_name = get_model(model=model, binary_layer=binary_layer, n_epochs=n_epochs, opu_output=opu_output,
opu_input=opu_input, device=device, dataset=dataset, opu=opu)
if features_model is not None:
net = features_loading(net, dataset, device, features_model)
if optimizer == 'SGD':
print(f'- Optimizer = {optimizer}, Starting lr = {lr}, Momentum = {0.9}, Weight decay = {weight_decay}')
optimizer = torch.optim.SGD(params=net.parameters(), lr=lr, momentum=0.9, weight_decay=weight_decay)
else:
print(f'- Optimizer = {optimizer}, Starting lr = {lr}, Weight decay = {weight_decay}')
optimizer = torch.optim.Adam(params=net.parameters(), lr=lr, weight_decay=weight_decay)
if smoke_test:
train_samples = 5000
else:
train_samples = None
if dataset == 'cifar10':
train_dl, test_dl = cifar10(batch_size=batch_size, num_workers=16, subsample=train_samples)
if dataset == 'cifar100':
train_dl, test_dl = cifar100(batch_size=batch_size, num_workers=16, subsample=train_samples)
net = training(net.to(device), train_dl, test_dl, device, n_epochs, optimizer, is_scheduler, milestones)
train_acc = compute_score(net, train_dl, device)
test_acc = compute_score(net, test_dl, device)
print(f'- Train acc {train_acc:.2f}%, Test acc {test_acc:.2f}%')
return train_acc, test_acc, net, file_name
def pars_args():
parser = argparse.ArgumentParser('parameters')
# str
parser.add_argument("-model", default='VGG16-OPU', type=str, help='Name of the trained net',
choices=['VGG-16', 'VGG16-OPU', 'VGG16-R-OPU', 'Ablation'])
parser.add_argument("-optimizer", default='SGD', type=str, help='Optimizer choice', choices=['SGD', 'ADAM'])
parser.add_argument("-dataset", default='cifar10', type=str, help='dataset', choices=['cifar10', 'cifar100'])
parser.add_argument("-features_model", default=None, type=str, help='model name to take features from')
# int
parser.add_argument("-n_epochs", default=100, type=int, help='Number of epochs')
parser.add_argument("-batch_size", default=128, type=int, help='Batch size')
parser.add_argument("-opu_output", default=512, type=int, help='Dimension of OPU output')
parser.add_argument("-opu_input", default=512, type=int, help='Dimension of OPU output')
parser.add_argument("-model_index", default=None, type=int, help='To save multiple models with same hyperparams')
parser.add_argument("-seed", default=None, type=int, help='Torch/numpy seed to ensure experiments reproducibility')
# float
parser.add_argument("-lr", default=1e-3, type=float, help='Learning rate')
parser.add_argument("-weight_decay", default=0.005, type=float, help='Weight decay for SGD')
# boolean switches.
parser.add_argument("-smoke-test", default=False, action='store_true', help='Reduce number of training samples')
parser.add_argument("-is_scheduler", default=True, action='store_false', help='Deactivates scheduler')
parser.add_argument("-binary_layer", default=False, action='store_true', help='Binary layer is active')
parser.add_argument("-opu", default=False, action='store_true', help='Needed for ablation models')
parser.add_argument("-milestones", default=False, action='store_true', help='Needed for ablation models')
args = parser.parse_args()
return args
if __name__ == '__main__':
import numpy as np
import pathlib
args = pars_args()
model_index = None
if args.model_index:
model_index = args.model_index
if args.seed is not None:
print(f'- Manual seed = {args.seed}')
torch.manual_seed(args.seed)
np.random.seed(args.seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
del args.model_index
del args.seed
path_to_folder = return_path_to_folder(args.dataset)
pathlib.Path(path_to_folder + 'accuracies').mkdir(parents=True, exist_ok=True)
pathlib.Path(path_to_folder + 'models').mkdir(parents=True, exist_ok=True)
train_accuracy, test_accuracy, model, file_name = exp(model=args.model, n_epochs=args.n_epochs,
batch_size=args.batch_size,
binary_layer=args.binary_layer, lr=args.lr, opu=args.opu,
optimizer=args.optimizer,
weight_decay=args.weight_decay, smoke_test=args.smoke_test,
opu_output=args.opu_output, opu_input=args.opu_input,
is_scheduler=args.is_scheduler, dataset=args.dataset,
features_model=args.features_model,
milestones=args.milestones)
if model_index:
file_name = file_name + f'__{model_index}'
np.savez(path_to_folder + 'accuracies/' + file_name, train_accuracy=train_accuracy, test_accuracy=test_accuracy)
torch.save(model.state_dict(), path_to_folder + 'models/' + file_name + '.pt')
print('model and accuracies saved.')
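# Illustrative invocation (added sketch; the hyperparameters are examples, not
# recommended settings):
#   $ python training.py -model VGG16-OPU -dataset cifar10 -n_epochs 100 \
#         -optimizer SGD -lr 1e-3 -binary_layer -seed 0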
| 2.640625
| 3
|
meiduo_mall/utils/django_redis_demo.py
|
liusudo123/meiduo_project
| 0
|
12777208
|
<gh_stars>0
# 1. Import the package
from django_redis import get_redis_connection
# 2. Connect
def test_django_redis():
client = get_redis_connection('default')
    # 3. CRUD operations (create, read, update, delete)
client.set('django_redis_key', 'itcast')
print(client.get('django_redis_key'))
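# Typical settings.py wiring for the 'default' alias used above (added sketch;
# the Redis location is a placeholder):
# CACHES = {
#     "default": {
#         "BACKEND": "django_redis.cache.RedisCache",
#         "LOCATION": "redis://127.0.0.1:6379/0",
#         "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
#     }
# }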
| 1.96875
| 2
|
app.py
|
dosterman09/web-scraping-challenge
| 0
|
12777209
|
<reponame>dosterman09/web-scraping-challenge<filename>app.py
from flask import Flask, render_template, redirect
import pymongo
import scrape_mars
app = Flask(__name__)
# `pymongo` is a module, not a callable: connect explicitly instead.
# The database/collection names below are assumptions.
client = pymongo.MongoClient("mongodb://localhost:27017/")
db = client.mars_app
collection = db.mars
@app.route("/")
def index():
mars = collection.find_one()
return render_template("index.html", mars = mars)
@app.route("/scrape")
def scrape():
mars_data = scrape_mars.scrape()
    collection.update_one({}, {"$set": mars_data}, upsert=True)
return redirect("/", code=302)
if __name__ == "__main__":
app.run(debug=True)
| 2.8125
| 3
|
sphinx/tello/source/_static/code/python/control-program/tello.py
|
oneoffcoder/books
| 26
|
12777210
|
import socket
import threading
import time
class Tello(object):
"""
Wrapper class to interact with the Tello drone.
"""
def __init__(self, local_ip, local_port, imperial=False,
command_timeout=.3,
tello_ip='192.168.10.1',
tello_port=8889):
"""
Binds to the local IP/port and puts the Tello into command mode.
:param local_ip: Local IP address to bind.
:param local_port: Local port to bind.
:param imperial: If True, speed is MPH and distance is feet.
If False, speed is KPH and distance is meters.
:param command_timeout: Number of seconds to wait for a response to a command.
:param tello_ip: Tello IP.
:param tello_port: Tello port.
"""
self.abort_flag = False
self.command_timeout = command_timeout
self.imperial = imperial
self.response = None
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.tello_address = (tello_ip, tello_port)
self.last_height = 0
self.socket.bind((local_ip, local_port))
# thread for receiving cmd ack
self.receive_thread = threading.Thread(target=self._receive_thread)
self.receive_thread.daemon = True
self.receive_thread.start()
self.socket.sendto(b'command', self.tello_address)
        print('sent: command')
def __del__(self):
"""
Closes the local socket.
:return: None.
"""
self.socket.close()
def _receive_thread(self):
"""
Listen to responses from the Tello.
Runs as a thread, sets self.response to whatever the Tello last returned.
:return: None.
"""
while True:
try:
self.response, _ = self.socket.recvfrom(3000)
except socket.error as exc:
print(f'Caught exception socket.error : {exc}')
def send_command(self, command):
"""
Send a command to the Tello and wait for a response.
:param command: Command to send.
:return: Response from Tello.
"""
print(f'>> send cmd: {command}')
self.abort_flag = False
timer = threading.Timer(self.command_timeout, self.set_abort_flag)
self.socket.sendto(command.encode('utf-8'), self.tello_address)
timer.start()
while self.response is None:
if self.abort_flag is True:
break
timer.cancel()
if self.response is None:
response = 'none_response'
else:
response = self.response.decode('utf-8')
self.response = None
return response
def set_abort_flag(self):
"""
Sets self.abort_flag to True.
Used by the timer in Tello.send_command() to indicate to that a response
timeout has occurred.
:return: None.
"""
self.abort_flag = True
def takeoff(self):
"""
Initiates take-off.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.send_command('takeoff')
def set_speed(self, speed):
"""
Sets speed.
This method expects KPH or MPH. The Tello API expects speeds from
1 to 100 centimeters/second.
Metric: .1 to 3.6 KPH
Imperial: .1 to 2.2 MPH
:param speed: Speed.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
speed = float(speed)
if self.imperial is True:
speed = int(round(speed * 44.704))
else:
speed = int(round(speed * 27.7778))
return self.send_command(f'speed {speed}')
def rotate_cw(self, degrees):
"""
Rotates clockwise.
:param degrees: Degrees to rotate, 1 to 360.
:return:Response from Tello, 'OK' or 'FALSE'.
"""
return self.send_command(f'cw {degrees}')
def rotate_ccw(self, degrees):
"""
Rotates counter-clockwise.
:param degrees: Degrees to rotate, 1 to 360.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.send_command(f'ccw {degrees}')
def flip(self, direction):
"""
Flips.
:param direction: Direction to flip, 'l', 'r', 'f', 'b'.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.send_command(f'flip {direction}')
def get_response(self):
"""
Returns response of tello.
:return: Response of tello.
"""
response = self.response
return response
def get_height(self):
"""
Returns height(dm) of tello.
:return: Height(dm) of tello.
"""
        height = self.send_command('height?')
        height = str(height)
        # filter() returns an iterator in Python 3, so join the digits back
        # into a string before converting.
        height = ''.join(filter(str.isdigit, height))
        try:
            height = int(height)
            self.last_height = height
        except ValueError:
            height = self.last_height
        return height
def get_battery(self):
"""
Returns percent battery life remaining.
:return: Percent battery life remaining.
"""
battery = self.send_command('battery?')
try:
battery = int(battery)
        except ValueError:
pass
return battery
def get_flight_time(self):
"""
Returns the number of seconds elapsed during flight.
:return: Seconds elapsed during flight.
"""
flight_time = self.send_command('time?')
try:
flight_time = int(flight_time)
        except ValueError:
pass
return flight_time
def get_speed(self):
"""
Returns the current speed.
:return: Current speed in KPH or MPH.
"""
speed = self.send_command('speed?')
try:
speed = float(speed)
if self.imperial is True:
speed = round((speed / 44.704), 1)
else:
speed = round((speed / 27.7778), 1)
        except ValueError:
pass
return speed
def land(self):
"""
Initiates landing.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.send_command('land')
def move(self, direction, distance):
"""
Moves in a direction for a distance.
This method expects meters or feet. The Tello API expects distances
from 20 to 500 centimeters.
Metric: .02 to 5 meters
Imperial: .7 to 16.4 feet
:param direction: Direction to move, 'forward', 'back', 'right' or 'left'.
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
distance = float(distance)
if self.imperial is True:
distance = int(round(distance * 30.48))
else:
distance = int(round(distance * 100))
return self.send_command(f'{direction} {distance}')
def move_backward(self, distance):
"""
Moves backward for a distance.
See comments for Tello.move().
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.move('back', distance)
def move_down(self, distance):
"""
Moves down for a distance.
See comments for Tello.move().
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.move('down', distance)
def move_forward(self, distance):
"""
Moves forward for a distance.
See comments for Tello.move().
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.move('forward', distance)
def move_left(self, distance):
"""
Moves left for a distance.
See comments for Tello.move().
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.move('left', distance)
def move_right(self, distance):
"""
Moves right for a distance.
See comments for Tello.move().
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.move('right', distance)
def move_up(self, distance):
"""
Moves up for a distance.
See comments for Tello.move().
:param distance: Distance to move.
:return: Response from Tello, 'OK' or 'FALSE'.
"""
return self.move('up', distance)
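# Illustrative flight script (added sketch): only works with a real Tello on
# its Wi-Fi network; the local IP/port below are the usual placeholders.
if __name__ == '__main__':
    drone = Tello(local_ip='', local_port=8889)
    print(drone.get_battery())
    drone.takeoff()
    drone.move_forward(1.0)  # 1 meter in metric mode
    drone.rotate_cw(90)
    drone.land()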
| 3.3125
| 3
|
Program.py
|
bordaigorl/lemma9
| 0
|
12777211
|
<filename>Program.py
from NameReprManager import NameReprManager
from Definition import Definition
from Process import Process
from InclusionCheck import check_inclusion
from secrets_leaks import get_secret_definition, get_leak_definition, get_leak_proc
from Widening import widen_iteratively
class Program(object):
def __init__(self, definitions, limit, helpers=None):
self.definitions = definitions
secret_def = get_secret_definition()
self.definitions.append(secret_def)
leak_def = get_leak_definition()
self.definitions.append(leak_def)
self.helpers = helpers
self.limit = limit
self.name_repr_man = None
self.error_list = None
def __str__(self):
definitions_repr = '\n'.join(str(d) for d in self.definitions)
return definitions_repr + '\n' + str(self.limit)
def prettyprint_with_ids(self):
substitution = self.name_repr_man.get_substitution_with_ids()
pretty_limit = self.limit.rename(substitution)
        print(pretty_limit)
def prettyprint_invariant(self):
substitution = self.name_repr_man.get_substitution_without_ids()
pretty_limit = self.limit.rename(substitution)
        print(pretty_limit.print_with_helper_defs())
def pretty_definitions(self):
substitution = self.name_repr_man.get_substitution_without_ids()
definitions = [d.rename(substitution) for d in self.definitions]
return definitions
def pretty_invariant_tex(self):
substitution = self.name_repr_man.get_substitution_without_ids()
pretty_limit = self.limit.rename(substitution)
return pretty_limit.print_with_helper_defs_tex()
def pretty_program_tex(self):
# definitions
representation = '\\[ \n\\begin{array}{lcl} \n'
for d in self.pretty_definitions():
representation += d.str_tex() + '; \\\\ \n'
representation += '\\end{array} \\]\n'
# invariant with helper definitions
representation += self.pretty_invariant_tex() + '\n'
return representation
def pretty_error_list(self):
if self.error_list is None:
self.error_list = self.limit.get_pretty_error_list_of_posts(self.name_repr_man)
return self.get_string_repr_errors()
def get_string_repr_errors(self):
assert self.error_list is not None
str_repr = str()
for error in self.error_list:
str_repr = str_repr + str(error) + '\n'
return str_repr
def rename_local_names(self):
        # Explicit loop instead of map(): map is lazy in Python 3, so using it
        # for side effects would silently do nothing there.
        for definition in self.definitions:
            definition.rename_name_repr(self.name_repr_man)
self.limit = self.limit.rename_name_repr(self.name_repr_man)
def rename_global_names(self):
global_names = self.limit.get_globalnames()
theta = self.name_repr_man.get_substitution_for_set(global_names)
self.limit = self.limit.rename(theta)
# by definition no global names in definitions
# self.definitions = map(lambda definition: definition.rename(theta), self.definitions)
def rename_name_repr(self):
self.name_repr_man = NameReprManager()
self.rename_global_names()
self.rename_local_names()
def is_an_invariant(self):
if self.error_list is None:
self.error_list = self.limit.get_pretty_error_list_of_posts(self.name_repr_man)
if self.error_list:
return False
else:
return True
def check_secrecy(self):
self.error_list = None # Flush error list
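        # NOTE: __init__ already appends the secret/leak definitions, so the
        # two appends below add duplicates to self.definitions.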
secret_def = get_secret_definition()
self.definitions.append(secret_def)
leak_def = get_leak_definition()
self.definitions.append(leak_def)
if self.is_an_invariant():
if check_inclusion(get_leak_proc(), self.limit):
return 'Leak in invariant'# Leak in invariant
else:
return 'Secret not leaked'
else:
return 'Not an invariant' # Not an invariant
def compute_error_list_for_invariant(self):
self.error_list = self.limit.get_pretty_error_list_of_posts(self.name_repr_man)
    def widen_initial_configuration(self, counter=15):
no_errors_left, program = widen_iteratively(self, counter)
return program
| 2.59375
| 3
|
myvenv/lib/python3.5/site-packages/allauth/socialaccount/providers/untappd/urls.py
|
tuvapp/tuvappcom
| 1
|
12777212
|
<reponame>tuvapp/tuvappcom
from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from .provider import UntappdProvider
urlpatterns = default_urlpatterns(UntappdProvider)
| 1.289063
| 1
|
describe_dask/test_describe_dask.py
|
guy1992l/functions
| 25
|
12777213
|
<reponame>guy1992l/functions
from mlrun import code_to_function, new_function, import_function
from pathlib import Path
import os
DATA_URL = 'https://s3.wasabisys.com/iguazio/data/iris/iris_dataset.csv'
ARTIFACTS_PATH = 'artifacts'
PLOTS_PATH = ARTIFACTS_PATH + '/plots'
def _create_dask_func(uri):
dask_cluster_name = "dask-cluster"
dask_cluster = new_function(dask_cluster_name, kind='dask', image='mlrun/ml-models')
dask_cluster.spec.remote = False
dask_uri = uri
dask_cluster.export(dask_uri)
def _validate_paths(base_path, paths: set):
    for path in paths:
        full_path = os.path.join(base_path, path)
        if Path(full_path).is_file():
            print("File exists")
        else:
            raise FileNotFoundError(full_path)
def test_code_to_function_describe_dask():
dask_uri = "dask_func.yaml"
_create_dask_func(dask_uri)
fn = code_to_function(filename="describe_dask.py", kind='local')
fn.spec.command = "describe_dask.py"
fn.run(inputs={"dataset": DATA_URL},
params={'update_dataset': True,
'label_column': 'label',
'dask_function': dask_uri,
},
handler="summarize",
)
_validate_paths(base_path='plots', paths={'corr.html',
'correlation-matrix.csv',
'hist.html',
'imbalance.html',
'imbalance-weights-vec.csv',
'violin.html'})
def test_import_function_describe_dask():
dask_uri = "dask_func.yaml"
_create_dask_func(dask_uri)
fn = import_function('function.yaml')
fn.run(inputs={"dataset": DATA_URL},
params={'update_dataset': True,
'label_column': 'label',
'dask_function': dask_uri,
},
handler="summarize",
artifact_path=os.getcwd() + '/artifacts'
, local=True
)
_validate_paths(base_path=PLOTS_PATH, paths={'corr.html',
'correlation-matrix.csv',
'hist.html',
'imbalance.html',
'imbalance-weights-vec.csv',
'violin.html'})
| 2.625
| 3
|
notecoin/huobi/connection/core.py
|
notechats/notecoin
| 0
|
12777214
|
<gh_stars>0
import logging
from notecoin.huobi.connection.impl import (RestApiRequest, WebsocketManage,
WebsocketRequest,
WebSocketWatchDog, call_sync,
call_sync_perforence_test)
from notecoin.huobi.constant.system import (ApiVersion, HttpMethod,
WebSocketDefine,
get_default_server_url)
from notecoin.huobi.utils.api_signature import create_signature
from notecoin.huobi.utils.huobi_api_exception import HuobiApiException
from notecoin.huobi.utils.url_params_builder import UrlParamsBuilder
from notetool.log import logger
class RestApiSyncClient(object):
def __init__(self, api_key=None, secret_key=None, url=None, *args, **kwargs):
"""
Create the request client instance.
:param kwargs: The option of request connection.
api_key: The public key applied from Huobi.
secret_key: The private key applied from Huobi.
url: The URL name like "https://api.huobi.pro".
performance_test: for performance test
init_log: to init logger
"""
self.__api_key = api_key
self.__secret_key = secret_key
self.__server_url = url or get_default_server_url(None)
self.__init_log = kwargs.get("init_log", None)
self.__performance_test = kwargs.get("performance_test", None)
        if self.__init_log:
logger.addHandler(logging.StreamHandler())
def request_process(self, method, url, params):
if self.__performance_test is not None and self.__performance_test is True:
return self.request_process_performance(method, url, params)
else:
return self.request_process_product(method, url, params)
def create_request(self, method, url, params):
builder = UrlParamsBuilder()
if params and len(params):
if method in [HttpMethod.GET, HttpMethod.GET_SIGN]:
for key, value in params.items():
builder.put_url(key, value)
elif method in [HttpMethod.POST, HttpMethod.POST_SIGN]:
for key, value in params.items():
builder.put_post(key, value)
else:
raise HuobiApiException(HuobiApiException.EXEC_ERROR, "[error] undefined HTTP method")
if method == HttpMethod.GET:
request = self.__create_request_by_get(url, builder)
elif method == HttpMethod.GET_SIGN:
request = self.__create_request_by_get_with_signature(url, builder)
elif method == HttpMethod.POST_SIGN:
request = self.__create_request_by_post_with_signature(url, builder)
elif method == HttpMethod.POST:
request = self.__create_request_by_post_with_signature(url, builder)
else:
raise HuobiApiException(HuobiApiException.INPUT_ERROR, "[Input] " + method + " is invalid http method")
return request
def request_process_product(self, method, url, params):
request = self.create_request(method, url, params)
if request:
return call_sync(request)
return None
def request_process_performance(self, method, url, params):
request = self.create_request(method, url, params)
if request:
return call_sync_perforence_test(request)
return None, 0, 0
"""
for post batch operation, such as batch create orders[ /v1/order/batch-orders ]
"""
def create_request_post_batch(self, method, url, params):
builder = UrlParamsBuilder()
if params and len(params):
if method in [HttpMethod.POST, HttpMethod.POST_SIGN]:
if isinstance(params, list):
builder.post_list = params
else:
raise HuobiApiException(HuobiApiException.EXEC_ERROR, "[error] undefined HTTP method")
request = self.__create_request_by_post_with_signature(url, builder)
return request
"""
for post batch operation, such as batch create orders[ /v1/order/batch-orders ]
"""
def request_process_post_batch(self, method, url, params):
if self.__performance_test is not None and self.__performance_test is True:
return self.request_process_post_batch_performance(method, url, params)
else:
return self.request_process_post_batch_product(method, url, params)
def request_process_post_batch_product(self, method, url, params):
request = self.create_request_post_batch(method, url, params)
if request:
return call_sync(request)
return None
def request_process_post_batch_performance(self, method, url, params):
request = self.create_request_post_batch(method, url, params)
if request:
return call_sync_perforence_test(request)
return None, 0, 0
def __create_request_by_get(self, url, builder):
request = RestApiRequest()
request.method = "GET"
request.host = self.__server_url
request.header.update({'Content-Type': 'application/json'})
request.url = url + builder.build_url()
return request
def __create_request_by_post_with_signature(self, url, builder):
request = RestApiRequest()
request.method = "POST"
request.host = self.__server_url
create_signature(self.__api_key, self.__secret_key, request.method, request.host + url, builder)
request.header.update({'Content-Type': 'application/json'})
if len(builder.post_list): # specify for case : /v1/order/batch-orders
request.post_body = builder.post_list
else:
request.post_body = builder.post_map
request.url = url + builder.build_url()
return request
def __create_request_by_get_with_signature(self, url, builder):
request = RestApiRequest()
request.method = "GET"
request.host = self.__server_url
create_signature(self.__api_key, self.__secret_key, request.method, request.host + url, builder)
request.header.update({"Content-Type": "application/x-www-form-urlencoded"})
request.url = url + builder.build_url()
return request
class SubscribeClient(object):
subscribe_watch_dog = WebSocketWatchDog()
def __init__(self, api_key=None, secret_key=None, **kwargs):
"""
Create the subscription client to subscribe the update from server.
:param kwargs: The option of subscription connection.
api_key: The public key applied from Huobi.
secret_key: The private key applied from Huobi.
url: Set the URI for subscription.
init_log: to init logger
"""
self.__api_key = api_key
self.__secret_key = secret_key
self.__uri = kwargs.get("url", WebSocketDefine.Uri)
self.__init_log = kwargs.get("init_log", None)
        if self.__init_log:
logger.addHandler(logging.StreamHandler())
self.__websocket_manage_list = list()
def __create_websocket_manage(self, request):
manager = WebsocketManage(self.__api_key, self.__secret_key, self.__uri, request)
self.__websocket_manage_list.append(manager)
manager.connect()
SubscribeClient.subscribe_watch_dog.on_connection_created(manager)
def create_request(self, subscription_handler, callback, error_handler, is_trade, is_mbp_feed=False):
request = WebsocketRequest()
request.subscription_handler = subscription_handler
request.is_trading = is_trade
request.is_mbp_feed = is_mbp_feed
request.auto_close = False # subscribe need connection. websocket request need close request.
request.update_callback = callback
request.error_handler = error_handler
return request
def create_request_v1(self, subscription_handler, callback, error_handler, is_trade=False):
request = self.create_request(subscription_handler=subscription_handler, callback=callback,
error_handler=error_handler, is_trade=is_trade)
request.api_version = ApiVersion.VERSION_V1
return request
def create_request_v2(self, subscription_handler, callback, error_handler, is_trade=False):
request = self.create_request(subscription_handler=subscription_handler, callback=callback,
error_handler=error_handler, is_trade=is_trade)
request.api_version = ApiVersion.VERSION_V2
return request
def execute_subscribe_v1(self, subscription_handler, callback, error_handler, is_trade=False):
request = self.create_request_v1(subscription_handler, callback, error_handler, is_trade)
self.__create_websocket_manage(request)
def execute_subscribe_v2(self, subscription_handler, callback, error_handler, is_trade=False):
request = self.create_request_v2(subscription_handler, callback, error_handler, is_trade)
self.__create_websocket_manage(request)
def execute_subscribe_mbp(self, subscription_handler, callback, error_handler, is_trade=False,
is_mbp_feed=True):
request = self.create_request(subscription_handler, callback, error_handler, is_trade, is_mbp_feed)
self.__create_websocket_manage(request)
def unsubscribe_all(self):
for websocket_manage in self.__websocket_manage_list:
SubscribeClient.subscribe_watch_dog.on_connection_closed(websocket_manage)
websocket_manage.close()
self.__websocket_manage_list.clear()
class WebSocketReqClient(object):
def __init__(self, api_key=None, secret_key=None, **kwargs):
"""
Create the subscription client to subscribe the update from server.
:param kwargs: The option of subscription connection.
api_key: The public key applied from Huobi.
secret_key: The private key applied from Huobi.
url: Set the URI for subscription.
init_log: to init logger
"""
self.__api_key = api_key
self.__secret_key = secret_key
self.__uri = kwargs.get("url", WebSocketDefine.Uri)
self.__init_log = kwargs.get("init_log", None)
        if self.__init_log:
logger = logging.getLogger("huobi-client")
logger.setLevel(level=logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(handler)
def __create_websocket_manage(self, request):
manager = WebsocketManage(self.__api_key, self.__secret_key, self.__uri, request)
manager.connect()
def create_request(self, subscription_handler, callback, error_handler, is_trade=False, is_mbp_feed=False):
request = WebsocketRequest()
request.subscription_handler = subscription_handler
request.is_trading = is_trade
request.is_mbp_feed = is_mbp_feed
request.auto_close = True # for websocket request, auto close the connection after request.
request.update_callback = callback
request.error_handler = error_handler
return request
def execute_subscribe_v1(self, subscription_handler, callback, error_handler, is_trade=False):
request = self.create_request(subscription_handler, callback, error_handler, is_trade)
self.__create_websocket_manage(request)
def execute_subscribe_mbp(self, subscription_handler, callback, error_handler, is_trade=False,
is_mbp_feed=True):
request = self.create_request(subscription_handler, callback, error_handler, is_trade, is_mbp_feed)
self.__create_websocket_manage(request)
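# Illustrative call (added sketch): the endpoint and parameters below are
# examples of Huobi's public market-data REST API; no API key is required.
# client = RestApiSyncClient(url="https://api.huobi.pro")
# result = client.request_process(HttpMethod.GET, "/market/history/kline",
#                                 {"symbol": "btcusdt", "period": "1day", "size": 10})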
| 2.0625
| 2
|
app/data/rader_chart/bin/sub/output_chart_data.py
|
yokrh/sdvx-score-rader
| 0
|
12777215
|
<filename>app/data/rader_chart/bin/sub/output_chart_data.py
import json
import fnmatch
import os
import codecs
"""
Create rader chart json data of a track.
Parameters
----------
name : string
level : string
difficulty : string
prediction_dir : string
output_dir : string
"""
def create_chart_data(
*,
name,
level,
difficulty,
prediction_dir,
output_dir
):
bt_long = fnmatch.filter(
os.listdir(os.path.join(prediction_dir, 'label_btn', 'btn_bt_long', '1')),
'*.png'
)
douji = fnmatch.filter(
os.listdir(os.path.join(prediction_dir, 'label_btn', 'btn_douji', '1')),
'*.png'
)
niju = fnmatch.filter(
os.listdir(os.path.join(prediction_dir, 'label_btn', 'btn_niju', '1')),
'*.png'
)
tateren = fnmatch.filter(
os.listdir(os.path.join(prediction_dir, 'label_btn', 'btn_tateren', '1')),
'*.png'
)
trill = fnmatch.filter(
os.listdir(os.path.join(prediction_dir, 'label_btn', 'btn_trill', '1')),
'*.png'
)
# tsumami = fnmatch.filter(
# os.listdir(os.path.join(prediction_dir, 'label_vol', 'vol_xxx', '1')),
# '*.png'
# )
# chokkaku_simple = fnmatch.filter(
# os.listdir(os.path.join(prediction_dir, 'label_vol', 'vol_xxx', '1')),
# '*.png'
# )
# chokkaku_complex = fnmatch.filter(
# os.listdir(os.path.join(prediction_dir, 'label_vol', 'vol_xxx', '1')),
# '*.png'
# )
# katate = fnmatch.filter(
# os.listdir(os.path.join(prediction_dir, 'label_vol', 'vol_xxx', '1')),
# '*.png'
# )
json_data = json.dumps(
{
'name': name,
'level': level,
'difficulty': difficulty,
'bt_long': len(bt_long),
'douji': len(douji),
'niju': len(niju),
'tateren': len(tateren),
'trill': len(trill) #,
# 'tsumami': len(tsumami),
# 'chokkaku_simple': len(chokkaku_simple),
# 'chokkaku_complex': len(chokkaku_complex),
# 'katate': len(katate)
},
indent=2,
ensure_ascii=False
)
    # os.makedirs is portable and avoids shelling out (and shell injection via
    # output_dir), unlike `mkdir -p` through os.popen.
    os.makedirs(output_dir, exist_ok=True)
    filename = os.path.join(output_dir, level + '_' + difficulty + '_' + name + '.json')
print('output filename:')
print(filename)
with open(filename, 'w') as f:
f.write(json_data + '\n')
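# Illustrative call (added sketch; paths and metadata are placeholders):
# create_chart_data(
#     name='example-track',
#     level='17',
#     difficulty='EXH',
#     prediction_dir='predictions/example-track',
#     output_dir='out/chart_data',
# )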
| 2.296875
| 2
|
src/db.py
|
failip/coffee
| 0
|
12777216
|
from pymongo import MongoClient
from user import User
import json
class Database:
def __init__(self):
self.client = MongoClient(
'localhost', 27017, username="root", password="<PASSWORD>")
self.db = self.client.test_database
self.users = self.db.users
self.settings = self.db.settings
def get_user(self, name):
data = self.users.find_one({"name": name})
        if data:
return User(name=data["name"], balance=data["balance"])
else:
return None
def get_all_users(self):
pass
def create_user(self, user):
if not self.user_exsists(user):
create_id = self.users.insert_one(user.__dict__).inserted_id
return create_id
print("User already exsists")
return
def delete_user(self, name):
self.users.delete_one({"name": name})
def update_balance(self, user):
self.users.find_one_and_update(
{"name": user.name}, {"$set": {"balance": user.balance}})
def increase_balance(self, user, amount):
user.balance += amount
self.update_balance(user)
def decrease_balance(self, user, amount):
user.balance -= amount
self.update_balance(user)
    def user_exsists(self, user):
        return bool(list(self.users.find({"name": user.name})))
def get_settings(self):
return self.settings.find_one({})
def update_settings(self, settings):
items = {}
for item in settings.items:
items[item.name] = item.price
self.settings.find_one_and_update({}, {"$set": items})
def create_settings(self, settings):
items = {}
for item in settings.items:
items[item.name] = item.price
self.settings.insert_one(items)
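# Illustrative usage (added sketch; assumes a local MongoDB reachable with the
# credentials above and that User takes name/balance kwargs as used in get_user):
# db = Database()
# db.create_user(User(name='alice', balance=0))
# alice = db.get_user('alice')
# db.increase_balance(alice, 150)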
| 3.265625
| 3
|
sustainableCityManagement/main_project/ML_models/footfall_prediction.py
|
Josh-repository/Dashboard-CityManager-
| 0
|
12777217
|
<reponame>Josh-repository/Dashboard-CityManager-
import numpy as np
import math
import sys
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Ridge
from sklearn.linear_model import LinearRegression
from ..Config.config_handler import read_config
config_vals = read_config("Footfall_API")
# Time series prediction algorithm to predict the average footfall count for days ahead
def predict_footfall(arrayOfFootfallData, predictDays=1, previous_days_to_consider=config_vals["days_to_consider_for_prediction"]):
X = []
y = []
for i in range(len(arrayOfFootfallData)-previous_days_to_consider):
train_part = arrayOfFootfallData[i:i+previous_days_to_consider]
test_part = arrayOfFootfallData[i+previous_days_to_consider]
X.append(train_part)
y.append(test_part)
results = []
for i in range(predictDays):
reg = LinearRegression().fit(X, y)
to_predict = arrayOfFootfallData[len(
arrayOfFootfallData)-previous_days_to_consider:len(arrayOfFootfallData)]
y_pred = reg.predict([to_predict])
# adding prediction to the list of values (needed to create the to_predict)
arrayOfFootfallData.append(y_pred[0])
X.append(to_predict) # adding train data point (needed for training)
y.append(y_pred) # adding test data point (needed for training)
results.append(y_pred) # adding prediction to results
return math.ceil(results[0])
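# Illustrative call (added sketch; the counts are synthetic and the window of 5
# overrides the config default for the example):
# history = [100, 120, 130, 125, 140, 150, 160, 155, 170, 180]
# print(predict_footfall(history, predictDays=1, previous_days_to_consider=5))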
| 2.890625
| 3
|
ver1_0/openassembly/pirate_sources/templatetags/sourcetags.py
|
fragro/Open-Assembly
| 1
|
12777218
|
<filename>ver1_0/openassembly/pirate_sources/templatetags/sourcetags.py
from django import template
from django import forms
from django.http import HttpResponseRedirect
from django.contrib.contenttypes.models import ContentType
from pirate_sources.models import IMGSource, URLSource
from pirate_core.views import HttpRedirectException, namespace_get
from django.template import Library
from django.core.urlresolvers import reverse
import re
from django.core.cache import cache
from py_etherpad import EtherpadLiteClient
import datetime
from settings import ETHERPAD_API
from filetransfers.api import prepare_upload
from django.conf import settings
from customtags.decorators import block_decorator
register = template.Library()
block = block_decorator(register)
get_namespace = namespace_get('pp_source')
@block
def pp_set_livestreamcache(context, nodelist, *args, **kwargs):
"""
    Caches the given object's pk as this user's current livestream.
"""
context.push()
namespace = get_namespace(context)
user = kwargs.get('user', None)
obj = kwargs.get('object', None)
cache.set(str(user.pk) + '-livestream', obj.pk)
output = nodelist.render(context)
context.pop()
return output
@block
def pp_check_livestreamcache(context, nodelist, *args, **kwargs):
"""
    Retrieves this user's cached livestream pk.
"""
context.push()
namespace = get_namespace(context)
user = kwargs.get('user', None)
namespace['livestream'] = cache.get(str(user.pk) + '-livestream')
output = nodelist.render(context)
context.pop()
return output
@block
def pp_get_pad(context, nodelist, *args, **kwargs):
"""
    Retrieves the Etherpad HTML for this object id.
"""
context.push()
namespace = get_namespace(context)
obj = kwargs.get('object', None)
myPad = EtherpadLiteClient(ETHERPAD_API, 'http://notes.occupy.net/api')
try:
        if ETHERPAD_API is not None:
#Change the text of the etherpad
try:
text = myPad.getHtml(str(obj.pk))
except:
myPad.createPad(str(obj.pk), '')
text = myPad.getHtml(str(obj.pk))
namespace['text'] = text['html']
else:
namespace['text'] = '<p>No API Key</p>'
except:
namespace['text'] = '<p>Looks like the Occupy.net Etherpad server is down... Thanks for your patience</p>'
output = nodelist.render(context)
context.pop()
return output
@block
def pp_current_image(context, nodelist, *args, **kwargs):
"""
Retrieves the current image for this object id.
"""
context.push()
namespace = get_namespace(context)
obj = kwargs.get('object', None)
if obj is not None:
try:
imgsource = IMGSource.objects.get(object_pk=obj.pk, current=True)
namespace['current_img'] = imgsource
except:
imgsource = IMGSource.objects.filter(object_pk=obj.pk)
if len(imgsource) > 0:
namespace['current_img'] = imgsource
output = nodelist.render(context)
context.pop()
return output
@block
def pp_get_contenttype_id(context, nodelist, *args, **kwargs):
context.push()
namespace = get_namespace(context)
obj = kwargs.get('object',None)
if obj is not None and not isinstance(obj, basestring):
pk = ContentType.objects.get_for_model(obj).pk
namespace['ctype_pk'] = pk
output = nodelist.render(context)
context.pop()
return output
@block
def pp_get_iframe_video(context, nodelist, *args, **kwargs):
"""
This function grabs an VideoSource object and populates and
video_iframe.html file for that video, if one has not yet been
created. There is no way for the iframe src argument to
evaluate django template code, so this is the solution that
presents itself.
"""
context.push()
namespace = get_namespace(context)
obj = kwargs.get('object',None)
    if obj is not None:
        pass
        # htmllink = generate_iframe(obj.video)
        # namespace['link'] = htmllink
    else:
        raise ValueError("Submitted 'object' as NoneType to pp_get_iframe_video")
output = nodelist.render(context)
context.pop()
return output
def generate_iframe(vid):
html = """
<html>
<head>
<title>Poptastic</title>
<link href="/static/html5_video_voting/stylesheets/app.css" media="screen" rel="stylesheet" type="text/css">
<script src="/static/html5_video_voting/javascripts/jquery.js" type="text/javascript"></script>
<script src="/static/html5_video_voting/javascripts/jquery-ui.js" type="text/javascript"></script>
<script src="/static/html5_video_voting/javascripts/raphaeljs/raphael.js" type="text/javascript"></script>
<script src="/static/html5_video_voting/javascripts/raphaeljs/g.raphael.js" type="text/javascript"></script>
<script src="/static/html5_video_voting/javascripts/raphaeljs/bar.raphael.js" type="text/javascript"></script>
<script src="/static/html5_video_voting/javascripts/application.js" type="text/javascript"></script>
<style>body {margin: 0;padding: 0;overflow: hidden;}</style></head><body><div id="poptastic">
<video id="
""" + str(vid.id) + '"class="video" data-ip="<KEY>" data-keycode="32" data-bars="' + str(vid.bars) + '" width="600" height="340"><source src="' + str(vid.filename_mp4) + """ " type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'>
<source src=" """ + str(vid.filename_webm) + """ " type='video/webm; codecs="vp8, vorbis"'>
<source src=" """ + str(vid.filename_ogg) + """" type='video/ogg; codecs="theora, vorbis"'>
</video>
<div id="controls">
<a href="#pause" id="play">Play</a>
<div id="bar-wrapper-container">
<div id="bar-wrapper">
<div id="bar"></div>
<div id="buffer"></div>
<a href="#" id="paddle" class="ui-draggable"></a>
</div>
</div>
<a href="#vol" id="volume"><span></span></a>
</div>
<div id="chart"></div>
</div>
<div id="error">You must be logged in to vote!</div>
</body>
</html>"""
return html
@block
def pp_get_source(context, nodelist, *args, **kwargs):
context.push()
namespace = get_namespace(context)
obj = kwargs.get('object', None)
t = kwargs.get('type', None)
src = None
try:
if obj is not None:
if t == 'url':
src = URLSource.objects.get(pk=obj.pk)
elif t == 'img':
src = IMGSource.objects.get(pk=obj.pk)
except:
pass
namespace['src'] = src
output = nodelist.render(context)
context.pop()
return output
@block
def pp_calculate_size(context, nodelist, *args, **kwargs):
context.push()
namespace = get_namespace(context)
w = kwargs.get('width', None)
h = kwargs.get('height', None)
img = kwargs.get('img', None)
if img is not None:
img_w = img.file.width
img_h = img.file.height
if w is not None and h is not None:
if int(img_w) > 581:
namespace['width'] = '100%'
else:
namespace['width'] = int(min(int(img_w), int(w)/1.625))
namespace['geometry'] = str(int(min(int(img_w), int(w)/1.625)))
else:
if int(img_w) > 581:
namespace['width'] = '100%'
else:
namespace['width'] = str(img_w) + 'px'
namespace['geometry'] = str(img_w)
output = nodelist.render(context)
context.pop()
return output
@block
def pp_get_sources(context, nodelist, *args, **kwargs):
context.push()
namespace = get_namespace(context)
obj = kwargs.get('object', None)
t = kwargs.get('type', None)
get = kwargs.get('get', None)
try:
if obj is not None:
content_type = ContentType.objects.get_for_model(obj)
namespace['ctype'] = content_type.pk
if t == 'url':
namespace['sources'] = URLSource.objects.filter(object_pk=obj.pk, is_video=False)
#namespace['videosource_list'] = URLSource.objects.filter(object_pk=obj.pk, is_video=True)
elif t == 'img':
if get == "all":
l = IMGSource.objects.filter(object_pk=obj.pk).order_by('submit_date')
namespace['sources'] = l
cnt = l.count()
namespace['count'] = cnt
else:
try:
namespace['cur_img'] = IMGSource.objects.get(object_pk=obj.pk, current=True)
except:
namespace['cur_img'] = None
try:
namespace['cur_img'] = IMGSource.objects.get(pk=obj.pk, current=True)
except:
pass
except:
namespace['cur_img'] = None
namespace['sources'] = []
output = nodelist.render(context)
context.pop()
return output
@block
def pp_ajaximg(context, nodelist, *args, **kwargs):
"""
Modifies the request session data to prep it for AJAX requests to the Ajax image uploader.
"""
context.push()
namespace = get_namespace(context)
request = kwargs.get('request', None)
obj = kwargs.get('object', None)
if obj is not None and request is not None:
ctype = ContentType.objects.get_for_model(obj)
request.session['object_pk'] = obj.pk
request.session['content_pk'] = ctype.pk
output = nodelist.render(context)
context.pop()
return output
@block
def pp_imgsource_form(context, nodelist, *args, **kwargs):
'''
    This block tag can create or process forms either to create or to modify arguments.
Usage is as follows:
{% pp_topic_form POST=request.POST path=request.path topic=pp_topic.topic root=some_topic %}
Do stuff with {{ pp_source.form }}.
{% endpp_topic_form %}
'''
context.push()
namespace = get_namespace(context)
POST = kwargs.get('POST', None)
FILE = kwargs.get('FILE', None)
obj = kwargs.get('object', None)
request = kwargs.get('request', None)
content_type = ContentType.objects.get_for_model(obj)
view_url = reverse('pirate_sources.views.upload_handler', args=[obj.pk, content_type.pk])
if POST:
form = IMGSourceForm(POST, FILE)
if form.is_valid():
img = form.save()
img.user = request.user
img.object_pk = obj.pk
img.content_type = ContentType.objects.get(pk=content_type.pk)
img.submit_date = datetime.datetime.now()
img.make(request.FILES['file'], img.file.name)
if img.private != True:
try:
oldimg = IMGSource.objects.get(object_pk=obj.pk, current=True)
oldimg.current = False
oldimg.save()
except:
pass
img.current = True
img.save()
upload_url, upload_data = prepare_upload(request, view_url)
form = IMGSourceForm()
else:
view_url += '?error=Not a valid image'
namespace['errors'] = form.errors
upload_url, upload_data = prepare_upload(request, view_url)
form = IMGSourceForm()
namespace['upload_url'] = upload_url
namespace['upload_data'] = upload_data
namespace['form'] = form
output = nodelist.render(context)
context.pop()
return output
class IMGSourceForm(forms.ModelForm):
def save(self, commit=True):
new_source = super(IMGSourceForm, self).save(commit=commit)
return new_source
class Meta:
model = IMGSource
exclude = ('user', 'title', 'url', 'current', 'thumbnail', 'thumbnail_small', 'thumbnail_large', 'content_type', 'object_pk')
form_id = forms.CharField(widget=forms.HiddenInput(), initial="pp_imgsource_form")
@block
def pp_videosource_form(context, nodelist, *args, **kwargs):
'''
    This block tag can create or process forms either to create or to modify arguments.
Usage is as follows:
{% pp_topic_form POST=request.POST path=request.path topic=pp_topic.topic root=some_topic %}
Do stuff with {{ pp_source.form }}.
{% endpp_topic_form %}
'''
context.push()
namespace = get_namespace(context)
POST = kwargs.get('POST', None)
source = kwargs.get('source', None)
obj = kwargs.get('object', None)
user = kwargs.get('user', None)
if POST and POST.get("form_id") == "pp_videosource_form":
form = VideoSourceForm(POST) if source is None else VideoSourceForm(POST, instance=source)
if form.is_valid():
new_source = form.save(commit=False)
new_source.user = user
new_source.content_type = ContentType.objects.get_for_model(obj.__class__)
new_source.object_pk = obj.pk
new_source.content_object = obj
            url = parse_video_url(form.cleaned_data['url'])
            if url is not None:
                new_source.url = url
                new_source.is_video = True
                # Only save if an identical source is not already attached.
                try:
                    source = URLSource.objects.get(url=url, object_pk=obj.pk)
                except URLSource.DoesNotExist:
                    new_source.save()
                #raise HttpRedirectException(HttpResponseRedirect(obj.get_absolute_url()))
else:
namespace['errors'] = "Not a valid youtube URL"
else:
namespace['errors'] = "Not a valid URL"
#path = obj.get_absolute_url()
#raise HttpRedirectException(HttpResponseRedirect(path))
else:
form = VideoSourceForm() if source is None else VideoSourceForm(instance=source)
namespace['urlform'] = form
output = nodelist.render(context)
context.pop()
return output
def parse_video_url(url):
    #TODO: Need to update this, it only accepts a limited number of youtube urls
    l = re.split(r'\/', url)
    if not l:
        return None
    last = l[-1]
    # 'watch?v=<id>' style URLs: strip the query prefix to get the video id.
    if last.startswith('watch?v='):
        return last[8:]
    # Short URLs (e.g. youtu.be/<id>): the last path segment is the id.
    if last != '':
        return last
    return None
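# Examples (illustrative):
#   parse_video_url('https://www.youtube.com/watch?v=abc123') -> 'abc123'
#   parse_video_url('https://youtu.be/abc123')                -> 'abc123'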
@block
def pp_urlsource_form(context, nodelist, *args, **kwargs):
    '''
    This block tag can create or process forms, either to create or to modify objects.
    Usage is as follows:
    {% pp_urlsource_form POST=request.POST object=obj user=request.user %}
        Do stuff with {{ pp_source.urlform }}.
    {% endpp_urlsource_form %}
    '''
context.push()
namespace = get_namespace(context)
POST = kwargs.get('POST', None)
source = kwargs.get('source', None)
obj = kwargs.get('object', None)
user = kwargs.get('user', None)
if POST and POST.get("form_id") == "pp_urlsource_form":
form = URLSourceForm(POST) if source is None else URLSourceForm(POST, instance=source)
if form.is_valid():
new_source = form.save(commit=False)
new_source.user = user
new_source.content_type = ContentType.objects.get_for_model(obj.__class__)
new_source.object_pk = obj.pk
new_source.content_object = obj
new_source.is_video = False
new_source.save()
form = URLSourceForm()
else:
namespace['errors'] = "Not a valid URL"
#raise HttpRedirectException(HttpResponseRedirect(path))
else:
form = URLSourceForm() if source is None else URLSourceForm(instance=source)
namespace['urlform'] = form
output = nodelist.render(context)
context.pop()
return output
class URLSourceForm(forms.ModelForm):
def save(self, commit=True):
new_source = super(URLSourceForm, self).save(commit=commit)
return new_source
class Meta:
model = URLSource
exclude = ('content_type', 'object_pk', 'content_object', 'user')
form_id = forms.CharField(widget=forms.HiddenInput(), initial="pp_urlsource_form")
url = forms.CharField(initial="http://")
class VideoSourceForm(forms.ModelForm):
def save(self, commit=True):
new_source = super(VideoSourceForm, self).save(commit=commit)
return new_source
class Meta:
model = URLSource
exclude = ('content_type', 'object_pk', 'content_object', 'user')
form_id = forms.CharField(widget=forms.HiddenInput(), initial="pp_videosource_form")
| 2.125
| 2
|
bin/Resize.py
|
tsteffek/LicensePlateReconstructor
| 2
|
12777219
|
import argparse
import multiprocessing
from multiprocessing.queues import Queue
from queue import Empty
from PIL.Image import Image
from src.base import IO
def repeat(queue: Queue, resize):
try:
while True:
load_resize_save(queue.get(True, 5), resize)
except Empty:
return
def load_resize_save(image_file, resize):
img: Image = IO.load_image(image_file)
img = img.resize(resize)
img.save(image_file)
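# Note: Image.resize returns a new image, so the resized copy overwrites the
# original file in place via img.save(image_file).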
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--source', type=str)
parser.add_argument('--target_size', type=float)
parser.add_argument('--worker', type=int)
parser.add_argument('--image_glob', type=str, default='**/*.jpg')
args = parser.parse_args()
images = IO.get_image_paths(args.source, args.image_glob)
    # Derive the target resolution by scaling the first image's dimensions by --target_size.
    resize = IO.load_image(images[0]).size
    resize = tuple(int(args.target_size * el) for el in resize)
multiprocessing.set_start_method('spawn')
q = multiprocessing.Queue()
for img_file in images:
q.put(img_file)
processes = [
        multiprocessing.Process(target=repeat, args=(q, resize), daemon=True) for _ in range(args.worker)
]
for process in processes:
process.start()
for process in processes:
process.join()
| 2.796875
| 3
|
toBus/base_bus.py
|
sherry0429/tobus
| 2
|
12777220
|
<reponame>sherry0429/tobus<filename>toBus/base_bus.py
# -*- coding: utf-8 -*-
"""
Copyright (C) 2017 <NAME> <sherry0429 at SOAPython>
"""
from threading import Thread
import time
import pickle
import redis
from base_msg import BaseMessage
class MsgBus(object):
access_modules = set()
def __init__(self, redis_ip, redis_port, redis_db):
self.redis_ip = redis_ip
self.redis_port = redis_port
self.redis_db = redis_db
self.redis = redis.StrictRedis(redis_ip,
redis_port,
redis_db)
def start(self):
m_th = MsgReceiver(self.redis, self.access_modules, self.msg_recv_callback, self.msg_send_callback)
m_th.start()
print "bus with redis %s:%s:%s start" % (self.redis_ip, str(self.redis_port), str(self.redis_db))
def msg_recv_callback(self, msg):
pass
def msg_send_callback(self, msg):
pass
class MsgReceiver(Thread):
def __init__(self, redis, access_modules, msg_recv_callback, msg_send_callback):
super(MsgReceiver, self).__init__()
self.redis = redis
self.access_modules = access_modules
self.msg_recv_callback = msg_recv_callback
self.msg_send_callback = msg_send_callback
def run(self):
while True:
msg = self.redis.rpop('bus')
if msg is not None:
# use Controller, to judge every msg
content = pickle.loads(msg)
self.msg_recv_callback(content)
                # isinstance() already covers subclasses of BaseMessage, so the
                # broken check on __class__.__bases__[0] is unnecessary.
                if isinstance(content, BaseMessage):
if content.before is not None:
self.access_modules.add(content.before)
                    if len(content.direction) != 0:
                        if content.publish_module_type is not None:
                            content.direction = list()
                            # publish() requires a channel and a message payload;
                            # forwarding the raw message here is an assumption.
                            self.redis.publish('%s' % content.publish_module_type, msg)
else:
direction = content.direction[0]
self.redis.lpush('bus-%s' % direction.lower(), msg)
self.msg_send_callback(content)
time.sleep(1)
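# A minimal usage sketch (assumes a local Redis instance):
#
#     bus = MsgBus('127.0.0.1', 6379, 0)
#     bus.start()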
| 2.5
| 2
|
experiments/uai_experiments.py
|
bradyneal/realcause
| 35
|
12777221
|
import numpy as np
import pandas as pd
import time
from pathlib import Path
from experiments.evaluation import calculate_metrics
from causal_estimators.ipw_estimator import IPWEstimator
from causal_estimators.standardization_estimator import \
StandardizationEstimator, StratifiedStandardizationEstimator
from experiments.evaluation import run_model_cv
from loading import load_from_folder
from sklearn.linear_model import LogisticRegression, LinearRegression, Lasso, Ridge, ElasticNet, RidgeClassifier
from sklearn.svm import SVR, LinearSVR, SVC, LinearSVC
from sklearn.kernel_ridge import KernelRidge
from sklearn.neural_network import MLPClassifier, MLPRegressor
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.gaussian_process import GaussianProcessClassifier, GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor, AdaBoostRegressor, GradientBoostingRegressor,\
RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis, QuadraticDiscriminantAnalysis
from sklearn.pipeline import Pipeline, make_pipeline
from sklearn.preprocessing import StandardScaler, PolynomialFeatures
from sklearn.exceptions import UndefinedMetricWarning
import warnings
warnings.simplefilter(action='ignore', category=UndefinedMetricWarning)
# warnings.filterwarnings("ignore", message="UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 due to no predicted samples. Use `zero_division` parameter to control this behavior.")
RESULTS_DIR = Path('results')
alphas = {'alpha': np.logspace(-4, 5, 10)}
# gammas = [] + ['scale']
Cs = np.logspace(-4, 5, 10)
d_Cs = {'C': Cs}
SVM = 'svm'
d_Cs_pipeline = {SVM + '__C': Cs}
max_depths = list(range(2, 10 + 1)) + [None]
d_max_depths = {'max_depth': max_depths}
d_max_depths_base = {'base_estimator__max_depth': max_depths}
Ks = {'n_neighbors': [1, 2, 3, 5, 10, 15, 25, 50, 100, 200]}
OUTCOME_MODEL_GRID = [
('LinearRegression', LinearRegression(), {}),
('LinearRegression_interact',
make_pipeline(PolynomialFeatures(degree=2, interaction_only=True),
LinearRegression()),
{}),
('LinearRegression_degree2',
make_pipeline(PolynomialFeatures(degree=2), LinearRegression()), {}),
# ('LinearRegression_degree3',
# make_pipeline(PolynomialFeatures(degree=3), LinearRegression()), {}),
('Ridge', Ridge(), alphas),
('Lasso', Lasso(), alphas),
('ElasticNet', ElasticNet(), alphas),
('KernelRidge', KernelRidge(), alphas),
('SVM_rbf', SVR(kernel='rbf'), d_Cs),
('SVM_sigmoid', SVR(kernel='sigmoid'), d_Cs),
('LinearSVM', LinearSVR(), d_Cs),
# (SVR(kernel='linear'), d_Cs), # doesn't seem to work (runs forever)
# TODO: add tuning of SVM gamma, rather than using the default "scale" setting
# SVMs are sensitive to input scale
('Standardized_SVM_rbf', Pipeline([('standard', StandardScaler()), (SVM, SVR(kernel='rbf'))]),
d_Cs_pipeline),
('Standardized_SVM_sigmoid', Pipeline([('standard', StandardScaler()), (SVM, SVR(kernel='sigmoid'))]),
d_Cs_pipeline),
('Standardized_LinearSVM', Pipeline([('standard', StandardScaler()), (SVM, LinearSVR())]),
d_Cs_pipeline),
('kNN', KNeighborsRegressor(), Ks),
# GaussianProcessRegressor(),
# TODO: also cross-validate over min_samples_split and min_samples_leaf
('DecisionTree', DecisionTreeRegressor(), d_max_depths),
# ('RandomForest', RandomForestRegressor(), d_max_depths),
# TODO: also cross-validate over learning_rate
# ('AdaBoost', AdaBoostRegressor(base_estimator=DecisionTreeRegressor(max_depth=None)), d_max_depths_base),
# ('GradientBoosting', GradientBoostingRegressor(), d_max_depths),
# MLPRegressor(max_iter=1000),
# MLPRegressor(alpha=1, max_iter=1000),
]
PROP_SCORE_MODEL_GRID = [
('LogisticRegression_l2', LogisticRegression(penalty='l2'), d_Cs),
('LogisticRegression', LogisticRegression(penalty='none'), {}),
('LogisticRegression_l2_liblinear', LogisticRegression(penalty='l2', solver='liblinear'), d_Cs),
('LogisticRegression_l1_liblinear', LogisticRegression(penalty='l1', solver='liblinear'), d_Cs),
('LogisticRegression_l1_saga', LogisticRegression(penalty='l1', solver='saga'), d_Cs),
('LDA', LinearDiscriminantAnalysis(), {}),
('LDA_shrinkage', LinearDiscriminantAnalysis(solver='lsqr', shrinkage='auto'), {}),
('QDA', QuadraticDiscriminantAnalysis(), {}),
# TODO: add tuning of SVM gamma, rather than using the default "scale" setting
('SVM_rbf', SVC(kernel='rbf', probability=True), d_Cs),
('SVM_sigmoid', SVC(kernel='sigmoid', probability=True), d_Cs),
# ('SVM_linear', SVC(kernel='linear', probability=True), d_Cs), # doesn't seem to work (runs forever)
# SVMs are sensitive to input scale
('Standardized_SVM_rbf', Pipeline([('standard', StandardScaler()), (SVM, SVC(kernel='rbf', probability=True))]),
d_Cs_pipeline),
('Standardized_SVM_sigmoid', Pipeline([('standard', StandardScaler()),
(SVM, SVC(kernel='sigmoid', probability=True))]),
d_Cs_pipeline),
# ('Standardized_SVM_linear', Pipeline([('standard', StandardScaler()),
# (SVM, SVC(kernel='linear', probability=True))]),
# d_Cs_pipeline), # doesn't seem to work (runs forever)
('kNN', KNeighborsClassifier(), Ks),
# GaussianProcessClassifier(),
('GaussianNB', GaussianNB(), {}),
# TODO: also cross-validate over min_samples_split and min_samples_leaf
('DecisionTree', DecisionTreeClassifier(), d_max_depths),
# ('RandomForest', RandomForestClassifier(), max_depths),
# TODO: also cross-validate over learning_rate
# ('AdaBoost', AdaBoostClassifier(base_estimator=DecisionTreeClassifier(max_depth=None)), d_max_depths_base),
# ('GradientBoosting', GradientBoostingClassifier(), d_max_depths),
# MLPClassifier(max_iter=1000),
# MLPClassifier(alpha=1, max_iter=1000),
]
psid_gen_model, args = load_from_folder(dataset='lalonde_psid1')
cps_gen_model, args = load_from_folder(dataset='lalonde_cps1')
twins_gen_model, args = load_from_folder(dataset='twins')
psid_ate = psid_gen_model.ate(noisy=True)
psid_ite = psid_gen_model.ite(noisy=True).squeeze()
cps_ate = cps_gen_model.ate(noisy=True)
cps_ite = cps_gen_model.ite(noisy=True).squeeze()
twins_ate = twins_gen_model.ate(noisy=False)
twins_ite = twins_gen_model.ite(noisy=False).squeeze()
GEN_MODELS = [
('lalonde_psid', psid_gen_model, psid_ate, psid_ite),
('lalonde_cps', cps_gen_model, cps_ate, cps_ite),
('twins', twins_gen_model, twins_ate, twins_ite)
]
t_start = time.time()
N_SEEDS_CV = 5
N_SEEDS_METRICS = 5
def run_experiments_for_estimator(get_estimator_func, model_grid, save_location,
meta_est_name, model_type, exclude=[],
gen_models=GEN_MODELS, n_seeds_cv=N_SEEDS_CV,
n_seeds_metrics=N_SEEDS_METRICS):
# if outcome_model_grid is None and prop_score_model_grid is None:
# raise ValueError('Either outcome_model_grid or prop_score_model_grid must be not None.')
# if outcome_model_grid is not None and prop_score_model_grid is not None:
# raise ValueError('Currently only supporting one non-None model grid.')
# outcome_modeling = outcome_model_grid is not None
# model_grid = outcome_model_grid if outcome_modeling else prop_score_model_grid
# model_type = 'outcome' if outcome_modeling else 'prop_score'
valid_model_types = ['outcome', 'prop_score']
if model_type not in valid_model_types:
raise ValueError('Invalid model_type... Valid model_types: {}'.format(valid_model_types))
param_str = 'params_' + model_type + '_model'
dataset_dfs = []
for gen_name, gen_model, ate, ite in gen_models:
print('DATASET:', gen_name)
dataset_start = time.time()
model_dfs = []
for model_name, model, param_grid in model_grid:
print('MODEL:', model_name)
if (gen_name, model_name) in exclude or model_name in exclude:
print('SKIPPING')
continue
model_start = time.time()
results = run_model_cv(gen_model, model, model_name=model_name, param_grid=param_grid,
n_seeds=n_seeds_cv, model_type=model_type, best_model=False, ret_time=False)
metrics_list = []
for params in results[param_str]:
try:
est_start = time.time()
estimator = get_estimator_func(model.set_params(**params))
metrics = calculate_metrics(gen_model, estimator, n_seeds=n_seeds_metrics,
conf_ints=False, ate=ate, ite=ite)
est_end = time.time()
# Add estimator fitting time in minutes
metrics['time'] = (est_end - est_start) / 60
metrics_list.append(metrics)
except ValueError:
print('Skipping {} params: {}'.format(model_name, params))
causal_metrics = pd.DataFrame(metrics_list)
model_df = pd.concat([results, causal_metrics], axis=1)
model_df.insert(0, 'dataset', gen_name)
model_df.insert(1, 'meta-estimator', meta_est_name)
model_dfs.append(model_df)
model_end = time.time()
print(model_name, 'time:', (model_end - model_start) / 60, 'minutes')
dataset_df = pd.concat(model_dfs, axis=0)
dataset_end = time.time()
print(gen_name, 'time:', (dataset_end - dataset_start) / 60 / 60, 'hours')
dataset_dfs.append(dataset_df)
full_df = pd.concat(dataset_dfs, axis=0)
t_end = time.time()
print('Total time elapsed:', (t_end - t_start) / 60 / 60, 'hours')
full_df.to_csv(save_location, float_format='%.2f', index=False)
return full_df
print('STANDARDIZATION')
stand_df = run_experiments_for_estimator(
lambda model: StandardizationEstimator(outcome_model=model),
model_grid=OUTCOME_MODEL_GRID,
save_location=RESULTS_DIR / 'psid_cps_twins_standard.csv',
meta_est_name='standardization',
model_type='outcome',
gen_models=GEN_MODELS)
print('STRATIFIED STANDARDIZATION')
strat_df = run_experiments_for_estimator(
lambda model: StratifiedStandardizationEstimator(outcome_models=model),
model_grid=OUTCOME_MODEL_GRID,
exclude=[('lalonde_cps', 'KernelRidge')],
save_location=RESULTS_DIR / 'psid_cps_twins_strat_standard.csv',
meta_est_name='stratified_standardization',
model_type='outcome',
gen_models=GEN_MODELS)
print('IPW')
ps_df = run_experiments_for_estimator(
lambda model: IPWEstimator(prop_score_model=model),
model_grid=PROP_SCORE_MODEL_GRID,
# exclude=[('lalonde_psid', 'SVM_rbf')],
exclude=['SVM_rbf'],
save_location=RESULTS_DIR / 'psid_cps_twins_ipw.csv',
meta_est_name='ipw',
model_type='prop_score',
gen_models=GEN_MODELS)
print('IPW TRIM EPS 0.01')
ps_trim_df = run_experiments_for_estimator(
lambda model: IPWEstimator(prop_score_model=model, trim_eps=0.01),
model_grid=PROP_SCORE_MODEL_GRID,
# exclude=[('lalonde_psid', 'SVM_rbf')],
exclude=['SVM_rbf'],
save_location=RESULTS_DIR / 'psid_cps_twins_ipw_trim_01.csv',
meta_est_name='ipw_trimeps.01',
model_type='prop_score',
gen_models=GEN_MODELS)
print('IPW Stabilized weights')
ps_stab_df = run_experiments_for_estimator(
lambda model: IPWEstimator(prop_score_model=model, stabilized=True),
model_grid=PROP_SCORE_MODEL_GRID,
# exclude=[('lalonde_psid', 'SVM_rbf')],
exclude=['SVM_rbf'],
save_location=RESULTS_DIR / 'psid_cps_twins_ipw_stabilized.csv',
meta_est_name='ipw_stabilized',
model_type='prop_score',
gen_models=GEN_MODELS)
| 2.140625
| 2
|
NER/handlepseudosamples.py
|
qcwthu/Lifelong-Fewshot-Language-Learning
| 36
|
12777222
|
import os
import json
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils as utils
import sys
import argparse
import matplotlib
import pdb
import numpy as np
import time
import random
import re
import matplotlib.pyplot as plt
from tqdm import tqdm
from tqdm import trange
from sklearn import metrics
from torch.utils import data
from collections import Counter
from transformers import AdamW, get_linear_schedule_with_warmup
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config
from torch.cuda.amp import autocast as autocast
from torch.cuda.amp import GradScaler as GradScaler
def seed_everything(args):
    random.seed(args.seed)
    os.environ['PYTHONHASHSEED'] = str(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = True
    # Note: cudnn.benchmark = True lets cuDNN pick potentially non-deterministic
    # algorithms; set it to False if strict reproducibility is required.
    torch.backends.cudnn.benchmark = True
def ifinclude(str1, str2):
    # Checks whether the token sequence of str1 (an entity name) appears
    # contiguously in str2 (a sentence), case-insensitively.
    str1list = str1.lower().split(' ')  # name tokens
    str2list = str2.lower().split(' ')  # sentence tokens
    ifin = False
    for i in range(0, len(str2list)):
        if str2list[i] == str1list[0]:
            ifin = True
            for j in range(1, len(str1list)):
                # Guard against running past the end of the sentence.
                if i + j >= len(str2list) or str2list[i + j] != str1list[j]:
                    ifin = False
                    break
            if ifin:
                break
        else:
            continue
    return ifin
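# Example (illustrative):
#   ifinclude('New York', 'I live in New York City') -> True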
def handlefile(inputfile,outputfile,allnumber,trainnumber):
f = open(inputfile,'r')
allres = {}
alltype = []
for key in allnumber.keys():
alltype.append(key)
insen = 0
allin = {}
notinsen = 0
allnotin = {}
while True:
line = f.readline().strip()
if not line:
break
linelist = line.split("__ans__")
if len(linelist) != 2:
continue
entitylist = linelist[1]
if entitylist == 'end':
continue
if ';' not in entitylist:
continue
allentity = entitylist.split(";")
if len(allentity) != 2:
continue
firstentity = allentity[0]
#print(firstentity)
if '!' not in firstentity:
continue
splitent = firstentity.split('!')
if len(splitent) != 2:
continue
thistype = splitent[1].strip()
#print(thistype)
if thistype not in alltype:
continue
#print(linelist[0] + '\t' + linelist[1])
name = linelist[1].split(";")[0].split("!")[0].strip(' ')
entype = linelist[1].split(";")[0].split("!")[1].strip(' ')
whole = name + " ! " + entype + " ;"
#print(name)
#####some filters
thissen = linelist[0]
####length
# senlist = thissen.split(' ')
# if len(senlist) <= 3:
# continue
# digitnum = 0
# for one in senlist:
# if re.search(r'\d', one):
# digitnum += 1
# if len(senlist) - digitnum < 1:
# continue
#ifin = ifinclude(name,linelist[0])
#if ifin:
if name.lower() in linelist[0].lower():
length = len(name)
startindex = linelist[0].lower().find(name.lower())
endindex = startindex + length
toreplace = linelist[0][startindex:endindex]
#newsen = linelist[0]
newsen = linelist[0].replace(toreplace,name)
if thistype not in allin:
#allin[thistype] = [linelist[0] + '\t' + linelist[1]]
allin[thistype] = {}
if whole not in allin[thistype]:
insen += 1
allin[thistype][whole] = [newsen]
#else:
# allin[thistype][whole].append(linelist[0])
else:
#allin[thistype].append(linelist[0] + '\t' + linelist[1])
if whole not in allin[thistype]:
insen += 1
allin[thistype][whole] = [newsen]
#else:
# allin[thistype][whole].append(linelist[0])
else:
########some filter
##ensure the entity has similar words in sen
# if name.lower() in linelist[0].lower():
# ###thisone will be used
# str1list = name.lower().split(' ') ####name
# nolowlist = name.split(' ')
# str2list = linelist[0].lower().split(' ') ####linelist
# ifin = False
# touselist = linelist[0].split(' ')
# for i in range(0, len(str2list)):
# if str1list[0] in str2list[i]:
# touselist[i] = nolowlist[0]
# for j in range(1,len(str1list)):
# touselist[i+j] = nolowlist[j]
# else:
# continue
# newsen = ' '.join(touselist)
# else:
# ####whether first similar 0.75 5
# str1list = name.lower().split(' ')
# tousestr = str1list[0]
# str2list = linelist[0].lower().split(' ')
# ifhave = 0
# index = -1
# for j in range(0,len(str2list)):
# thistoken = str2list[j]
# samenum = 0
# for k in range(min(len(tousestr),len(thistoken))):
# if tousestr[k] == thistoken[k]:
# samenum += 1
# else:
# break
# if min(len(tousestr),len(thistoken)) == 0:
# continue
# if samenum >= 5 or float(samenum) / float(min(len(tousestr),len(thistoken))) >= 0.75:
# ifhave = 1
# index = j
# break
# if not ifhave:
# continue
# else:
# ###replace
# newlinelist = linelist[0].split()[0:index] + name.split(' ') + linelist[0].split()[index+1:]
# newsen = " ".join(newlinelist)
if thistype not in allnotin:
#allnotin[thistype] = [linelist[0] + '\t' + linelist[1]]
allnotin[thistype] = {}
if whole not in allnotin[thistype]:
notinsen += 1
newsen = linelist[0] + " " + name
allnotin[thistype][whole] = [newsen]
#else:
# allnotin[thistype][whole].append(linelist[0])
else:
#allnotin[thistype].append(linelist[0] + '\t' + linelist[1])
if whole not in allnotin[thistype]:
notinsen += 1
newsen = linelist[0] + " " + name
allnotin[thistype][whole] = [newsen]
#else:
# allnotin[thistype][whole].append(linelist[0])
f.close()
print(insen)
print(notinsen)
# for key in allin:
# print(key+"\t"+str(len(allin[key])))
# for key in allnotin:
# print(key+"\t"+str(len(allnotin[key])))
# for key in allin:
# for one in allin[key]:
# for aa in allin[key][one]:
# print(aa+" "+one)
# for key in allnotin:
# for one in allnotin[key]:
# for aa in allnotin[key][one]:
# print(aa + " " + one)
finalres = {}
fall = open("allgenerate",'w')
for key in allnumber.keys():
finalres[key] = []
for key in allin:
for one in allin[key]:
for aa in allin[key][one]:
finalres[key].append(aa+"\t"+one)
fall.write(aa+"\t"+one+'\n')
for key in allnotin:
for one in allnotin[key]:
for aa in allnotin[key][one]:
finalres[key].append(aa+"\t"+one)
fall.write(aa + "\t" + one + '\n')
fall.close()
#for key in finalres.keys():
# print(len(finalres[key]))
sampleres = []
trainres = []
validres = []
for key in finalres.keys():
thissample = random.sample(finalres[key],allnumber[key])
#print(thissample)
sampleres.extend(thissample)
####divide to train and valid
thistrainnum = trainnumber[key]
indexlist = [i for i in range(allnumber[key])]
#print(indexlist)
trainuse = random.sample(indexlist,thistrainnum)
#print(trainuse)
for j in range(allnumber[key]):
if j in trainuse:
trainres.append(thissample[j])
else:
validres.append(thissample[j])
#print(trainres)
#print(validres)
#print(sampleres)
fo = open(outputfile, 'w')
for one in sampleres:
fo.write(one+"\n")
fo.close()
fot = open('train_mem.txt', 'w')
for one in trainres:
fot.write(one+"\n")
fot.close()
fov = open('valid_mem.txt', 'w')
for one in validres:
fov.write(one + "\n")
fov.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="latentRE")
parser.add_argument("--model", dest="model", type=str,
default="T5", help="{T5}")
parser.add_argument("--seed", dest="seed", type=int,
default=160, help="seed for network")
args = parser.parse_args()
seed_everything(args)
if args.model == "T5":
#seed 100
#train: person:10 location:12 org:6 mix:7
#valid: person:16 location:12 org:11 mix:8
print("right!")
# allnumber = {'org': 17, 'location': 24, 'person': 26, 'mix': 15}
# trainnumber = {'org': 6, 'location': 12, 'person': 10, 'mix': 7}
# allnumber = {'org':15,'location':14,'person':11,'mix':9}
# trainnumber = {'org':7,'location':8,'person':5,'mix':4}
allnumber = {'org': 16, 'location': 21, 'person': 20, 'mix': 16}
trainnumber = {'org': 7, 'location': 10, 'person': 11, 'mix': 6}
handlefile("pseudosamples", "allselect", allnumber, trainnumber)
else:
raise Exception("No such model! Please make sure that `model` takes the value in {T5}")
| 1.929688
| 2
|
MoinMoin/packages.py
|
RealTimeWeb/wikisite
| 1
|
12777223
|
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - Package Installer
@copyright: 2005 MoinMoin:AlexanderSchremmer,
2007-2010 MoinMoin:ReimarBauer
@license: GNU GPL, see COPYING for details.
"""
import os, re, sys
import zipfile
from MoinMoin import config, wikiutil, caching, user
from MoinMoin.Page import Page
from MoinMoin.PageEditor import PageEditor
from MoinMoin.logfile import editlog, eventlog
from MoinMoin.util import filesys
MOIN_PACKAGE_FILE = 'MOIN_PACKAGE'
MAX_VERSION = 1
# Exceptions
class PackageException(Exception):
""" Raised if the package is broken. """
pass
class ScriptException(Exception):
""" Raised when there is a problem in the script. """
def __unicode__(self):
""" Return unicode error message """
if isinstance(self.args[0], str):
return unicode(self.args[0], config.charset)
else:
return unicode(self.args[0])
class RuntimeScriptException(ScriptException):
""" Raised when the script problem occurs at runtime. """
class ScriptExit(Exception):
""" Raised by the script commands when the script should quit. """
def event_logfile(self, pagename, pagefile):
# add event log entry
eventtype = 'SAVENEW'
mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
elog = eventlog.EventLog(self.request)
elog.add(self.request, eventtype, {'pagename': pagename}, 1, mtime_usecs)
def edit_logfile_append(self, pagename, pagefile, rev, action, logname='edit-log', comment=u'', author=u"Scripting Subsystem"):
glog = editlog.EditLog(self.request, uid_override=author)
pagelog = Page(self.request, pagename).getPagePath(logname, use_underlay=0, isfile=1)
llog = editlog.EditLog(self.request, filename=pagelog,
uid_override=author)
mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
host = '::1'
extra = u''
glog.add(self.request, mtime_usecs, rev, action, pagename, host, comment)
llog.add(self.request, mtime_usecs, rev, action, pagename, host, extra, comment)
event_logfile(self, pagename, pagefile)
# Parsing and (un)quoting for script files
def packLine(items, separator="|"):
""" Packs a list of items into a string that is separated by `separator`. """
return '|'.join([item.replace('\\', '\\\\').replace(separator, '\\' + separator) for item in items])
def unpackLine(string, separator="|"):
""" Unpacks a string that was packed by packLine. """
result = []
token = None
escaped = False
for char in string:
if token is None:
token = ""
if escaped and char in ('\\', separator):
token += char
escaped = False
continue
escaped = (char == '\\')
if escaped:
continue
if char == separator:
result.append(token)
token = ""
else:
token += char
if token is not None:
result.append(token)
return result
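# Example round-trip (illustrative): packLine([u'a|b', u'c']) yields u'a\|b|c',
# and unpackLine(u'a\|b|c') recovers [u'a|b', u'c'].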
def str2boolean(string):
"""
Converts the parameter to a boolean value by recognising different
truth literals.
"""
return (string.lower() in ('yes', 'true', '1'))
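# e.g. str2boolean("Yes") -> True, str2boolean("0") -> False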
class ScriptEngine:
"""
The script engine supplies the needed commands to execute the installation
script.
"""
def _extractToFile(self, source, target):
""" Extracts source and writes the contents into target. """
# TODO, add file dates
target_file = open(target, "wb")
target_file.write(self.extract_file(source))
target_file.close()
def __init__(self):
self.themename = None
self.ignoreExceptions = False
self.goto = 0
#Satisfy pylint
self.msg = getattr(self, "msg", "")
self.request = getattr(self, "request", None)
def do_addattachment(self, zipname, filename, pagename, author=u"Scripting Subsystem", comment=u""):
"""
Installs an attachment
@param pagename: Page where the file is attached. Or in 2.0, the file itself.
@param zipname: Filename of the attachment from the zip file
@param filename: Filename of the attachment (just applicable for MoinMoin < 2.0)
"""
if self.request.user.may.write(pagename):
_ = self.request.getText
attachments = Page(self.request, pagename).getPagePath("attachments", check_create=1)
filename = wikiutil.taintfilename(filename)
zipname = wikiutil.taintfilename(zipname)
target = os.path.join(attachments, filename)
page = PageEditor(self.request, pagename, do_editor_backup=0, uid_override=author)
rev = page.current_rev()
path = page.getPagePath(check_create=0)
if not os.path.exists(target):
self._extractToFile(zipname, target)
if os.path.exists(target):
filesys.chmod(target, 0666 & config.umask)
action = 'ATTNEW'
edit_logfile_append(self, pagename, path, rev, action, logname='edit-log',
comment=u'%(filename)s' % {"filename": filename}, author=author)
self.msg += u"%(filename)s attached \n" % {"filename": filename}
else:
self.msg += u"%(filename)s not attached \n" % {"filename": filename}
else:
self.msg += u"action add attachment: not enough rights - nothing done \n"
def do_delattachment(self, filename, pagename, author=u"Scripting Subsystem", comment=u""):
"""
Removes an attachment
@param pagename: Page where the file is attached. Or in 2.0, the file itself.
@param filename: Filename of the attachment (just applicable for MoinMoin < 2.0)
"""
if self.request.user.may.write(pagename):
_ = self.request.getText
attachments = Page(self.request, pagename).getPagePath("attachments", check_create=1)
filename = wikiutil.taintfilename(filename)
target = os.path.join(attachments, filename)
page = PageEditor(self.request, pagename, do_editor_backup=0, uid_override=author)
rev = page.current_rev()
path = page.getPagePath(check_create=0)
if os.path.exists(target):
os.remove(target)
action = 'ATTDEL'
edit_logfile_append(self, pagename, path, rev, action, logname='edit-log',
comment=u'%(filename)s' % {"filename": filename}, author=author)
self.msg += u"%(filename)s removed \n" % {"filename": filename}
else:
self.msg += u"%(filename)s does not exist \n" % {"filename": filename}
else:
self.msg += u"action delete attachment: not enough rights - nothing done \n"
def do_print(self, *param):
""" Prints the parameters into output of the script. """
self.msg += '; '.join(param) + "\n"
def do_exit(self):
""" Exits the script. """
raise ScriptExit
def do_ignoreexceptions(self, boolean):
""" Sets the ignore exceptions setting. If exceptions are ignored, the
script does not stop if one is encountered. """
self.ignoreExceptions = str2boolean(boolean)
def do_ensureversion(self, version, lines=0):
"""
Ensures that the version of MoinMoin is greater or equal than
version. If lines is unspecified, the script aborts. Otherwise,
the next lines (amount specified by lines) are not executed.
@param version: required version of MoinMoin (e.g. "1.3.4")
@param lines: lines to ignore
"""
_ = self.request.getText
from MoinMoin.version import release
version_int = [int(x) for x in version.split(".")]
# use a regex here to get only the numbers of the release string (e.g. ignore betaX)
        release = re.compile(r'\d+').findall(release)[0:3]
release = [int(x) for x in release]
if version_int > release:
if lines > 0:
self.goto = lines
else:
raise RuntimeScriptException(_("The package needs a newer version"
" of MoinMoin (at least %s).") %
version)
def do_setthemename(self, themename):
""" Sets the name of the theme which will be altered next. """
self.themename = wikiutil.taintfilename(str(themename))
def do_copythemefile(self, filename, ftype, target):
""" Copies a theme-related file (CSS, PNG, etc.) into a directory of the
current theme.
@param filename: name of the file in this package
@param ftype: the subdirectory of the theme directory, e.g. "css"
@param target: filename, e.g. "screen.css"
"""
_ = self.request.getText
if self.themename is None:
raise RuntimeScriptException(_("The theme name is not set."))
from MoinMoin.web.static import STATIC_FILES_PATH as htdocs_dir
if not os.access(htdocs_dir, os.W_OK):
raise RuntimeScriptException(_("Theme files not installed! Write rights missing for %s.") % htdocs_dir)
theme_file = os.path.join(htdocs_dir, self.themename,
wikiutil.taintfilename(ftype),
wikiutil.taintfilename(target))
theme_dir = os.path.dirname(theme_file)
if not os.path.exists(theme_dir):
os.makedirs(theme_dir)
self._extractToFile(filename, theme_file)
def do_installplugin(self, filename, visibility, ptype, target):
"""
Installs a python code file into the appropriate directory.
@param filename: name of the file in this package
@param visibility: 'local' will copy it into the plugin folder of the
current wiki. 'global' will use the folder of the MoinMoin python
package.
@param ptype: the type of the plugin, e.g. "parser"
@param target: the filename of the plugin, e.g. wiki.py
"""
visibility = visibility.lower()
ptype = wikiutil.taintfilename(ptype.lower())
if visibility == 'global':
basedir = os.path.dirname(__import__("MoinMoin").__file__)
elif visibility == 'local':
basedir = self.request.cfg.plugin_dir
target = os.path.join(basedir, ptype, wikiutil.taintfilename(target))
self._extractToFile(filename, target)
wikiutil._wiki_plugins = {}
def do_installpackage(self, pagename, filename):
"""
Installs a package.
@param pagename: Page where the file is attached. Or in 2.0, the file itself.
@param filename: Filename of the attachment (just applicable for MoinMoin < 2.0)
"""
_ = self.request.getText
attachments = Page(self.request, pagename).getPagePath("attachments", check_create=0)
package = ZipPackage(self.request, os.path.join(attachments, wikiutil.taintfilename(filename)))
if package.isPackage():
if not package.installPackage():
raise RuntimeScriptException(_("Installation of '%(filename)s' failed.") % {
'filename': filename} + "\n" + package.msg)
else:
raise RuntimeScriptException(_('The file %s is not a MoinMoin package file.') % filename)
self.msg += package.msg
def do_addrevision(self, filename, pagename, author=u"Scripting Subsystem", comment=u"", trivial=u"No"):
""" Adds a revision to a page.
@param filename: name of the file in this package
@param pagename: name of the target page
@param author: user name of the editor (optional)
@param comment: comment related to this revision (optional)
@param trivial: boolean, if it is a trivial edit
"""
_ = self.request.getText
trivial = str2boolean(trivial)
if self.request.user.may.write(pagename):
page = PageEditor(self.request, pagename, do_editor_backup=0)
try:
page.saveText(self.extract_file(filename).decode("utf-8"), 0, trivial=trivial, comment=comment)
except PageEditor.Unchanged:
pass
else:
self.msg += u"%(pagename)s added \n" % {"pagename": pagename}
else:
self.msg += u"action add revision: not enough rights - nothing done \n"
def do_renamepage(self, pagename, newpagename, author=u"Scripting Subsystem", comment=u"Renamed by the scripting subsystem."):
""" Renames a page.
@param pagename: name of the target page
@param newpagename: name of the new page
@param author: user name of the editor (optional)
@param comment: comment related to this revision (optional)
"""
if self.request.user.may.write(pagename):
_ = self.request.getText
page = PageEditor(self.request, pagename, do_editor_backup=0, uid_override=author)
if not page.exists():
raise RuntimeScriptException(_("The page %s does not exist.") % pagename)
newpage = PageEditor(self.request, newpagename)
page.renamePage(newpage.page_name, comment=u"Renamed from '%s'" % (pagename))
self.msg += u'%(pagename)s renamed to %(newpagename)s\n' % {
"pagename": pagename,
"newpagename": newpagename}
else:
self.msg += u"action rename page: not enough rights - nothing done \n"
def do_deletepage(self, pagename, comment="Deleted by the scripting subsystem."):
""" Marks a page as deleted (like the DeletePage action).
@param pagename: page to delete
@param comment: the related comment (optional)
"""
if self.request.user.may.write(pagename):
_ = self.request.getText
page = PageEditor(self.request, pagename, do_editor_backup=0)
if not page.exists():
raise RuntimeScriptException(_("The page %s does not exist.") % pagename)
page.deletePage(comment)
else:
self.msg += u"action delete page: not enough rights - nothing done \n"
def do_replaceunderlayattachment(self, zipname, filename, pagename, author=u"Scripting Subsystem", comment=u""):
"""
overwrite underlay attachments
@param pagename: Page where the file is attached. Or in 2.0, the file itself.
@param zipname: Filename of the attachment from the zip file
@param filename: Filename of the attachment (just applicable for MoinMoin < 2.0)
"""
if self.request.user.may.write(pagename):
_ = self.request.getText
filename = wikiutil.taintfilename(filename)
zipname = wikiutil.taintfilename(zipname)
page = PageEditor(self.request, pagename, do_editor_backup=0, uid_override=author)
pagedir = page.getPagePath(use_underlay=1, check_create=1)
attachments = os.path.join(pagedir, 'attachments')
if not os.path.exists(attachments):
os.mkdir(attachments)
target = os.path.join(attachments, filename)
self._extractToFile(zipname, target)
if os.path.exists(target):
filesys.chmod(target, 0666 & config.umask)
else:
self.msg += u"action replace underlay attachment: not enough rights - nothing done \n"
def do_replaceunderlay(self, filename, pagename):
"""
Overwrites underlay pages. Implementational detail: This needs to be
kept in sync with the page class.
@param filename: name of the file in the package
@param pagename: page to be overwritten
"""
page = Page(self.request, pagename)
pagedir = page.getPagePath(use_underlay=1, check_create=1)
revdir = os.path.join(pagedir, 'revisions')
cfn = os.path.join(pagedir, 'current')
revstr = '%08d' % 1
if not os.path.exists(revdir):
os.mkdir(revdir)
currentf = open(cfn, 'w')
currentf.write(revstr + "\n")
currentf.close()
pagefile = os.path.join(revdir, revstr)
self._extractToFile(filename, pagefile)
# Clear caches
# TODO Code from MoinMoin/script/maint/cleancache.py may be used
def runScript(self, commands):
""" Runs the commands.
@param commands: list of strings which contain a command each
@return True on success
"""
_ = self.request.getText
headerline = unpackLine(commands[0])
if headerline[0].lower() != "MoinMoinPackage".lower():
raise PackageException(_("Invalid package file header."))
self.revision = int(headerline[1])
if self.revision > MAX_VERSION:
raise PackageException(_("Package file format unsupported."))
lineno = 1
success = True
for line in commands[1:]:
lineno += 1
if self.goto > 0:
self.goto -= 1
continue
if line.startswith("#") or len(line) == 0:
continue
elements = unpackLine(line)
fnname = elements[0].strip().lower()
if fnname == '':
continue
try:
if fnname in self.request.cfg.packagepages_actions_excluded:
self.msg += u"action package %s: excluded \n" % elements[0].strip()
success = False
continue
else:
fn = getattr(self, "do_" + fnname)
except AttributeError:
self.msg += u"Exception RuntimeScriptException: %s\n" % (
_("Unknown function %(func)s in line %(lineno)i.") %
{'func': elements[0], 'lineno': lineno}, )
success = False
break
try:
fn(*elements[1:])
except ScriptExit:
break
except TypeError, e:
self.msg += u"Exception %s (line %i): %s\n" % (e.__class__.__name__, lineno, unicode(e))
success = False
break
except RuntimeScriptException, e:
if not self.ignoreExceptions:
self.msg += u"Exception %s (line %i): %s\n" % (e.__class__.__name__, lineno, unicode(e))
success = False
break
return success
class Package:
""" A package consists of a bunch of files which can be installed. """
def __init__(self, request):
self.request = request
self.msg = ""
def installPackage(self):
""" Opens the package and executes the script. """
_ = self.request.getText
if not self.isPackage():
raise PackageException(_("The file %s was not found in the package.") % MOIN_PACKAGE_FILE)
commands = self.getScript().splitlines()
return self.runScript(commands)
def getScript(self):
""" Returns the script. """
return self.extract_file(MOIN_PACKAGE_FILE).decode("utf-8").replace(u"\ufeff", "")
def extract_file(self, filename):
""" Returns the contents of a file in the package. """
raise NotImplementedError
def filelist(self):
""" Returns a list of all files. """
raise NotImplementedError
def isPackage(self):
""" Returns true if this package is recognised. """
raise NotImplementedError
class ZipPackage(Package, ScriptEngine):
""" A package that reads its files from a .zip file. """
def __init__(self, request, filename):
""" Initialise the package.
@param request: RequestBase instance
@param filename: filename of the .zip file
"""
Package.__init__(self, request)
ScriptEngine.__init__(self)
self.filename = filename
self._isZipfile = zipfile.is_zipfile(filename)
if self._isZipfile:
self.zipfile = zipfile.ZipFile(filename)
# self.zipfile.getinfo(name)
def extract_file(self, filename):
""" Returns the contents of a file in the package. """
_ = self.request.getText
try:
return self.zipfile.read(filename.encode("cp437"))
except KeyError:
raise RuntimeScriptException(_(
"The file %s was not found in the package.") % filename)
def filelist(self):
""" Returns a list of all files. """
return self.zipfile.namelist()
def isPackage(self):
""" Returns true if this package is recognised. """
return self._isZipfile and MOIN_PACKAGE_FILE in self.zipfile.namelist()
def main():
args = sys.argv
if len(args)-1 not in (2, 3) or args[1] not in ('l', 'i'):
print >> sys.stderr, """MoinMoin Package Installer v%(version)i
%(myname)s action packagefile [request URL]
action - Either "l" for listing the script or "i" for installing.
packagefile - The path to the file containing the MoinMoin installer package
request URL - Just needed if you are running a wiki farm, used to differentiate
the correct wiki.
Example:
%(myname)s i ../package.zip
""" % {"version": MAX_VERSION, "myname": os.path.basename(args[0])}
raise SystemExit
packagefile = args[2]
if len(args) > 3:
request_url = args[3]
else:
request_url = None
# Setup MoinMoin environment
from MoinMoin.web.contexts import ScriptContext
request = ScriptContext(url=request_url)
package = ZipPackage(request, packagefile)
if not package.isPackage():
print "The specified file %s is not a package." % packagefile
raise SystemExit
if args[1] == 'l':
print package.getScript()
elif args[1] == 'i':
if package.installPackage():
print "Installation was successful!"
else:
print "Installation failed."
if package.msg:
print package.msg
if __name__ == '__main__':
main()
| 2.140625
| 2
|
emulation_execution_run.py
|
theuerse/emulation_lib
| 2
|
12777224
|
import os
import emulation_lib.ssh_lib as ssh
import logging
from datetime import datetime
from datetime import timedelta
from multiprocessing.dummy import Pool as ThreadPool
import time
from . import constants
CONFIG = {}
EXPECTED_RESULTFILES = {}
CONFIG_FILES = {}
REMOTE = 0
LOCAL = 1
setup_scripts = []
runtime_scripts = []
cmd = ""
logger = logging.getLogger("emulation_lib")
logger.setLevel(logging.INFO)
def inventorize_scripts():
global setup_scripts
global runtime_scripts
# setup-scripts
for filename in [f for f in os.listdir(CONFIG["COMMAND_DIR"]) if f.endswith(constants.SETUP_SCRIPT_POSTFIX)]:
name = filename.replace(constants.SETUP_SCRIPT_POSTFIX, "")
if name not in setup_scripts:
setup_scripts.append(name)
# runtime-scripts
for filename in [f for f in os.listdir(CONFIG["COMMAND_DIR"]) if f.endswith(constants.RUNTIME_SCRIPT_POSTFIX)]:
name = filename.replace(constants.RUNTIME_SCRIPT_POSTFIX, "")
if name not in runtime_scripts:
runtime_scripts.append(name)
return
def perform_sanity_checks():
for ip in setup_scripts:
if ip not in runtime_scripts:
raise ValueError(ip + " is missing a corresponding runtime-script, aborting ...")
for ip in runtime_scripts:
if ip not in setup_scripts:
raise ValueError(ip + " is missing a corresponding setup-script, aborting ...")
def perform_setup(ip):
s = ssh.Connection(ip, CONFIG["SSH_USER"], password=CONFIG["SSH_PASSWORD"])
# create folder structure
foldercmd = "mkdir -p " + CONFIG["REMOTE_EMULATION_DIR"] + " " + CONFIG["REMOTE_CONFIG_DIR"] + " " + CONFIG["REMOTE_RESULT_DIR"] + " " + CONFIG["REMOTE_DATA_DIR"]
s.execute(foldercmd)
target_setup_file = os.path.join(CONFIG["REMOTE_CONFIG_DIR"], constants.SETUP_SCRIPT_POSTFIX)
target_runtime_file = os.path.join(CONFIG["REMOTE_CONFIG_DIR"], constants.RUNTIME_SCRIPT_POSTFIX)
# transmit setup- and runtime-scripts
s.put(os.path.join(CONFIG["COMMAND_DIR"], ip + constants.SETUP_SCRIPT_POSTFIX), target_setup_file)
    s.put(os.path.join(CONFIG["COMMAND_DIR"], ip + constants.RUNTIME_SCRIPT_POSTFIX), target_runtime_file)
# transmit config-files
for config_file in CONFIG_FILES[ip]:
s.put(config_file[LOCAL], config_file[REMOTE]) # transmit config-file
s.execute("chmod +x " + target_setup_file)
result = s.execute(target_setup_file + " > /dev/null 2>&1 ; date -u; echo 'finished setup'") # wait for completion
logger.info(ip + ": " + str(result))
s.close()
return
def execute_runtime_script(ip):
s = ssh.Connection(ip, CONFIG["SSH_USER"], password=CONFIG["SSH_PASSWORD"])
result = s.execute("screen -d -m " + cmd)
#logger.info(result)
s.close()
return
def collect_traces(ip):
s = ssh.Connection(ip, CONFIG["SSH_USER"], password=CONFIG["SSH_PASSWORD"])
for fileTuple in EXPECTED_RESULTFILES[ip]:
parentdir = os.path.dirname(fileTuple[LOCAL])
if not os.path.isdir(parentdir):
os.makedirs(parentdir) # ensure local folder structure exists
if fileTuple[LOCAL].endswith(".zip"): # zip first
s.execute("rm " + fileTuple[REMOTE] + ".zip") # remove eventually already existing file
s.execute("cd " + os.path.dirname(fileTuple[REMOTE]) + " && zip -j " + os.path.basename(fileTuple[REMOTE]) +
".zip " + os.path.basename(fileTuple[REMOTE]))
s.get(fileTuple[REMOTE] + ".zip", fileTuple[LOCAL])
else:
s.get(fileTuple[REMOTE], fileTuple[LOCAL])
s.close()
#
# main entry-point of the program
#
def start_emulation_run(duration, expectedResultfiles, configFiles, config):
global cmd
global CONFIG
global EXPECTED_RESULTFILES
global CONFIG_FILES
CONFIG = config
EXPECTED_RESULTFILES = expectedResultfiles
CONFIG_FILES = configFiles
# inventorize scripts
inventorize_scripts()
# perform sanity-checks (e.g. there must be a runtime-script for every setup-script and vice versa)
perform_sanity_checks()
# deploy scripts + run all setup-scripts and await their termination
logger.info("Performing the setup (script-distribution + run all setup-scripts) ...")
pool = ThreadPool()
results = pool.map(perform_setup, setup_scripts)
pool.close()
pool.join()
# logger.info(results)
# run all runtime-scripts (async ssh-ops towards single starting-time for all nodes)
logger.info("Starting all runtime-scripts (" + datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + ")")
start = datetime.utcnow() + timedelta(seconds=CONFIG["MIN_START_TIME_OFFSET"]) + timedelta(seconds=1)
start_time = start.strftime('%Y-%m-%d %H:%M:%S')
    with open(os.path.join(CONFIG["RESULT_DIR"], 'start_times.txt'), 'a') as time_index:  # save common start time for every run
time_index.write(str(CONFIG["RUN"]) + '\t' + start_time + '\n')
logger.info("Coordinated start at: " + start_time)
# build runtime-script-command
cmd = "cmdScheduler " + os.path.join(CONFIG["REMOTE_CONFIG_DIR"],constants.RUNTIME_SCRIPT_POSTFIX) + " " + start_time
# call start-scripts
pool = ThreadPool()
pool.map(execute_runtime_script, setup_scripts)
pool.close()
pool.join()
logger.info("Waiting for emulation to end")
emulationEnd = start + timedelta(seconds=duration)
time.sleep((emulationEnd - datetime.utcnow()).seconds + 1) # '+1' ... account for rounding errors
# collect result-files
logger.info("Waiting five seconds for logfiles to be written (" + datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + ")")
time.sleep(5) # wait for (eventual) logfiles to be written
logger.info("Collecting results (" + datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + ")")
pool = ThreadPool()
pool.map(collect_traces, expectedResultfiles)
pool.close()
pool.join()
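# A minimal usage sketch (illustrative values; result tuples are (remote_path, local_path),
# and `config` must carry keys such as SSH_USER, SSH_PASSWORD and the *_DIR entries used above):
#
#     start_emulation_run(duration=60,
#                         expectedResultfiles={'10.0.0.1': [('/remote/out.log', 'results/out.log')]},
#                         configFiles={'10.0.0.1': []},
#                         config=my_config)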
| 2.09375
| 2
|
AnkiTools/api/defaults.py
|
patarapolw/AnkiTools
| 53
|
12777225
|
<reponame>patarapolw/AnkiTools<filename>AnkiTools/api/defaults.py
import json
from collections import OrderedDict
from AnkiTools.tools.defaults import DEFAULT_API_PREFORMATTED_PAYLOAD
def get_default_payload(sample_params: dict,
add_note_template: dict=None,
preformatted_payload: str=str(DEFAULT_API_PREFORMATTED_PAYLOAD)):
if add_note_template is None:
add_note_template = dict()
note_type = sample_params['note_type']
assert isinstance(sample_params['data'], (dict, OrderedDict))
headers = list(sample_params['data'].keys())
contents = list(sample_params['data'].values())
forward = add_note_template.get('forward', "Forward")
backward = add_note_template.get('backward', "Backward")
has_backward = add_note_template.get('has_backward', False)
payload_str = preformatted_payload\
.replace('\\\\' + 'note_type', note_type)\
.replace('\\\\' + 'forward', forward)\
.replace('\\\\' + 'backward', backward)
for i, header in enumerate(headers):
payload_str = payload_str.replace('\\\\' + 'header' + str(i), header)
for i, content in enumerate(contents):
payload_str = payload_str.replace('\\\\' + 'content' + str(i), content)
payload = json.loads(payload_str, object_pairs_hook=OrderedDict)
if not has_backward:
payload['data'][note_type][0]['decks'].pop(backward)
payload['definitions'][note_type]['templates'].pop()
return payload
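# A minimal usage sketch (hypothetical note type and field names):
#
#     payload = get_default_payload({
#         'note_type': 'Basic',
#         'data': OrderedDict([('Front', 'question'), ('Back', 'answer')]),
#     })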
| 2.171875
| 2
|
panoptes_client/subject_set.py
|
RonaCostello/panoptes-python-client
| 0
|
12777226
|
from __future__ import absolute_import, division, print_function
from builtins import str
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.set_member_subject import SetMemberSubject
from panoptes_client.subject import Subject
from panoptes_client.utils import batchable
class SubjectSet(PanoptesObject):
_api_slug = 'subject_sets'
_link_slug = 'subject_sets'
_edit_attributes = (
'display_name',
{
'links': (
'project',
),
'metadata': (
'category',
)
},
)
@property
def subjects(self):
"""
A generator which yields :py:class:`.Subject` objects which are in this
subject set.
Examples::
for subject in subject_set.subjects:
print(subject.id)
"""
for sms in SetMemberSubject.where(subject_set_id=self.id):
yield sms.links.subject
@batchable
def add(self, subjects):
"""
Links the given subjects to this set.
- **subjects** can be a list of :py:class:`.Subject` instances, a list
of subject IDs, a single :py:class:`.Subject` instance, or a single
subject ID.
Examples::
subject_set.add(1234)
subject_set.add([1,2,3,4])
subject_set.add(Subject(1234))
subject_set.add([Subject(12), Subject(34)])
"""
_subjects = self._build_subject_list(subjects)
self.http_post(
'{}/links/subjects'.format(self.id),
json={'subjects': _subjects}
)
@batchable
def remove(self, subjects):
"""
Unlinks the given subjects from this set.
- **subjects** can be a list of :py:class:`.Subject` instances, a list
of subject IDs, a single :py:class:`.Subject` instance, or a single
subject ID.
Examples::
subject_set.remove(1234)
subject_set.remove([1,2,3,4])
subject_set.remove(Subject(1234))
subject_set.remove([Subject(12), Subject(34)])
"""
_subjects = self._build_subject_list(subjects)
_subjects_ids = ",".join(_subjects)
self.http_delete(
'{}/links/subjects/{}'.format(self.id, _subjects_ids)
)
def __contains__(self, subject):
"""
Tests if the subject is linked to the subject_set.
- **subject** a single :py:class:`.Subject` instance, or a single
subject ID.
Returns a boolean indicating if the subject is linked to the
subject_set.
Examples::
1234 in subject_set
Subject(1234) in subject_set
"""
if isinstance(subject, Subject):
_subject_id = str(subject.id)
else:
_subject_id = str(subject)
linked_subject_count = SetMemberSubject.where(
subject_set_id=self.id,
subject_id=_subject_id
).object_count
return linked_subject_count == 1
def _build_subject_list(self, subjects):
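        # Normalise a mix of Subject instances and raw IDs into a list of ID strings.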
_subjects = []
for subject in subjects:
            if not isinstance(subject, (Subject, int, str)):
                raise TypeError
if isinstance(subject, Subject):
_subject_id = str(subject.id)
else:
_subject_id = str(subject)
_subjects.append(_subject_id)
return _subjects
LinkResolver.register(SubjectSet)
LinkResolver.register(SubjectSet, 'subject_set')
| 2.25
| 2
|
bot/exts/error_handler.py
|
python-discord/sir-robin
| 12
|
12777227
|
from discord import Colour, Embed
from discord.ext.commands import (BadArgument, Cog, CommandError,
CommandNotFound, Context,
MissingRequiredArgument)
from bot.bot import SirRobin
from bot.log import get_logger
log = get_logger(__name__)
class ErrorHandler(Cog):
"""Handles errors emitted from commands."""
def __init__(self, bot: SirRobin):
self.bot = bot
@staticmethod
def _get_error_embed(title: str, body: str) -> Embed:
"""Return a embed with our error colour assigned."""
return Embed(
title=title,
colour=Colour.brand_red(),
description=body
)
@Cog.listener()
async def on_command_error(self, ctx: Context, error: CommandError) -> None:
"""
Generic command error handling from other cogs.
Using the error type, handle the error appropriately.
if there is no handling for the error type raised,
a message will be sent to the user & it will be logged.
In the future, I would expect this to be used as a place
to push errors to a sentry instance.
"""
log.trace(f"Handling a raised error {error} from {ctx.command}")
# We could handle the subclasses of UserInputError errors together, using the error
# name as the embed title. Before doing this we would have to verify that all messages
# attached to subclasses of this error are human-readable, as they are user facing.
if isinstance(error, BadArgument):
embed = self._get_error_embed("Bad argument", str(error))
await ctx.send(embed=embed)
return
elif isinstance(error, CommandNotFound):
embed = self._get_error_embed("Command not found", str(error))
await ctx.send(embed=embed)
return
elif isinstance(error, MissingRequiredArgument):
embed = self._get_error_embed("Missing required argument", str(error))
await ctx.send(embed=embed)
return
        # If we haven't handled it by this point, it is considered an unexpected/unhandled error.
await ctx.send(
f"Sorry, an unexpected error occurred. Please let us know!\n\n"
f"```{error.__class__.__name__}: {error}```"
)
log.error(f"Error executing command invoked by {ctx.message.author}: {ctx.message.content}", exc_info=error)
async def setup(bot: SirRobin) -> None:
"""Load the ErrorHandler cog."""
await bot.add_cog(ErrorHandler(bot))
| 2.59375
| 3
|
tests/suite/test_virtual_server_tls_redirect.py
|
saptagiri1983/kubernetes-ingress
| 1
|
12777228
|
<gh_stars>1-10
import pytest
import requests
from settings import TEST_DATA
from suite.custom_resources_utils import patch_virtual_server_from_yaml
from suite.resources_utils import wait_before_test
@pytest.mark.vs
@pytest.mark.parametrize('crd_ingress_controller, virtual_server_setup',
[({"type": "complete", "extra_args": [f"-enable-custom-resources"]},
{"example": "virtual-server-tls-redirect", "app_type": "simple"})],
indirect=True)
class TestVirtualServerTLSRedirect:
def test_tls_redirect_defaults(self, kube_apis, crd_ingress_controller, virtual_server_setup):
patch_virtual_server_from_yaml(kube_apis.custom_objects,
virtual_server_setup.vs_name,
f"{TEST_DATA}/virtual-server-tls-redirect/virtual-server-default-redirect.yaml",
virtual_server_setup.namespace)
wait_before_test(1)
resp_1 = requests.get(virtual_server_setup.backend_1_url,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False)
resp_2 = requests.get(virtual_server_setup.backend_2_url,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False)
assert resp_1.status_code == 301, "Expected: a redirect for scheme=http"
assert resp_2.status_code == 301, "Expected: a redirect for scheme=http"
resp_3 = requests.get(virtual_server_setup.backend_1_url_ssl,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False, verify=False)
resp_4 = requests.get(virtual_server_setup.backend_2_url_ssl,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False, verify=False)
assert resp_3.status_code == 200, "Expected: no redirect for scheme=https"
assert resp_4.status_code == 200, "Expected: no redirect for scheme=https"
def test_tls_redirect_based_on_header(self, kube_apis, crd_ingress_controller, virtual_server_setup):
patch_virtual_server_from_yaml(kube_apis.custom_objects,
virtual_server_setup.vs_name,
f"{TEST_DATA}/virtual-server-tls-redirect/virtual-server-header-redirect.yaml",
virtual_server_setup.namespace)
wait_before_test(1)
resp_1 = requests.get(virtual_server_setup.backend_1_url,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "http"},
allow_redirects=False)
resp_2 = requests.get(virtual_server_setup.backend_2_url,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "http"},
allow_redirects=False)
assert resp_1.status_code == 308, "Expected: a redirect for x-forwarded-proto=http"
assert resp_2.status_code == 308, "Expected: a redirect for x-forwarded-proto=http"
resp_3 = requests.get(virtual_server_setup.backend_1_url_ssl,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "http"},
allow_redirects=False, verify=False)
resp_4 = requests.get(virtual_server_setup.backend_2_url_ssl,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "http"},
allow_redirects=False, verify=False)
assert resp_3.status_code == 308, "Expected: a redirect for x-forwarded-proto=http"
assert resp_4.status_code == 308, "Expected: a redirect for x-forwarded-proto=http"
resp_5 = requests.get(virtual_server_setup.backend_1_url,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "https"},
allow_redirects=False)
resp_6 = requests.get(virtual_server_setup.backend_2_url,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "https"},
allow_redirects=False)
assert resp_5.status_code == 200, "Expected: no redirect for x-forwarded-proto=https"
assert resp_6.status_code == 200, "Expected: no redirect for x-forwarded-proto=https"
resp_7 = requests.get(virtual_server_setup.backend_1_url_ssl,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "https"},
allow_redirects=False, verify=False)
resp_8 = requests.get(virtual_server_setup.backend_2_url_ssl,
headers={"host": virtual_server_setup.vs_host, "x-forwarded-proto": "https"},
allow_redirects=False, verify=False)
assert resp_7.status_code == 200, "Expected: no redirect for x-forwarded-proto=https"
assert resp_8.status_code == 200, "Expected: no redirect for x-forwarded-proto=https"
def test_tls_redirect_based_on_scheme(self, kube_apis, crd_ingress_controller, virtual_server_setup):
patch_virtual_server_from_yaml(kube_apis.custom_objects,
virtual_server_setup.vs_name,
f"{TEST_DATA}/virtual-server-tls-redirect/virtual-server-scheme-redirect.yaml",
virtual_server_setup.namespace)
wait_before_test(1)
resp_1 = requests.get(virtual_server_setup.backend_1_url,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False)
resp_2 = requests.get(virtual_server_setup.backend_2_url,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False)
assert resp_1.status_code == 302, "Expected: a redirect for scheme=http"
assert resp_2.status_code == 302, "Expected: a redirect for scheme=http"
resp_3 = requests.get(virtual_server_setup.backend_1_url_ssl,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False, verify=False)
resp_4 = requests.get(virtual_server_setup.backend_2_url_ssl,
headers={"host": virtual_server_setup.vs_host},
allow_redirects=False, verify=False)
assert resp_3.status_code == 200, "Expected: no redirect for scheme=https"
assert resp_4.status_code == 200, "Expected: no redirect for scheme=https"
| 2.140625
| 2
|
sv_pdl/atlas/management/commands/upload_atlas_tarball.py
|
eldarion-client/scaife-viewer
| 70
|
12777229
|
import os
import shlex
import subprocess
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Compresses / uploads an ATLAS database tarball
"""
help = "Compresses / uploads an ATLAS database tarball"
@staticmethod
def do_shell_command(command_string):
result = subprocess.check_output(shlex.split(command_string))
return result.decode("utf-8")
def handle(self, *args, **options):
database_path = settings.SV_ATLAS_DB_PATH
if database_path is None:
msg = "The SV_ATLAS_DB_PATH setting is missing and is required for this management command to work."
raise ImproperlyConfigured(msg)
self.stdout.write(
"--[Creating / uploading database tarball]--"
)
database_file = os.path.basename(database_path)
result = self.do_shell_command(f"md5sum {database_path}")
md5sha = result.split(" ")[0]
self.stdout.write(f"{database_path} md5 sha: {md5sha}")
database_dir = os.path.dirname(database_path)
os.chdir(database_dir)
compressed_db_filename = f"db-{md5sha}.tgz"
self.stdout.write(f"Compressing {database_path} as {compressed_db_filename} ")
tar_cmd = f"tar -cvzf {compressed_db_filename} {database_file}"
self.do_shell_command(tar_cmd)
bucket = "atlas-db-tarballs"
site = "sv-pdl"
self.stdout.write(f"Uploading {compressed_db_filename} to {bucket}")
gsutil_cmd = f"gsutil -m cp -a public-read {compressed_db_filename} gs://{bucket}/{site}/{compressed_db_filename}"
self.do_shell_command(gsutil_cmd)
url = f"https://storage.googleapis.com/{bucket}/{site}/{compressed_db_filename}"
self.stdout.write(f"Uploaded to {url}")
self.stdout.write(f"Removing {compressed_db_filename}")
rm_cmd = f"rm {compressed_db_filename}"
self.do_shell_command(rm_cmd)
self.stdout.write(f"Writing {url} to .atlas-db-url")
atlas_db_url_path = os.path.join(
settings.PROJECT_ROOT,
".atlas-db-url"
)
with open(atlas_db_url_path, "w") as f:
f.write(url)
self.stdout.write("--[Done!]--")
# NOTE: run export ATLAS_DB_URL=$(cat .atlas-db-url)
# to populate $ATLAS_DB_URL
| 2.234375
| 2
|
bin/api/texsyn.py
|
wx-csy/P5C
| 1
|
12777230
|
from .. import lang, prob
from .. import common as com
import csv, sys, pathlib
def contest():
    com.setroot()
    print(r'\input{../../resource/statement/stat.tex}')
    print(r'\begin{document}')
    meta: dict = prob.load_problist()
for shortname in sorted(meta, key=lambda k:meta[k]['order']) :
print(r'\subimport{../../contest/%s/statement/}{stat.tex}' % shortname)
print(r'\end{document}')
| 2.125
| 2
|
django_price/settings.py
|
holg/django_price
| 4
|
12777231
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
DEFAULT_CURRENCY = getattr(settings, 'PRICE_DEFAULT_CURRENCY', None)
| 1.695313
| 2
|
backend/tests/routes/mobile_access_delete_registed.py
|
Zaptyp/wulkanowy-web
| 0
|
12777232
|
<reponame>Zaptyp/wulkanowy-web
from tests.checks.status_code import status_check
from tests.routes.login import client
def mobile_access_delete_registed_test(
session_data, headers, student, school_id, host, symbol, ssl, id_mobile_deleted, fg
):
response = client.post(
"/api/v1/uonetplus-uczen/mobile-access/delete-registered-device",
headers={"Content-Type": "application/json"},
json={
"session_data": session_data,
"student": student,
"school_id": school_id,
"host": host,
"symbol": symbol,
"ssl": ssl,
"json": {"id": id_mobile_deleted},
"headers": headers,
},
)
status_check(response.status_code, response.json(), fg)
assert response.json()["success"] == True
| 2.109375
| 2
|
ds/util/seed_everything.py
|
Hattyoriiiiiii/snippets
| 0
|
12777233
|
import os
import random
import numpy as np
import tensorflow as tf  # assumed available; drop the TF lines below if not using TensorFlow

def seed_everything(seed=2020):
    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    np.random.seed(seed)
    tf.random.set_seed(seed)

seed_everything(42)
| 1.859375
| 2
|
tests/test_backends/test_zone.py
|
luhn/limited
| 0
|
12777234
|
from typing import Dict
import pytest
from limited import Zone
from limited.exceptions import LimitExceededException
class MockZone(Zone):
buckets: Dict[str, int]
def __init__(self, size: int = 10, rate: float = 1.0):
self.rate = rate
self.size = size
self.buckets = dict()
def count(self, key: str) -> int:
return self.buckets[key]
def remove(self, key: str, count: int) -> bool:
if self.buckets[key] >= count:
self.buckets[key] -= count
return True
else:
return False
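# Note: MockZone stubs only count()/remove(); check, increment, limit, hard_limit
# and ttl come from the Zone base class (ttl is presumably size / rate, per the
# ttl test below).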
def test_zone_ttl():
zone = MockZone(size=10, rate=2.0)
assert zone.ttl == 5.0
def test_zone_check():
zone = MockZone()
zone.buckets['a'] = 5
zone.buckets['b'] = 0
assert zone.check('a')
assert not zone.check('b')
def test_zone_increment():
zone = MockZone()
zone.buckets['a'] = 5
zone.increment('a')
assert zone.buckets['a'] == 4
def test_zone_limit():
zone = MockZone()
zone.buckets['a'] = 5
zone.buckets['b'] = 0
assert not zone.limit('a')
assert zone.limit('b')
assert zone.buckets['a'] == 4
assert zone.buckets['b'] == 0
def test_zone_hard_limit():
zone = MockZone()
zone.buckets['a'] = 5
zone.buckets['b'] = 0
zone.hard_limit('a')
with pytest.raises(LimitExceededException):
zone.hard_limit('b')
| 2.640625
| 3
|
kong_admin/admin.py
|
veris-neerajdhiman/django-kong-admin
| 0
|
12777235
|
<filename>kong_admin/admin.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from jsonfield2.fields import JSONField
from .models import APIReference, PluginConfigurationReference, ConsumerReference, \
BasicAuthReference, KeyAuthReference, OAuth2Reference
from .views import synchronize_api_reference, synchronize_api_references, synchronize_consumer_reference, \
synchronize_consumer_references
from .contrib import ActionButtonModelAdmin
from .widgets import JSONWidget
def get_toggle_enable_caption(obj):
return 'Disable' if obj.enabled else 'Enable'
class PluginConfigurationReferenceInline(admin.StackedInline):
model = PluginConfigurationReference
extra = 0
fields = ('plugin', 'config', 'enabled', 'consumer')
formfield_overrides = {
JSONField: {'widget': JSONWidget(mode='json', width='800px', height='180px', theme='twilight')},
}
class APIReferenceAdmin(ActionButtonModelAdmin):
list_display = ('upstream_url', 'name', 'request_host', 'preserve_host', 'request_path', 'strip_request_path',
'enabled', 'synchronized', 'kong_id')
list_display_buttons = [{
'caption': 'Synchronize',
'url': 'sync-api-ref/',
'view': synchronize_api_reference
}, {
'caption': get_toggle_enable_caption,
'url': 'toggle-enable/',
'view': lambda request, pk: synchronize_api_reference(request, pk, toggle_enable=True)
}]
action_buttons = [{
'caption': 'Synchronize all',
'url': 'sync-api-refs/',
'view': synchronize_api_references
}]
list_select_related = True
fieldsets = (
(None, {
'fields': ('upstream_url', 'name', 'enabled')
}),
(_('Host'), {
'fields': ('request_host', 'preserve_host')
}),
(_('Path'), {
'fields': ('request_path', 'strip_request_path')
}),
(_('Audit'), {
'fields': ('created_at', 'updated_at')
}),
)
inlines = [
PluginConfigurationReferenceInline
]
readonly_fields = ('created_at', 'updated_at')
admin.site.register(APIReference, APIReferenceAdmin)
class BasicAuthInline(admin.StackedInline):
model = BasicAuthReference
extra = 0
fields = ('username', 'password')
class KeyAuthInline(admin.StackedInline):
model = KeyAuthReference
extra = 0
fields = ('key',)
class OAuthInline(admin.StackedInline):
model = OAuth2Reference
extra = 0
fields = ('name', 'redirect_uri', 'client_id', 'client_secret')
class ConsumerReferenceAdmin(ActionButtonModelAdmin):
list_display = ('username_or_custom_id', 'enabled', 'synchronized', 'kong_id')
list_display_buttons = [{
'caption': 'Synchronize',
'url': 'sync-consumer-ref/',
'view': synchronize_consumer_reference
}, {
'caption': get_toggle_enable_caption,
'url': 'toggle-enable/',
'view': lambda request, pk: synchronize_consumer_reference(request, pk, toggle_enable=True)
}]
action_buttons = [{
'caption': 'Synchronize all',
'url': 'sync-consumer-refs/',
'view': synchronize_consumer_references
}]
list_select_related = True
fieldsets = (
(None, {
'fields': ('username', 'custom_id', 'enabled')
}),
(_('Audit'), {
'fields': ('created_at', 'updated_at')
}),
)
readonly_fields = ('created_at', 'updated_at')
inlines = [
BasicAuthInline,
KeyAuthInline,
OAuthInline
]
def username_or_custom_id(self, obj):
return obj.username or obj.custom_id
admin.site.register(ConsumerReference, ConsumerReferenceAdmin)
| 1.75
| 2
|
libs/applus/applus/django/db/models/manager.py
|
cnicgpaul123/killNCP
| 5
|
12777236
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
""" ManagerCacheMixin """
# pylint: disable=too-few-public-methods,no-self-use
from django.conf import settings
from django.core.cache import caches, cache
from django.utils import functional
from django.utils import module_loading
class ManagerCacheMixin:
""" ManagerCacheMixin """
SERIALIZER_CLASS = ""
@functional.cached_property
def serializer_class(self):
""" Return the class to use for the serializer. """
if isinstance(self.SERIALIZER_CLASS, str):
return module_loading.import_string(self.SERIALIZER_CLASS)
return self.SERIALIZER_CLASS
@functional.cached_property
def cache(self):
""" 缓存池 """
try:
return caches[settings.DAO_CACHE_ALIAS]
except AttributeError:
return cache
def get_cache_key(self, **values):
""" 缓存 KEY """
raise NotImplementedError
def clear_cache(self, **values):
""" 清除缓存 """
self.cache.delete(self.get_cache_key(**values))
def load_cache(self, lookup=True, **values):
""" 从缓存加载实例 """
cache_key = self.get_cache_key(**values)
serialized = self.cache.get(cache_key)
if serialized is not None:
return self.model(**serialized)
if not lookup:
return None
instance = self.get(**values)
serialized = self.serializer_class(instance).data
self.cache.set(cache_key, serialized)
return instance
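# A minimal usage sketch (hypothetical model/serializer names, not part of this module):
#
#     class UserManager(ManagerCacheMixin, models.Manager):
#         SERIALIZER_CLASS = "apps.users.serializers.UserSerializer"
#
#         def get_cache_key(self, **values):
#             return "user:{pk}".format(**values)
#
#     user = User.objects.load_cache(pk=1)  # read-through cached lookup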
| 2.109375
| 2
|
tests/test_s3.py
|
calebmarcus/awacs
| 0
|
12777237
|
<gh_stars>0
import unittest
from awacs.s3 import ARN
class TestARN(unittest.TestCase):
def test_aws(self):
arn = ARN("bucket/key", "us-east-1", "account")
self.assertEqual(arn.JSONrepr(), "arn:aws:s3:::bucket/key")
def test_cn(self):
arn = ARN("bucket/key", "cn-north-1", "account")
self.assertEqual(arn.JSONrepr(), "arn:aws-cn:s3:::bucket/key")
def test_gov(self):
arn = ARN("bucket/key", "us-gov-west-1", "account")
self.assertEqual(arn.JSONrepr(), "arn:aws-us-gov:s3:::bucket/key")
| 2.78125
| 3
|
contrastive_rl/ant_envs.py
|
dumpmemory/google-research
| 0
|
12777238
|
<reponame>dumpmemory/google-research
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for loading the AntMaze environments."""
import d4rl
import numpy as np
R = 'r'
G = 'g'
U_MAZE = [[1, 1, 1, 1, 1],
[1, R, G, G, 1],
[1, 1, 1, G, 1],
[1, G, G, G, 1],
[1, 1, 1, 1, 1]]
BIG_MAZE = [[1, 1, 1, 1, 1, 1, 1, 1],
[1, R, G, 1, 1, G, G, 1],
[1, G, G, 1, G, G, G, 1],
[1, 1, G, G, G, 1, 1, 1],
[1, G, G, 1, G, G, G, 1],
[1, G, 1, G, G, 1, G, 1],
[1, G, G, G, 1, G, G, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
HARDEST_MAZE = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, R, G, G, G, 1, G, G, G, G, G, 1],
[1, G, 1, 1, G, 1, G, 1, G, 1, G, 1],
[1, G, G, G, G, G, G, 1, G, G, G, 1],
[1, G, 1, 1, 1, 1, G, 1, 1, 1, G, 1],
[1, G, G, 1, G, 1, G, G, G, G, G, 1],
[1, 1, G, 1, G, 1, G, 1, G, 1, 1, 1],
[1, G, G, 1, G, G, G, 1, G, G, G, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
class AntMaze(d4rl.locomotion.ant.AntMazeEnv):
"""Utility wrapper for the AntMaze environments.
For comparisons in the offline RL setting, we used unmodified AntMaze tasks,
without this wrapper.
"""
def __init__(self, map_name, non_zero_reset=False):
self._goal_obs = np.zeros(29)
if map_name == 'umaze':
maze_map = U_MAZE
elif map_name == 'medium':
maze_map = BIG_MAZE
elif map_name == 'large':
maze_map = HARDEST_MAZE
else:
raise NotImplementedError
super(AntMaze, self).__init__(maze_map=maze_map,
reward_type='sparse',
non_zero_reset=non_zero_reset,
eval=True,
maze_size_scaling=4.0,
high=np.full((58,), np.inf),
dtype=np.float32)
def reset(self):
super(AntMaze, self).reset()
goal_xy = self._goal_sampler(np.random)
state = self.sim.get_state()
state = state._replace(
qpos=np.concatenate([goal_xy, state.qpos[2:]]))
self.sim.set_state(state)
for _ in range(50):
self.do_simulation(np.zeros(8), self.frame_skip)
self._goal_obs = self.BASE_ENV._get_obs(self).copy() # pylint: disable=protected-access
super(AntMaze, self).reset()
return self._get_obs()
def step(self, action):
super(AntMaze, self).step(action)
s = self._get_obs()
dist = np.linalg.norm(self._goal_obs[:2] - s[:2])
# Distance threshold from [RIS, Chane-Sane '21] and [UPN, Srinivas '18].
r = (dist <= 0.5)
done = False
info = {}
return s, r, done, info
def _get_obs(self):
assert self._expose_all_qpos # pylint: disable=protected-access
s = self.BASE_ENV._get_obs(self) # pylint: disable=protected-access
return np.concatenate([s, self._goal_obs]).astype(np.float32)
def _get_reset_location(self):
if np.random.random() < 0.5:
return super(AntMaze, self)._get_reset_location()
else:
return self._goal_sampler(np.random)
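# A minimal usage sketch (assumes d4rl and its MuJoCo dependencies are installed):
#
#     env = AntMaze('umaze')
#     obs = env.reset()                            # 58-dim: 29-dim state + 29-dim goal
#     obs, r, done, info = env.step(np.zeros(8))   # the Ant action space is 8-dim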
| 1.976563
| 2
|
research_mnist/test.py
|
Gaon-Choi/CSE4007
| 0
|
12777239
|
<reponame>Gaon-Choi/CSE4007
import matplotlib.pyplot as plt
import sklearn.linear_model
import sklearn.discriminant_analysis
import sklearn.svm
import sklearn.neighbors
import sklearn.neural_network
from sklearn import datasets
from sklearn.model_selection import train_test_split
import numpy as np
import time
from openTSNE.sklearn import TSNE
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
MAX_ITER = 1000000
mnist = datasets.load_digits()
# flatten the images
n_samples = len(mnist.images)
data = mnist.data.reshape((n_samples, -1))
X_train, X_test, y_train, y_test = train_test_split(data, mnist.target, test_size = 0.3, shuffle=False)
tsne = TSNE(n_components=2)  # 2-D t-SNE embedding; openTSNE's sklearn wrapper can embed new points
X2D = tsne.fit_transform(X_train)
X2D_ = tsne.transform(X_test)
clf = sklearn.neural_network.MLPClassifier(
solver='sgd', alpha=1e-5, hidden_layer_sizes=(64, 10, 10), max_iter=MAX_ITER, activation='relu'
)
clf.fit(X2D, y_train)
grid_size = 500
A, B = np.meshgrid(np.linspace(X2D_[:, 0].min(), X2D_[:, 0].max(), grid_size),
np.linspace(X2D_[:, 1].min(), X2D_[:, 1].max(), grid_size))
C = clf.predict( np.hstack([A.reshape(-1, 1), B.reshape(-1, 1)]) ).reshape(grid_size, grid_size)
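# C holds the predicted class at every grid point; contourf shades the plane by
# class, which renders the classifier's decision boundary in the 2-D embedding.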
plt.contourf(A, B, C, alpha=0.3, cmap=plt.cm.gnuplot2)
predicted = clf.predict(X2D_)
accuracy = (predicted == y_test).mean()
plt.xlabel("Accuracy: " + str(round(accuracy, 2)))
colors = 'r', 'g', 'b', 'c', 'm', 'y', 'k', 'yellow', 'orange', 'purple'
for i, c, label in zip(range(len(X2D_)), colors, mnist.target_names):
plt.scatter(X2D_[y_test == i, 0], X2D_[y_test == i, 1], c=c, label=label, s=10)
plt.title("Decision Boundary : MLP classifier(with ReLU)")
plt.show()
| 2.546875
| 3
|
Interface/Main.py
|
cevhersoylemez/DecisionTreeForClassification
| 2
|
12777240
|
from tkinter import *
import tkinter as tk
from tkinter import filedialog
from pandastable import Table, TableModel
import pandas as pd
from Hesapla import MC_Karar_Agaci
# required variables
test_sinir_indeks = 0
# create the window
root = Tk()
root.title("Karar Ağacı Projesi")
root.geometry("1480x800")
frame1 = Frame(root)
frame1.pack()
# add the title label
title = Label(frame1,text="\n Cevher ile Muhammed'in sınıflandırma için otomatik karar ağacı programına hoşgeldiniz..\n\n",font=16,fg="purple")
title.grid()
def Load():
root2 = Tk()
root2.withdraw()
global file_path
file_path = filedialog.askopenfilename(filetypes=(("All files", "*.*"),("Csv Files", "*.csv"),("Data Files", "*.data")))
global dataset
dataset = pd.read_csv(file_path)
frame2 = Frame(root)
    frame2.pack(side=BOTTOM)  # place the frame at the bottom of the window.
pt = Table(frame2, dataframe=dataset, showstatusbar=True, showtoolbar=True,width=1000,height=500)
pt.show()
def getResult():
root3 = Tk()
root3.title("Model - Başarı")
root3.geometry("1480x800")
test_sinir_indeks = int(trainingLimitEntry.get())
trainData = dataset.iloc[0:test_sinir_indeks] # train
testData = dataset.iloc[test_sinir_indeks:dataset.shape[0]+1] # test
    # build the model.
MC = MC_Karar_Agaci()
hedefNitelikAdi = targetColumnEntry.get()
R,model = MC.modelOlustur(trainData, hedefNitelikAdi)
    # make a prediction
    print("\n")
    sonuc = MC.tahminEt(root=R, test=testData, i=test_sinir_indeks)  # i: index at which the test data starts.
print("Tahmin sonucu :", sonuc)
frame3 = Frame(root3)
frame3.pack(side=LEFT)
listbox = Listbox(frame3,width=50, height=50,font=16)
for i in model:
listbox.insert(END,i)
listbox.pack(fill=BOTH, expand=0)
frame4 = Frame(root3)
frame4.pack(side=RIGHT)
score=0
index = 0
for i in testData[hedefNitelikAdi]:
if i == sonuc[index]:
score = score + 1
if len(sonuc)-1 == index:
break
index = index + 1
accuracy_score = score / len(testData[hedefNitelikAdi])
print(accuracy_score)
    lines = []
    lines.append("Sonuçlar")
    lines.append("Accuracy Score : " + str(accuracy_score))
    lines.append("")
    lines.append("")
    for i in range(len(sonuc)):
        lines.append("P:" + str(sonuc[i]) + " T:" + str(testData.iloc[i][hedefNitelikAdi]))
    listbox2 = Listbox(frame4, width=50, height=50, font=16)
    for i in lines:
        listbox2.insert(END, i)
    listbox2.pack(fill=BOTH, expand=0)
root3.mainloop()
LoadDatasetBtn = Button(frame1, text=" Dataset seç ", fg="blue", command=Load,font=16)
LoadDatasetBtn.grid(row=2)
spacerLabel = Label(frame1,text=" ")
spacerLabel.grid(row=3, column=0, sticky=W, pady=1)
targetColumnLabel = Label(frame1,text="Hedef Kolonu Giriniz: \n",font=14)
targetColumnLabel.grid(row=4, column=0, sticky=W, pady=1)
targetColumnEntry = Entry(frame1,font=14)
targetColumnEntry.grid(row=4, column=0,sticky=N)
maxDeptLabel = Label(frame1,text="*iptal* Maksimum Derinlik: \n",font=14)
maxDeptLabel.grid(row=5, column=0, sticky=W)
maxDeptEntry = Entry(frame1,font=14)
maxDeptEntry.grid(row=5,column=0,sticky=N)
trainingLimitLabel = Label(frame1,text="Eğitim veriseti sınır indeksi:\n",font=14)
trainingLimitLabel.grid(row=6, column=0, sticky=W)
trainingLimitEntry = Entry(frame1,font=14)
trainingLimitEntry.grid(row=6, column = 0,sticky=N)
getResultBtn = Button(frame1,text="Sonuçları Göster",fg="green" ,command=getResult,font=16)
getResultBtn.grid(row=7)
root.mainloop()
| 2.90625
| 3
|
projects/examples/compare_gpa_and_knxproj.py
|
fgoettel/knx
| 0
|
12777241
|
<gh_stars>0
#!/usr/bin/env python3
"""Compare knx GAs from ETS and GPA export."""
import argparse
import logging
from pathlib import Path
from typing import Tuple
from projects.gpa import Gpa
from projects.knxproj import Knxproj
def get_args() -> Tuple[Path, Path]:
"""Set up the parser.
Returns a tuple with (gpa_path, knxproj_path).
"""
# Setup argument parser
description = "Compare knx GAs from ETS and GPA export."
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--gpa", type=str, required=True, help="Path to the gpa .gpa export."
)
parser.add_argument(
"--knxproj", type=str, required=True, help="Path to the ETS .knxproj export."
)
# Parse arguments
args = parser.parse_args()
gpa_path = Path(args.gpa)
knxproj_path = Path(args.knxproj)
return (gpa_path, knxproj_path)
def main():
"""Compare knx GAs from ETS and GPA export."""
gpa_path, knxproj_path = get_args()
# Run
gpa_addresses = Gpa(gpa_path=gpa_path).groupaddresses
ets_addresses = Knxproj(knxproj_path=knxproj_path).groupaddresses
# Put them to two dicts
gpa_dict = {ga.address: ga for ga in gpa_addresses}
ets_dict = {ga.address: ga for ga in ets_addresses}
# Display all from the gpa and their counterpart from the ETS
for key in sorted(gpa_dict.keys()):
lhs = gpa_dict[key]
try:
rhs = ets_dict[key]
except KeyError:
logging.error(
"GA in gpa but not in knxproj: \n\t%s",
lhs,
)
continue
if not lhs.almost_equal(rhs):
logging.info("Unequal:\n\t%s\n\t%s", lhs, rhs)
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()
| 3.09375
| 3
|
src/384-ShuffleanArray.py
|
Jiezhi/myleetcode
| 1
|
12777242
|
<reponame>Jiezhi/myleetcode
#!/usr/bin/env python
"""
CREATED AT: 2021/8/23
Des:
https://leetcode.com/problems/shuffle-an-array/
https://leetcode.com/explore/featured/card/top-interview-questions-easy/98/design/670/
GITHUB: https://github.com/Jiezhi/myleetcode
"""
import random
from typing import List
from itertools import permutations, cycle
class Solution2:
"""
sample 196 ms submission
"""
def __init__(self, nums: List[int]):
self.reset = lambda: nums
c = cycle(permutations(nums))
self.shuffle = lambda: list(next(c))
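        # Note: cycling n! precomputed orderings is a benchmark trick that only
        # scales to tiny inputs; Solution below uses the general Fisher-Yates
        # shuffle via random.shuffle.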
class Solution:
"""
10 / 10 test cases passed.
Status: Accepted
Runtime: 264 ms
Memory Usage: 19.5 MB
"""
def __init__(self, nums: List[int]):
self.nums = nums
self.copy = self.nums.copy()
def reset(self) -> List[int]:
"""
Resets the array to its original configuration and return it.
"""
self.nums = self.copy.copy()
return self.nums
def shuffle(self) -> List[int]:
"""
Returns a random shuffling of the array.
"""
random.shuffle(self.nums)
return self.nums
def test():
nums = [1, 2, 3]
solution = Solution(nums=nums)
assert solution.reset() == nums
ret = solution.shuffle()
    assert len(ret) == len(nums)
for i in nums:
assert i in ret
assert solution.reset() == nums
if __name__ == '__main__':
test()
| 3.859375
| 4
|
get_moods_detail.py
|
MentalKali/QQZoneSpider
| 0
|
12777243
|
<gh_stars>0
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
获取动态详情
包含3个方法:
make_dict -- 用于临时保存每个QQ的动态信息,QQ号为键,值为这个QQ号的所有动态的文件列表
exact_mood_data -- 主要的功能函数,把动态信息从文件里提取出来,并调用insert_to_db方法插入到sqlite数据库中
insert_to_db -- 供exact_mood_data调用,把数据插入到sqlite数据库中
"""
import os
import json
import sqlite3
import html
class Get_detail(object):
''' Get moods detail information and save it to database'''
def __init__(self, conn, cur):
self.count = 0
self.conn = conn
self.cur = cur
def make_dict(self):
mood_dict = dict()
dir_list = os.listdir('mood_result')
for d in dir_list:
file_list = os.listdir('mood_result/' + d)
if len(file_list) != 1:
mood_dict[d] = file_list
return mood_dict
def exact_mood_data(self, qq, fname):
'''Get mood data from files in result folder
'''
qqnumber = qq
filename = fname
with open(filename, encoding="utf-8") as f:
con = f.read()
con_dict = json.loads(con[10:-2])
try:
moods = con_dict['msglist']
except KeyError:
return
        if moods is None:
return
mood_item = dict()
mood_item['belong'] = qqnumber
for mood in moods:
mood_item['content'] = mood['content']
mood_item['create_time'] = mood['created_time']
mood_item['comment_num'] = mood['cmtnum']
mood_item['phone'] = mood['source_name']
mood_item['pic'] = mood['pic'][0]['url2'] if 'pic' in mood else ''
mood_item['locate'] = mood['story_info']['lbs']['name'] if 'story_info' in mood else ''
if mood_item['content'] == '' and mood_item['pic'] != '':
# if the mood only has pic but no other thing
mood_item['content'] = mood_item['pic']
if mood_item['content'] == '' and 'rt_con' in mood:
                # If the mood content is a forwarded video,
                # it is stored in mood['rt_con']
try:
mood_item['content'] = mood['rt_con']['conlist'][0]['con']
except IndexError:
mood_item['content'] = mood['rt_con']['conlist'][1]['con']
except KeyError:
# when the mood only has a link
mood_item['content'] = mood['rt_con']['content']
except TypeError:
# when the mood only has a video
mood_item['content'] = mood['video'][0]['url3']
print('Dealing with QQ: %s, total moods count: %d' % (qqnumber, self.count))
self.insert_to_db(mood_item)
self.count += 1
if self.count % 1000 == 0:
self.conn.commit()
def insert_to_db(self, mood):
sql = 'INSERT INTO moods (qq, ctime, content, comment_count, phone, image, locate) VALUES (?, ?, ?, ?, ?, ?, ?)'
self.cur.execute(sql, (mood['belong'], mood['create_time'], mood['content'], mood['comment_num'], mood['phone'], mood['pic'], mood['locate']))
if __name__ == '__main__':
conn = sqlite3.connect('moods.sqlite')
cur = conn.cursor()
app = Get_detail(conn, cur)
mood_dict = app.make_dict()
for dirname, fname in mood_dict.items():
for each_file in fname:
filename = os.path.join('mood_result', dirname, each_file)
app.exact_mood_data(dirname, filename)
else:
conn.commit()
cur.close()
conn.close()
print('Finish!')
| 3.109375
| 3
|
unity_env.py
|
ostamand/continuous-control
| 2
|
12777244
|
import numpy as np
import torch
class UnityEnv():
"""Unity Reacher Environment Wrapper
https://github.com/Unity-Technologies/ml-agents/blob/master/docs/Learning-Environment-Examples.md
"""
def __init__(self, env_file='data/Reacher.exe', no_graphics=True, mlagents=False):
if mlagents:
from mlagents.envs.environment import UnityEnvironment
else:
from unityagents import UnityEnvironment
self.env = UnityEnvironment(file_name=env_file, no_graphics=no_graphics)
self.brain_name = self.env.brain_names[0]
brain = self.env.brains[self.brain_name]
self.action_size = brain.vector_action_space_size
if type(self.action_size) != int:
self.action_size = self.action_size[0]
env_info = self.env.reset(train_mode=True)[self.brain_name]
self.state_size = env_info.vector_observations.shape[1]
self.num_agents = len(env_info.agents)
def reset(self, train=True):
env_info = self.env.reset(train_mode=train)[self.brain_name]
return env_info.vector_observations
def close(self):
self.env.close()
def step(self, actions):
actions = np.clip(actions, -1, 1)
env_info = self.env.step(actions)[self.brain_name]
next_states = env_info.vector_observations
rewards = env_info.rewards
dones = env_info.local_done
return next_states, np.array(rewards), np.array(dones)
@property
def action_shape(self):
return (self.num_agents, self.action_size)
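# A minimal usage sketch (assumes the Reacher binary referenced above is present):
#
#     env = UnityEnv(env_file='data/Reacher.exe', no_graphics=True)
#     states = env.reset()
#     actions = np.random.uniform(-1, 1, env.action_shape)
#     next_states, rewards, dones = env.step(actions)
#     env.close()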
| 2.578125
| 3
|
sec09-3_apply_cfg_netmiko/apply_cfg.py
|
codered-by-ec-council/Network-Automation-in-Python
| 0
|
12777245
|
<gh_stars>0
#!/usr/bin/env python3
import argparse
import getpass
import netmiko
import os
def get_creds_interactive(get_secret=False):
"""
Function to interactively query for network device credentials
:param get_secret: Optional argument to query for enable or secret. Defaults to False and sec set to pwd.
:return: uname, pwd, sec
"""
uname = input("Enter Username: ")
pwd = getpass.getpass("Enter Password: ")
# TODO: Add option to bypass asking for enable or secret password if not needed
if get_secret:
sec = getpass.getpass("Enter Secret: ")
else:
sec = pwd
return uname, pwd, sec
def netmiko_send_cfg_list(conn_obj, cfg_file, save_cfg=False):
"""
    Configure a device with a list of configuration commands read from a file
:param conn_obj: Netmiko Connection Object (Established SSH session to a specific device)
:param cfg_file: text file with configuration
:param save_cfg: Optional boolean to save updated configuration. Defaults to false.
:return:
"""
# Check to make sure the file exists
if os.path.exists(cfg_file):
# Open configuration file and read in each line
with open(cfg_file) as fh:
cmd_list = fh.readlines()
results = conn_obj.send_config_set(cmd_list)
if save_cfg:
results += conn_obj.send_command("write mem")
print(f"Configuration has been saved!\n{results}")
else:
print("Alert! Configuration has not been saved!")
return results
def netmiko_send_cfg_file(conn_obj, cfg_file, save_cfg=False):
    """
    Configure a device from a configuration file, letting Netmiko read the file
    :param conn_obj: Netmiko Connection Object (Established SSH session to a specific device)
    :param cfg_file: text file with configuration
    :param save_cfg: Optional boolean to save updated configuration. Defaults to false.
    :return: results
    """
    # Initialize results in case the file exists test fails since the function returns the results value.
    results = ""
# Check to make sure the file exists
if os.path.exists(cfg_file):
        # Notice Netmiko handles opening the file for us!
results = conn_obj.send_config_from_file(cfg_file)
if save_cfg:
results += conn_obj.save_config()
print(f"Configuration has been saved!\n{results}")
else:
print("Alert! Configuration has not been saved!")
return results
def main():
"""
This script will apply the commands found in a configuration file to a device.
    Both the device FQDN or IP and the config text file must be provided via the command line.
"""
# Establish Connection to Device
username, password, secret = get_creds_interactive()
dev_dict = {
"device_type": "arista_eos",
"host": arguments.device,
"username": username,
"password": password,
"port": 22,
"secret": secret,
"verbose": True,
}
# Initialize the Connection Object to the device
dev_conn_obj = netmiko.ConnectHandler(**dev_dict)
    # Send a list of configuration commands
    res = netmiko_send_cfg_list(
        dev_conn_obj, arguments.cfg_file, save_cfg=arguments.save_config
    )
# Send commands from a file
# res = netmiko_send_cfg_file(dev_conn_obj, arguments.cfg_file, save_cfg=arguments.save_config)
print(res)
dev_conn_obj.disconnect()
# Standard call to the main() function.
if __name__ == "__main__":
    # Initialize the Argument Parser object and include a description of the script and a hint on how to use
parser = argparse.ArgumentParser(
description="Script to send configuration commands in a file to a network device.",
epilog="Usage: ' python apply_cfg.py switch config_file' ",
)
# Add required positional arguments
parser.add_argument("device", type=str, help="Device name or IP")
parser.add_argument("cfg_file", type=str, help="Configuration File to Apply")
# Add optional command line arguments
parser.add_argument(
"-s",
"--save_config",
action="store_true",
help="Use -s in the CLI to save the configuration. By default the configuration on the device is not saved",
default=False,
)
arguments = parser.parse_args()
main()
| 3.25
| 3
|
Pacote Dowload/CursoemVideo/ex 090.py
|
AMF1971/Cursoemvideo-Python
| 0
|
12777246
|
# Write a program that reads a student's name and average grade, also storing their
# pass/fail status in a dictionary. At the end, display the structure's contents on screen.
aluno = dict()
aluno['nome'] = str(input('Nome:'))
aluno['média'] = float(input(f'Média de {aluno["nome"]}: '))
if aluno['média'] >= 7:
aluno['situação'] = 'APROVADO'
elif 5 <= aluno['média'] < 7:
aluno['situação'] = 'RECUPERAÇÃO'
else:
aluno['situação'] = 'REPROVADO'
print('-=' * 30 )
for k, v in aluno.items():
print(f' - {k} é igual a {v}')
| 3.796875
| 4
|
Stacks/first non repeating char in stream.py
|
mr-mornin-star/problemSolving
| 0
|
12777247
|
<gh_stars>0
from collections import deque
from collections import defaultdict
class Solution:
# @param A : string
# @return a strings
    def solve(self, a):
        mem = defaultdict(int)  # per-character occurrence counts
        q = deque()             # candidate non-repeating characters, in arrival order
        ans = []
        for c in a:
            if c not in mem:
                q.append(c)
            mem[c] += 1
            # drop front candidates once they have repeated
            while len(q) != 0 and mem[q[0]] > 1:
                q.popleft()
            if len(q) == 0:
                ans.append("#")
            else:
                ans.append(q[0])
        return "".join(ans)
"""
First non-repeating character
Problem Description
Given a string A denoting a stream of lowercase alphabets.
You have to make new string B. B is formed such that we have to find first non-repeating character each time a character is inserted to the stream and append it at the end to B. if no non-repeating character is found then append '#' at the end of B.
Problem Constraints
1 <= |A| <= 100000
Input Format
The only argument given is string A.
Output Format
Return a string B after processing the stream of lowercase alphabets A.
Example Input
Input 1:
A = "abadbc"
Input 2:
A = "abcabc"
Example Output
Output 1:
"aabbdd"
Output 2:
"aaabc#"
Example Explanation
Explanation 1:
"a" - first non repeating character 'a'
"ab" - first non repeating character 'a'
"aba" - first non repeating character 'b'
"abad" - first non repeating character 'b'
"abadb" - first non repeating character 'd'
"abadbc" - first non repeating character 'd'
Explanation 2:
"a" - first non repeating character 'a'
"ab" - first non repeating character 'a'
"abc" - first non repeating character 'a'
"abca" - first non repeating character 'b'
"abcab" - first non repeating character 'c'
"abcabc" - no non repeating character so '#'
"""
| 3.46875
| 3
|
Skoarcery/factoary/Code_Parser_Py.py
|
sofakid/Skoarcery
| 343
|
12777248
|
<filename>Skoarcery/factoary/Code_Parser_Py.py
import unittest
from Skoarcery import langoids, terminals, nonterminals, dragonsets, parsetable, emissions
from Skoarcery.langoids import Terminal, Nonterminal
class Code_Parser_Py(unittest.TestCase):
def setUp(self):
terminals.init()
nonterminals.init()
langoids.init()
dragonsets.init()
parsetable.init()
emissions.init()
def test_PY_rdpp(self):
from Skoarcery.dragonsets import FIRST, FOLLOW
from Skoarcery.terminals import Empty
fd = open("SkoarPyon/rdpp.py", "w")
PY = emissions.PY
PY.fd = fd
# Header
# Imports
# class SkoarParseException
# class SkoarParser:
# __init__
# fail
self.code_start()
PY.tab += 1
N = nonterminals.nonterminals.values()
# precompute desirables
PY.method("init_desirables")
for A in N:
R = A.production_rules
PY.nl()
PY.cmt(str(A))
# each production
for P in R:
if P.derives_empty:
continue
# A -> alpha
alpha = P.production
desires = FIRST(alpha)
if Empty in desires:
desires.discard(Empty)
desires.update(FOLLOW(A))
i = 0
n = len(desires)
PY.dict_set("self.desirables", str(P), "[", end="")
for toke in desires:
PY.raw(toke.toker_name)
i += 1
if i != n:
if i % 5 == 0:
PY.raw(",\n")
PY.stmt(" ", end="")
else:
PY.raw(", ")
else:
PY.raw("]\n")
PY.end()
# write each nonterminal as a function
for A in N:
R = A.production_rules
#PY.cmt(str(A))
PY.stmt("def " + A.name + "(self, parent):")
PY.tab += 1
PY.stmt("self.tab += 1")
if A.intermediate:
PY.var("noad", "parent")
else:
PY.var("noad", PY.v_new("SkoarNoad", PY.v_sym(A.name), "parent"))
PY.nl()
#PY.code_line("print('" + A.name + "')")
for P in R:
if P.derives_empty:
continue
# A -> alpha
alpha = P.production
PY.stmt("desires = " + PY.v_dict_get("self.desirables", str(P)))
PY.if_("self.toker.sees(desires)")
#PY.print(str(P))
for x in alpha:
if isinstance(x, Terminal):
PY.stmt("noad.add_toke('" + x.toker_name + "', self.toker.burn(" + x.toker_name + "))")
#PY.print("burning: " + x.name)
else:
if x.intermediate:
PY.stmt("self." + x.name + "(noad)")
else:
PY.stmt("noad.add_noad(self." + x.name + "(noad))")
else:
PY.return_("noad")
PY.tab -= 1
PY.nl()
if A.derives_empty:
PY.cmt("<e>")
#PY.print("burning empty")
PY.return_("noad")
else:
PY.cmt("Error State")
PY.stmt("self.fail()")
PY.tab -= 1
PY.nl()
PY.tab -= 1
fd.close()
def code_start(self):
from Skoarcery.terminals import Empty
PY = emissions.PY
PY.file_header("rdpp", "PyRDPP - Create Recursive Descent Predictive Parser")
s = "from Skoarcery.SkoarPyon.apparatus import SkoarNoad\n"\
"from Skoarcery.SkoarPyon.lex import "
T = terminals.tokens.values()
n = len(T)
i = 0
for t in T:
if t == Empty:
n -= 1
continue
s += t.toker_name
i += 1
if i < n:
if i % 5 == 0:
s += ", \\\n "
else:
s += ", "
PY.raw(s + """
class SkoarParseException(Exception):
pass
class SkoarParser:
def __init__(self, runtime):
self.runtime = runtime
self.toker = runtime.toker
self.tab = 0
self.desirables = dict()
self.init_desirables()
def fail(self):
self.toker.dump()
raise SkoarParseException
@property
def tabby(self):
if self.tab == 0:
return ""
return ("{:>" + str(self.tab * 2) + "}").format(" ")
def print(self, line, end):
print(self.tabby + line, end=end)
""")
| 2.359375
| 2
|
tests/test_permissions.py
|
radiac/django-fastview
| 8
|
12777249
|
"""
Test fastview/permissions.py
"""
import pytest
from fastview.permissions import Django, Login, Owner, Public, Staff, Superuser
from .app.models import Entry
def test_public__public_can_access(test_data, request_public):
perm = Public()
assert perm.check(request_public) is True
assert perm.filter(request_public, test_data).count() == test_data.count()
def test_login__public_cannot_access(test_data, request_public):
perm = Login()
assert perm.check(request_public) is False
assert perm.filter(request_public, test_data).count() == 0
def test_login__authed_can_access(test_data, request_owner):
perm = Login()
assert perm.check(request_owner) is True
assert perm.filter(request_owner, test_data).count() == test_data.count()
def test_staff__public_cannot_access(test_data, request_public):
perm = Staff()
assert perm.check(request_public) is False
assert perm.filter(request_public, test_data).count() == 0
def test_staff__authed_cannot_access(test_data, request_owner):
perm = Staff()
assert perm.check(request_owner) is False
assert perm.filter(request_owner, test_data).count() == 0
def test_staff__staff_can_access(test_data, request_staff):
perm = Staff()
assert perm.check(request_staff) is True
assert perm.filter(request_staff, test_data).count() == test_data.count()
def test_superuser__public_cannot_access(test_data, request_public):
perm = Superuser()
assert perm.check(request_public) is False
assert perm.filter(request_public, test_data).count() == 0
def test_superuser__authed_cannot_access(test_data, request_owner):
perm = Superuser()
assert perm.check(request_owner) is False
assert perm.filter(request_owner, test_data).count() == 0
def test_superuser__staff_cannot_access(test_data, request_staff):
perm = Superuser()
assert perm.check(request_staff) is False
assert perm.filter(request_staff, test_data).count() == 0
def test_superuser__superuser_can_access(test_data, request_superuser):
perm = Superuser()
assert perm.check(request_superuser) is True
assert perm.filter(request_superuser, test_data).count() == test_data.count()
def test_django__public_cannot_access(test_data, request_public):
perm = Django(action="add")
assert perm.check(request_public, model=Entry) is False
assert perm.filter(request_public, test_data).count() == 0
def test_django__authed_cannot_access(test_data, request_owner):
perm = Django(action="add")
assert perm.check(request_owner, model=Entry) is False
assert perm.filter(request_owner, test_data).count() == 0
def test_django__staff_cannot_access(test_data, request_staff):
perm = Django(action="add")
assert perm.check(request_staff, model=Entry) is False
assert perm.filter(request_staff, test_data).count() == 0
def test_django__superuser_can_access(test_data, request_superuser):
perm = Django(action="add")
assert perm.check(request_superuser, model=Entry) is True
assert perm.filter(request_superuser, test_data).count() == test_data.count()
@pytest.mark.django_db
def test_django__user_with_permission_can_access(
test_data, request_other, user_other, add_entry_permission
):
user_other.user_permissions.add(add_entry_permission)
perm = Django(action="add")
assert perm.check(request_other, model=Entry) is True
assert perm.filter(request_other, test_data).count() == test_data.count()
def test_owner__public_cannot_access(test_data, request_public):
perm = Owner(owner_field="author")
# Test data is ordered, the first is owned by user_owner
owned = test_data.first()
assert perm.check(request_public, instance=owned) is False
assert perm.filter(request_public, test_data).count() == 0
def test_owner__owner_can_access_theirs(test_data, request_owner, user_owner):
perm = Owner(owner_field="author")
owned = test_data.first()
assert perm.check(request_owner, instance=owned) is True
assert perm.filter(request_owner, test_data).count() == 2
assert perm.filter(request_owner, test_data).filter(author=user_owner).count() == 2
def test_owner__other_can_access_theirs(test_data, request_other, user_other):
perm = Owner(owner_field="author")
owned = test_data.first()
assert perm.check(request_other, instance=owned) is False
assert perm.filter(request_other, test_data).count() == 2
assert perm.filter(request_other, test_data).filter(author=user_other).count() == 2
def test_owner__staff_cannot_access(test_data, request_staff):
perm = Owner(owner_field="author")
owned = test_data.first()
assert perm.check(request_staff, instance=owned) is False
assert perm.filter(request_staff, test_data).count() == 0
def test_owner__superuser_cannot_access(test_data, request_superuser):
perm = Owner(owner_field="author")
owned = test_data.first()
assert perm.check(request_superuser, instance=owned) is False
assert perm.filter(request_superuser, test_data).count() == 0
def test_and__owner_and_staff__owner_cannot_access(test_data, request_owner):
perm = Owner(owner_field="author") & Staff()
owned = test_data.first()
assert perm.check(request_owner, instance=owned) is False
assert perm.filter(request_owner, test_data).count() == 0
def test_and__owner_and_staff__staff_cannot_access(test_data, request_staff):
perm = Owner(owner_field="author") & Staff()
owned = test_data.first()
assert perm.check(request_staff, instance=owned) is False
assert perm.filter(request_staff, test_data).count() == 0
def test_and__owner_and_staff__staff_owner_can_access(
test_data, request_owner, user_owner
):
perm = Owner(owner_field="author") & Staff()
owned = test_data.first()
user_owner.is_staff = True
user_owner.save()
assert perm.check(request_owner, instance=owned) is True
assert perm.filter(request_owner, test_data).count() == 2
def test_or__owner_or_staff__owner_can_access(test_data, request_owner):
perm = Owner(owner_field="author") | Staff()
owned = test_data.first()
assert perm.check(request_owner, instance=owned) is True
assert perm.filter(request_owner, test_data).count() == 2
def test_or__owner_or_staff__staff_can_access(test_data, request_staff):
perm = Owner(owner_field="author") | Staff()
owned = test_data.first()
assert perm.check(request_staff, instance=owned) is True
assert perm.filter(request_staff, test_data).count() == 4
def test_or__owner_or_staff__staff_owner_can_access(
test_data, request_owner, user_owner
):
perm = Owner(owner_field="author") | Staff()
owned = test_data.first()
user_owner.is_staff = True
user_owner.save()
assert perm.check(request_owner, instance=owned) is True
assert perm.filter(request_owner, test_data).count() == 4
def test_or__owner_or_staff__other_cannot_access(test_data, request_other, user_other):
perm = Owner(owner_field="author") | Staff()
owned = test_data.first()
assert perm.check(request_other, instance=owned) is False
assert perm.filter(request_other, test_data).count() == 2
assert perm.filter(request_other, test_data).filter(author=user_other).count() == 2
def test_not__not_owner__all_can_access_all_except_own(
test_data, request_owner, user_owner
):
perm = ~Owner(owner_field="author")
owned = test_data.first()
not_owned = test_data.exclude(author=user_owner).first()
assert perm.check(request_owner, instance=owned) is False
assert perm.check(request_owner, instance=not_owned) is True
assert perm.filter(request_owner, test_data).count() == 2
assert perm.filter(request_owner, test_data).filter(author=user_owner).count() == 0
def test_and_not__staff_not_owner__staff_can_access_all_except_own(
test_data, request_owner, user_owner
):
perm = Staff() & ~Owner(owner_field="author")
owned = test_data.first()
not_owned = test_data.exclude(author=user_owner).first()
user_owner.is_staff = True
user_owner.save()
assert perm.check(request_owner, instance=owned) is False
assert perm.check(request_owner, instance=not_owned) is True
assert perm.filter(request_owner, test_data).count() == 2
assert perm.filter(request_owner, test_data).filter(author=user_owner).count() == 0
| 2.3125
| 2
|
enigma/__version__.py
|
axevalley/enigma
| 0
|
12777250
|
"""Tabler."""
__title__ = "enigma"
__description__ = "Enigma emulator"
__url__ = ""
__version__ = "0.1"
__author__ = "<NAME>"
__author_email__ = "<EMAIL>"
__license__ = "MIT"
__copyright__ = "Copyright 2019 <NAME>"
| 1.054688
| 1
|