code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
import pyven.constants
from pyven.steps.step import Step
from pyven.checkers.checker import Checker
from pyven.logging.logger import Logger
from pyven.reporting.content.step import StepListing
class Clean(Step):
    """Pipeline step that runs every project's cleaning tools."""

    def __init__(self, verbose, nb_threads=1):
        super(Clean, self).__init__(verbose)
        self.name = 'clean'
        self.checker = Checker('Cleaning')
        self.nb_threads = nb_threads

    def process(self):
        """Run the clean step across all projects in parallel."""
        return self._process_parallel()

    @Step.error_checks
    def _process(self, project):
        """Clean one project; True only when every tool succeeded.

        All builders and preprocessors are always run (no short-circuit),
        so one failing tool does not prevent the others from cleaning.
        """
        results = [tool.clean(self.verbose) for tool in project.builders]
        results += [tool.clean(self.verbose) for tool in project.preprocessors]
        success = all(results)
        if success:
            project.status = pyven.constants.STATUS[0]
            Logger.get().info(self.name + ' completed')
        else:
            project.status = pyven.constants.STATUS[1]
            Logger.get().error(self.name + ' errors found')
        return success

    def report_content(self):
        """Collect builder report listings for every known project."""
        listings = [builder.report_content()
                    for project in Step.PROJECTS
                    for builder in project.builders]
        if self.checker.enabled():
            listings.append(self.checker.report_content())
        return StepListing(title=self.title(), status=self.report_status(), listings=listings, enable_summary=True)
|
mgaborit/pyven
|
source/pyven/steps/clean.py
|
Python
|
mit
| 1,475
|
"""
HTML and RSS based publisher.
"""
from GitAchievements.publisher.base import Publisher
import jinja2
import os.path
import re
class HtmlPublisher(Publisher):
    """
    HTML and RSS based publisher.
    """

    def __init__(self, app):
        """
        Sets up the HTML and RSS publisher.
        """
        super(HtmlPublisher, self).__init__(app)
        get_value = self.config.get_value
        self.html_template = get_value(
            'achievement', 'publisher_html_template_html',
            default = 'original.html')
        self.rss_template = get_value(
            'achievement', 'publisher_html_template_rss',
            default = 'original.rss')
        template_dir = os.path.join(app.achievements_dir, 'templates')
        self.jinja_env = jinja2.Environment(
            loader = jinja2.FileSystemLoader(template_dir))
        self.jinja_env.filters['regex_replace'] = regex_replace

    def publish(self, app):
        """
        Publish achievements to an HTML file and an RSS file.
        """
        by_name = lambda a: a.name
        context = {
            'all_achievements': sorted(app.all_achievements, key = by_name),
            'unlocked_achievements': sorted(app.unlocked_achievements, key = by_name),
            'locked_achievements': app.locked_achievements(),
            'points': app.get_points(),
        }
        # Expose every configuration section to the templates as a dict.
        for section in self.config.sections():
            context[section] = dict(
                (option, self.config.get_value(section, option))
                for option in self.config.options(section))
        # Render both documents before writing either file.
        html = self.jinja_env.get_template(self.html_template).render(context)
        rss = self.jinja_env.get_template(self.rss_template).render(context)
        html_file = os.path.join(app.achievements_dir, 'index.html')
        rss_file = os.path.join(app.achievements_dir, 'index.rss')
        for path, content in ((html_file, html), (rss_file, rss)):
            with open(path, 'w') as out_fh:
                out_fh.write(content)
        return [html_file, rss_file]
def regex_replace(value, regex, replace):
    """
    Jinja tag similar to the built-in replace but takes a regex.
    """
    pattern = re.compile(regex)
    return pattern.sub(replace, value)
|
cadyyan/git-achievements-rewrite
|
GitAchievements/publisher/html.py
|
Python
|
mit
| 1,964
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os,sys # system functions
import nipype.interfaces.io as nio # Data i/o
import nipype.interfaces.fsl as fsl # fsl
import nipype.pipeline.engine as pe # pypeline engine
import nipype.interfaces.utility as util # utility
import nipype.interfaces.ants as ants
from nipype.interfaces.c3 import C3dAffineTool
# All FSL interfaces write gzipped NIfTI by default.
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
"""
Project info
"""
# MNI152 2mm brain-extracted template used as the registration target.
template_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
project_dir="/lustre/work/dpanyuko/Analysis/RuleSwitch/"
work_dir="/lustre/scratch/dpanyuko/RuleSwitch/"
if not os.path.exists(work_dir):
    os.makedirs(work_dir)
#which subjects to run
# One subject id per invocation, taken from the command line.
subj_list=str(sys.argv[1])
"""
Create workflow
"""
wf = pe.Workflow(name='wf')
wf.base_dir = os.path.join(work_dir,"reg_wdir")
# Crash dumps go to scratch so failed nodes can be inspected later.
wf.config = {"execution": {"crashdump_dir":os.path.join(work_dir,'reg_crashdumps')}}
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource")
# A single-element iterable: the workflow runs once per supplied subject.
infosource.iterables = ('subject_id', [subj_list])
def get_subjectinfo(subject_id):
    """Return the subject id together with its list of run numbers.

    Most subjects have eight runs; a handful are missing the first or
    the last runs, so their lists are shortened accordingly.
    """
    if subject_id in ("Subject003", "Subject011", "Subject016", "Subject020"):
        runs = ["2", "3", "4", "5", "6", "7", "8"]
    elif subject_id == "Subject019":
        runs = ["1", "2", "3", "4", "5", "6"]
    else:
        runs = ["1", "2", "3", "4", "5", "6", "7", "8"]
    return subject_id, runs
# Wrap get_subjectinfo so its run list flows through the workflow graph.
subjinfo = pe.Node(util.Function(input_names=['subject_id'],
                                 output_names=['subject_id','run_id'],
                                 function=get_subjectinfo),
                   name='subjectinfo')
subjinfo.inputs.base_dir = project_dir
wf.connect([(infosource, subjinfo, [('subject_id', 'subject_id')]),])
# Grab motion-corrected, brain-extracted BOLD runs plus the anatomical.
datasource = pe.Node(nio.DataGrabber(infields=['subject_id','run_id'], outfields=['func', 'anat']), name='datasource')
datasource.inputs.base_directory = project_dir
datasource.inputs.template = '*'
datasource.inputs.field_template = dict(func='%s/bold/run%s/run*_mcf_brain.nii.gz',
                                        anat='%s/anatomy/highres001_BrainExtractionBrain.nii.gz')
datasource.inputs.template_args = dict(func=[['subject_id','run_id']],
                                       anat=[['subject_id']])
datasource.inputs.sort_filelist=True
wf.connect(subjinfo, 'subject_id', datasource, 'subject_id')
wf.connect(subjinfo, 'run_id', datasource, 'run_id')
"""
Setup preprocessing workflow
----------------------------
Set up a node to define all inputs required for the preprocessing workflow
"""
inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
                                                             'anat',]),
                    name='inputspec')
wf.connect([(datasource, inputnode, [('anat','anat'),('func', 'func'),]),])
"""
Convert functional images to float representation. Since there can be more than
one functional run we use a MapNode to convert each run.
"""
prefiltered_func_data = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
                                                            op_string = '',
                                                            suffix='_dtype'),
                                   iterfield=['in_file'],
                                   name='prefiltered_func_data')
wf.connect(inputnode, 'func', prefiltered_func_data, 'in_file')
"""
Extract the middle volume of the run as the reference
"""
example_func = pe.MapNode(interface=fsl.ExtractROI(t_size=1),
                          iterfield=['in_file'],
                          name = 'example_func')
"""
Define a function to return the 1 based index of the middle volume
"""
def getmiddlevolume(func):
    """Return the index of the middle volume of a 4D functional run.

    ``func`` may be a single filename or a list of filenames (only the
    first is inspected). The import is local because nipype executes
    connection functions in isolation.
    """
    from nibabel import load
    funcfile = func
    if isinstance(func, list):
        funcfile = func[0]
    # shape is (x, y, z, t); ``get_shape()`` is deprecated in nibabel.
    timepoints = load(funcfile).shape[3]
    # BUG FIX: use integer division so the result stays an int under
    # Python 3 (a float would be rejected by ExtractROI's t_min input).
    return (timepoints // 2) - 1
wf.connect(prefiltered_func_data, 'out_file', example_func, 'in_file')
# Feed the computed middle-volume index into ExtractROI's t_min.
wf.connect(inputnode, ('func', getmiddlevolume), example_func, 't_min')
"""
Registration
------------
"""
"""
---ANTs---
"""
# Three-stage anatomical-to-template registration: Rigid, Affine, SyN.
reg = pe.Node(ants.Registration(), name='antsRegister')
reg.inputs.output_transform_prefix = "output_"
reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.2, 3.0, 0.0)]
reg.inputs.number_of_iterations = [[100000, 111100, 111100]] * 2 + [[1000, 300, 200]]
reg.inputs.dimension = 3
reg.inputs.write_composite_transform = True
reg.inputs.collapse_output_transforms = True
reg.inputs.initial_moving_transform_com = True
# Mattes MI for the linear stages; Mattes + cross-correlation for SyN.
reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
reg.inputs.convergence_window_size = [20] * 2 + [5]
reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
reg.inputs.sigma_units = ['vox'] * 3
reg.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
reg.inputs.use_estimate_learning_rate_once = [True] * 3
reg.inputs.use_histogram_matching = [False] * 2 + [True]
reg.inputs.winsorize_lower_quantile = 0.005
reg.inputs.winsorize_upper_quantile = 0.995
reg.inputs.args = '--float'
reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
reg.inputs.num_threads = 12
reg.inputs.fixed_image=template_brain
wf.connect(inputnode, 'anat', reg, 'moving_image')
"""
---Func registration---
"""
"""
Estimate the tissue classes from the anatomical image.
"""
fast = pe.Node(fsl.FAST(), name='fast')
"""
Binarize the segmentation
"""
binarize = pe.Node(fsl.ImageMaths(op_string='-nan -thr 0.5 -bin'),
                   name='binarize')
pickindex = lambda x, i: x[i]
wf.connect(inputnode, 'anat', fast, 'in_files')
# Index 2 selects one partial-volume map from FAST's output list
# (white matter per FSL FAST's default class ordering — TODO confirm).
wf.connect(fast, ('partial_volume_files', pickindex, 2), binarize, 'in_file')
"""
Calculate rigid transform from example_func image to anatomical image
"""
func2anat = pe.MapNode(fsl.FLIRT(), iterfield=['in_file'], name='func2anat')
func2anat.inputs.dof = 6
wf.connect(example_func, 'roi_file', func2anat, 'in_file')
wf.connect(inputnode, 'anat', func2anat, 'reference')
"""
Now use bbr cost function to improve the transform
"""
func2anatbbr = pe.MapNode(fsl.FLIRT(), iterfield=['in_file','in_matrix_file'], name='func2anatbbr')
func2anatbbr.inputs.dof = 6
func2anatbbr.inputs.cost = 'bbr'
# BBR schedule shipped with FSL; requires FSLDIR to be set.
func2anatbbr.inputs.schedule = os.path.join(os.getenv('FSLDIR'),'etc/flirtsch/bbr.sch')
wf.connect(example_func, 'roi_file', func2anatbbr, 'in_file')
wf.connect(binarize, 'out_file', func2anatbbr, 'wm_seg')
wf.connect(inputnode, 'anat', func2anatbbr, 'reference')
wf.connect(func2anat, 'out_matrix_file', func2anatbbr, 'in_matrix_file')
"""
Convert the BBRegister transformation to ANTS ITK format
"""
convert2itk = pe.MapNode(C3dAffineTool(), iterfield=['source_file','transform_file'], name='convert2itk')
convert2itk.inputs.fsl2ras = True
convert2itk.inputs.itk_transform = True
wf.connect(func2anatbbr, 'out_matrix_file', convert2itk, 'transform_file')
wf.connect(example_func, 'roi_file',convert2itk, 'source_file')
wf.connect(inputnode, 'anat', convert2itk, 'reference_file')
"""
Concatenate the affine and ants transforms into a list
"""
merge = pe.MapNode(util.Merge(2), iterfield=['in2'], name='mergexfm')
wf.connect(convert2itk, 'itk_transform', merge, 'in2')
wf.connect(reg, 'composite_transform', merge, 'in1')
"""
Transform the mean image. First to anatomical and then to target
"""
warp_func = pe.MapNode(ants.ApplyTransforms(), iterfield=['input_image','transforms'], name='warp_func')
warp_func.inputs.input_image_type = 0
warp_func.inputs.interpolation = 'Linear'
warp_func.inputs.invert_transform_flags = [False, False]
warp_func.inputs.terminal_output = 'file'
wf.connect(example_func, 'roi_file', warp_func, 'input_image')
wf.connect(merge, 'out', warp_func, 'transforms')
warp_func.inputs.reference_image = template_brain
"""
Save data
"""
datasink = pe.Node(nio.DataSink(), name='sinker')
datasink.inputs.base_directory=os.path.join(project_dir, "reg")
wf.connect(subjinfo, 'subject_id', datasink, 'container')
wf.connect(reg, 'warped_image', datasink, 'anatomy.anat2standard')
wf.connect(reg, 'composite_transform', datasink, 'anatomy.anat2standard_mat')
wf.connect(warp_func, 'output_image', datasink, 'bold.func2standard')
wf.connect(func2anatbbr, 'out_matrix_file', datasink, 'bold.func2anat_transform')
wf.connect(merge, 'out', datasink, 'bold.func2standard_mat')
wf.connect(example_func, 'roi_file', datasink, 'bold.example_func')
"""
Run
"""
outgraph = wf.run(plugin='MultiProc')
|
dpaniukov/RulesFPC
|
reg/ants_reg.py
|
Python
|
mit
| 8,750
|
"""
Django settings for SmartTrainer project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$fd1clcakbjsn9(474g11=03__)$&ep24_8tp%8yuk7z3ylzj!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
FACEBOOK_APP_ID = '1680549882220393'
FACEBOOK_APP_SECRET = '9bbc8ce2e15d3eb10256f099339754d9'
# Application definition
INSTALLED_APPS = [
'users',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'SmartTrainer.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'SmartTrainer.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
cffls/SmartTrainnerServer
|
SmartTrainer/SmartTrainer/settings.py
|
Python
|
mit
| 3,292
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
TweetFreq - A flask-based web application for analyzing tweets
"""
from datetime import datetime, timedelta
from time import sleep
from redis import StrictRedis
from flask import Flask, render_template, flash, jsonify, redirect, url_for
from flask.json import loads, dumps
from flask_wtf import Form
from wtforms import StringField
from wtforms.validators import DataRequired, Length
from twitter import get_count, get_full_timeline, get_tweet_datetimes,\
get_words_from_tweets, get_next_reset, convert_timestamp,\
TwitterRateLimitException, NoTweetsException
from flask_babel import Babel
from babel.numbers import format_number
from babel.dates import format_timedelta
from twython import TwythonAuthError
from celery import Celery
__author__ = "Sean Whalen"
__license___ = "MIT"
__version__ = '0.1.0'
APP = Flask(__name__)
APP.config.from_pyfile("config.py")
BABEL = Babel(APP)
# Namespace prefix for every redis key this app reads or writes.
REDIS_PREFIX = "tweetfreq"
# How long successful results stay cached in redis.
CACHE_HOURS = 1
CELERY = Celery(REDIS_PREFIX, broker=APP.config["BROKER_URL"])
# Force JSON (rather than pickle) serialization for task payloads.
CELERY.conf.CELERY_TASK_SERIALIZER = 'json'
CELERY.conf.CELERY_RESULT_SERIALIZER = 'json'
CELERY.conf.CELERY_ACCEPT_CONTENT = ['json']
LOCALE = APP.config['BABEL_DEFAULT_LOCALE']
class UserForm(Form):
    """Form to search by username"""
    # Cap input length; DataRequired rejects empty submissions.
    # A leading "@" is permitted here and stripped by the index view.
    name = StringField(label="Username", validators=[Length(max=16),
                                                     DataRequired()])
def flash_errors(form):
    """Flashes form errors"""
    for field_name, field_errors in form.errors.items():
        label_text = getattr(form, field_name).label.text
        for message in field_errors:
            flash("Error in the %s field - %s" % (
                label_text, message), 'error')
@APP.errorhandler(500)
def server_error(e):
    """Handles server errors by rendering the 500 template."""
    # Ignore unused arguments
    # pylint: disable=W0613
    return render_template("errors/500.html"), 500
@APP.errorhandler(404)
def not_found_error(e):
    """HTTP 404 view — renders the not-found template."""
    # Ignore unused arguments
    # pylint: disable=W0613
    return render_template("errors/404.html"), 404
@CELERY.task
def load_tweets(username):
    """Loads json results into redis as a cache.

    Fetches the user's full timeline, computes per-day and per-word
    counts, and stores a JSON status document under
    ``tweetfreq.user.<username>``. Every failure mode is cached briefly
    so repeated requests for a bad username do not hammer the API.
    """
    redis = StrictRedis()
    redis_key = "%s.user.%s" % (REDIS_PREFIX, username)
    status = redis.get(redis_key)
    # Prevent DoS: only start work when the cached entry is still queued.
    if status is not None and loads(status)['status'] != 'queued':
        return None
    try:
        created = datetime.utcnow()
        # BUG FIX: the two progress entries below previously used the key
        # 'stats' instead of 'status', which made the guard above raise
        # KeyError whenever it ran while a job was in progress.
        status = dumps(dict(status='running', header="Retrieving tweets",
                            message='', code=200))
        redis.set(redis_key, status)
        redis.expire(redis_key, 2*60)
        timeline = get_full_timeline(username)
        start = timeline[-1]
        start = dict(id=start['id'],
                     timestamp=convert_timestamp(start['created_at']))
        end = timeline[0]
        end = dict(id=end['id'],
                   timestamp=convert_timestamp(end['created_at']))
        total = len(timeline)
        formatted_total = format_number(total, locale=LOCALE)
        status = dumps(dict(status='running', header="Processing tweets",
                            message='Received %s tweets' % formatted_total,
                            code=200))
        redis.set(redis_key, status)
        redis.expire(redis_key, 10*60)
        total = dict(int=total, formatted=formatted_total)
        words = get_count(get_words_from_tweets(timeline), limit=300)
        dates = get_count(get_tweet_datetimes(timeline, date_only=True),
                          order_by=0, reverse=False)
        # Average tweets per day (renamed so the builtin ``sum`` is not
        # shadowed).
        tweet_total = sum(date[1] for date in dates)
        avg = float(tweet_total) / float(len(dates))
        avg = dict(int=avg, formatted=format_number(avg, locale=LOCALE))
        _max = sorted(dates, key=lambda x: x[1], reverse=True)[0][1]
        _max = dict(int=_max, formatted=format_number(_max, locale=LOCALE))
        expires = datetime.utcnow() + timedelta(hours=CACHE_HOURS)
        stats = dict(avg_per_day=avg, max_per_day=_max, total=total)
        status = dumps(dict(status='done', code=200,
                            data=dict(start=start, end=end, dates=dates,
                                      words=words, stats=stats,
                                      created=created, expires=expires,
                                      users=[username], search_terms=[])))
        redis.set(redis_key, status)
        redis.expire(redis_key, CACHE_HOURS*60*60)
    except TwythonAuthError:
        # Protected/private accounts — cache the 403 for five minutes.
        status = dumps(dict(status='error', header="Tweets not available",
                            message="That user's timeline is protected/private",
                            code=403))
        redis.set(redis_key, status)
        redis.expire(redis_key, 60*5)
    except ValueError:
        status = dumps(dict(status='error', header="User not found",
                            message="The specified Twitter username does not exist",
                            code=404))
        redis.set(redis_key, status)
        redis.expire(redis_key, 60*5)
    except TwitterRateLimitException:
        # Rate-limited: tell the user when the API window resets; expire
        # quickly so the next attempt can retry soon.
        reset = format_timedelta(get_next_reset() - datetime.utcnow(),
                                 locale=LOCALE)
        message = "TweetFreq is under a heavy load. Try again in %s." % reset
        status = dumps(dict(status='error', header="Resources exhausted",
                            message=message, code=503))
        redis.set(redis_key, status)
        redis.expire(redis_key, 2)
    except NoTweetsException:
        status = dumps(dict(status='error', header="No tweets found",
                            message="", code=404))
        redis.set(redis_key, status)
        redis.expire(redis_key, 60*5)
@APP.context_processor
def inject_status():
    """Injects status info into templates.

    Exposes the app version and the site-wide 'tweetfreq.status' redis
    value (presumably an operator-set banner — verify against templates).
    """
    return dict(version=__version__,
                status=StrictRedis().get('tweetfreq.status'))
@APP.route('/', methods=('GET', 'POST'))
def index():
    """The index view"""
    form = UserForm()
    # Guard clause: show the search form until a valid submission arrives.
    if not form.validate_on_submit():
        return render_template("index.html", form=form)
    username = form.name.data.lower()
    if username.startswith('@'):
        username = username[1:]
    return redirect(url_for('view_user_report', username=username), 301)
@APP.route("/about/")
def about():
"""he about view"""
return render_template("about.html")
@APP.route('/u/<username>.json')
def view_user_json(username):
    """The twitter user JSON view.

    Returns the cached analysis for ``username``; on a cache miss it
    stores a 'queued' placeholder, kicks off the celery task, and waits
    briefly so the first poll can return a fresher status.
    """
    username = username.lower()
    redis_key = "%s.user.%s" % (REDIS_PREFIX, username)
    redis = StrictRedis()
    cache = redis.get(redis_key)
    if not cache:
        cache = dict(status='queued', header='Queued',
                     message="Your request will be processed shortly",
                     code=200)
        redis.set(redis_key, dumps(cache))
        redis.expire(redis_key, CACHE_HOURS*60*60)
        # Fire-and-forget background fetch; clients poll this endpoint.
        load_tweets.delay(username)
        sleep(.5)
    # Re-read so the response reflects any progress made by the task.
    cache = loads(redis.get(redis_key))
    return jsonify(cache)
@APP.route('/u/<username>/')
def view_user_report(username):
    """The twitter user report view.

    Renders the report shell only; the data is presumably fetched
    client-side from the matching .json endpoint — verify in user.html.
    """
    return render_template('user.html', username=username)

# Development entry point; production should use a WSGI server instead.
if __name__ == '__main__':
    APP.run()
|
seanthegeek/TweetFreq
|
tweetfreq.py
|
Python
|
mit
| 7,488
|
# Flask application configuration.

# SECURITY WARNING: never leave debug mode on in production.
DEBUG = True

THREADS_PER_PAGE = 4

# Cross-site request forgery protection.
CSRF_ENABLED = True
CSRF_SESSION_KEY = 'secret'

# BUG FIX: the four values below previously ended with trailing commas,
# which made each of them a one-element tuple instead of a string.
DATABASE = 'database/rmcontrol.db'
SECRET_KEY = 'YOUR_KEY_GOES_HERE_abcdefghijklm'
USERNAME = 'admin'
PASSWORD = 'default'
|
ericmagnuson/rmcontrol
|
config.py
|
Python
|
mit
| 215
|
import re
from django.core.management.base import BaseCommand
from openpyxl import load_workbook
from api.directions.sql_func import get_confirm_direction_patient_year, get_lab_podr
from api.patients.views import full_patient_search_data
from clients.models import Individual, Card
import datetime
class Command(BaseCommand):
    """Find patient cards listed in an xlsx file and print their confirmed directions.

    The ``path`` argument is ';'-separated: workbook path, start date,
    end date, and optionally ``is_paraclinic`` or ``is_doc_refferal``.
    """

    def add_arguments(self, parser):
        parser.add_argument('path', type=str)

    def handle(self, *args, **kwargs):
        data_arg = kwargs["path"].split(";")
        fp = data_arg[0]
        self.stdout.write("Path: " + fp)
        wb = load_workbook(filename=fp)
        ws = wb[wb.sheetnames[0]]
        starts = False
        fio, born = '', ''
        # BUG FIX: the compiled pattern was previously stored in ``p`` and
        # silently overwritten by the patronymic unpacked from
        # full_patient_search_data, so every row after the first passed a
        # patronymic string where the regex was expected.
        pattern = re.compile(r'^[а-яё]{3}[0-9]{8}$', re.IGNORECASE)
        start_date = data_arg[1]
        end_date = data_arg[2]
        is_lab, is_paraclinic, is_doc_refferal = True, False, False
        if len(data_arg) > 3 and data_arg[3] == "is_paraclinic":
            is_lab, is_paraclinic, is_doc_refferal = False, True, False
        elif len(data_arg) > 3 and data_arg[3] == "is_doc_refferal":
            is_lab, is_paraclinic, is_doc_refferal = False, False, True
        lab_podr = get_lab_podr()
        lab_podr = [i[0] for i in lab_podr]
        for row in ws.rows:
            cells = [str(x.value) for x in row]
            if not starts:
                # Skip preamble rows until the header row is found.
                if "Ф.И.О." in cells:
                    fio = cells.index("Ф.И.О.")
                    born = cells.index("Дата рождения")
                    starts = True
            else:
                query = f"{cells[fio]} {cells[born]}"
                if not cells[fio]:
                    break
                f, n, p, rmis_req, split = full_patient_search_data(pattern, query)
                if len(split) > 3 or (len(split) == 3 and split[-1].isdigit()):
                    objects = Individual.objects.filter(
                        family__istartswith=f, name__istartswith=n, card__base__internal_type=True, birthday=datetime.datetime.strptime(cells[born], "%d.%m.%Y").date()
                    )
                    if len(split) > 3:
                        # BUG FIX: the narrowed queryset was previously
                        # discarded (QuerySet.filter returns a new queryset).
                        objects = objects.filter(patronymic__istartswith=p)
                    objects = objects[:10]
                else:
                    objects = Individual.objects.filter(family__istartswith=f, name__istartswith=n, patronymic__istartswith=p, card__base__internal_type=True)[:10]
                cards = Card.objects.filter(base__internal_type=True, individual__in=objects)
                if cards:
                    for c in cards:
                        confirmed_directions = get_confirm_direction_patient_year(start_date, end_date, lab_podr, c.pk, is_lab, is_paraclinic, is_doc_refferal)
                        if not confirmed_directions:
                            continue
                        # Deduplicate while preserving first-seen order.
                        directions = []
                        for d in confirmed_directions:
                            if d.direction not in directions:
                                directions.append(d.direction)
                        print(f"{c.pk};{c.number};{c.individual};{c.individual.birthday};{directions}")  # noqa: T001
|
moodpulse/l2
|
clients/management/commands/find_card_by_fio.py
|
Python
|
mit
| 3,152
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2020 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Extensions allow integration of features into a record class.
For instance, the system fields feature is built as an extension.
"""
class ExtensionMixin:
    """Defines the methods needed by an extension."""
    # Every hook below is an intentional no-op; subclasses override only
    # the lifecycle events they care about.

    def pre_init(self, record, data, model=None, **kwargs):
        """Called when a new record instance is initialized.

        Called when a new record is instantiated (i.e. during all
        ``Record({...})``). This means it's also called when e.g. a record
        is created via ``Record.create()``.

        :param data: The dict passed to the record's constructor.
        :param model: The model class used for initialization.
        """

    def post_init(self, record, data, model=None, **kwargs):
        """Called after a record is initialized."""

    def pre_dump(self, record, data, dumper=None):
        """Called before a record is dumped."""

    def post_dump(self, record, data, dumper=None):
        """Called after a record is dumped."""

    def pre_load(self, data, loader=None):
        """Called before a record is loaded."""

    def post_load(self, record, data, loader=None):
        """Called after a record is loaded."""

    def pre_create(self, record):
        """Called before a record is created."""

    def post_create(self, record):
        """Called after a record is created."""

    def pre_commit(self, record):
        """Called before a record is committed."""

    def pre_delete(self, record, force=False):
        """Called before a record is deleted."""

    def post_delete(self, record, force=False):
        """Called after a record is deleted."""

    def pre_revert(self, record, revision):
        """Called before a record is reverted."""

    def post_revert(self, new_record, revision):
        """Called after a record is reverted."""
class RecordExtension(ExtensionMixin):
    """Base class for record extensions."""
class RecordMeta(type):
    """Metaclass responsible for initializing the extension registry."""

    def __new__(mcs, name, bases, attrs):
        """Create a new record class.

        Gives every class its own ``_extensions`` list so subclasses
        never share an extension registry with their parents.
        """
        attrs.setdefault('_extensions', [])
        return super().__new__(mcs, name, bases, attrs)
|
inveniosoftware/invenio-records
|
invenio_records/extensions.py
|
Python
|
mit
| 2,608
|
import json
from random import shuffle
class GameState(object):
    '''Simple class to store and persist game progress data.'''

    # Save file location, relative to the working directory.
    FILE_NAME = 'data/savegame.json'

    # Fallback progress used when no save file exists or it cannot be read.
    default_data = {
        'available_levels': ['cloud'],
        'locked_levels': ['book', 'feather', 'lamp'],
    }

    def __init__(self, *args, **kwargs):
        if kwargs.get('load'):
            self.load()
        else:
            # BUG FIX: copy the lists so instances never alias (and then
            # mutate, via unlock_next_level) the shared class defaults.
            self.available_levels = list(kwargs.get('available_levels', self.default_data['available_levels']))
            self.locked_levels = list(kwargs.get('locked_levels', self.default_data['locked_levels']))

    def to_json(self):
        """Serialize the current progress to a JSON string."""
        data = {}
        data['available_levels'] = self.available_levels
        data['locked_levels'] = self.locked_levels
        return json.dumps(data)

    def save(self):
        """Write the current progress to FILE_NAME."""
        with open(self.FILE_NAME, 'w') as f:
            f.write(self.to_json())

    def load(self):
        """Read progress from FILE_NAME, falling back to the defaults.

        BUG FIX: on a read/parse error the old code left the instance
        without its level attributes (assignment happened inside the try)
        and used Python-2-only ``except Exception, e`` syntax; both level
        lists are now always populated.
        """
        try:
            with open(self.FILE_NAME, 'r') as f:
                data = json.loads(f.read())
        except Exception:
            data = self.default_data
        self.available_levels = list(data.get('available_levels', self.default_data['available_levels']))
        self.locked_levels = list(data.get('locked_levels', self.default_data['locked_levels']))

    def unlock_next_level(self, current_level):
        """Unlock the next locked level when current_level is the newest one."""
        self.load()
        last_available_level = self.available_levels[len(self.available_levels) - 1]
        if last_available_level == current_level and self.locked_levels:
            next_level = self.locked_levels.pop(0)
            self.available_levels.append(next_level)
            self.save()
class MultipleChoiceQuizBase(object):
    """Base class for the multiple-choice quiz game modes."""

    def __init__(self, asset_file, dont_load=False, game_state=None):
        # BUG FIX: used_questions/questions were mutable class attributes
        # shared by every instance; they are now per-instance.
        #to store used questions
        self.used_questions = []
        #stores questions to ask
        self.questions = []
        self.asset_file = asset_file
        if not dont_load:
            self.load_questions()
        self.max_lives = 3
        self.current_lives = self.max_lives
        self.score = 0
        # BUG FIX: game_state previously defaulted to a shared mutable {}.
        self.game_state = GameState(**(game_state or {}))

    def win(self):
        """Register a correct answer."""
        self.score += 1

    def loss(self):
        """Register a wrong answer."""
        self.current_lives -= 1

    def game_over(self):
        """Return True when the player has run out of lives."""
        return self.current_lives < 1

    def has_won(self):
        """Return True when every question has been asked."""
        return len(self.questions) == 0

    def load_questions(self):
        """(Re)load and shuffle the question pool from the asset file."""
        self.questions = load_json(self.asset_file)
        #randomizing the list
        shuffle(self.questions)
        self.used_questions = []

    def get_random_question(self):
        """Pop the next question, reloading the pool when it is empty."""
        if len(self.questions) == 0:
            #reload questions because there is no content!
            self.load_questions()
        question = self.questions.pop(0)
        self.used_questions.append(question)
        return question

    def add_new_level(self, next_level):
        """Unlock the next level in the saved game state, if any remain.

        BUG FIX: this method previously referenced the nonexistent
        ``self.game`` attribute and called ``available_levels`` as a
        function; it now operates on ``self.game_state``.
        """
        if self.game_state.locked_levels:
            next_level = self.game_state.locked_levels.pop()
            if next_level not in self.game_state.available_levels:
                self.game_state.available_levels.append(next_level)
                self.game_state.save()
# Each subclass binds the quiz base to one character's question file
# (loaded from the data/ directory by load_json).
class SabioData(MultipleChoiceQuizBase):
    def __init__(self, dont_load=False):
        super(SabioData, self).__init__('sabio.json', dont_load)

class PoetaData(MultipleChoiceQuizBase):
    def __init__(self, dont_load=False):
        super(PoetaData, self).__init__('poeta.json', dont_load)

class CuenteroData(MultipleChoiceQuizBase):
    def __init__(self, dont_load=False):
        super(CuenteroData, self).__init__('cuentero.json', dont_load)

class GenioData(MultipleChoiceQuizBase):
    def __init__(self, dont_load=False):
        super(GenioData, self).__init__('genio.json', dont_load)
#utils functions
def load_json(file_name):
    """Load and parse a JSON asset from the data/ directory.

    BUG FIX: the file handle was previously never closed; the ``with``
    block guarantees it is released even if parsing fails.
    """
    with open("data/%s" % file_name, 'r') as asset_fh:
        return json.loads(asset_fh.read())
|
FundacionZamoraTeran/Genios
|
engine.py
|
Python
|
mit
| 3,831
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
# NOTE(review): this module follows plotly's codegen structure and is
# presumably auto-generated — prefer fixing the generator over editing
# this file by hand.
class Legendgrouptitle(_BaseTraceHierarchyType):

    # class properties
    # --------------------
    _parent_path_str = "scatterpolar"
    _path_str = "scatterpolar.legendgrouptitle"
    _valid_props = {"font", "text"}

    # font
    # ----
    @property
    def font(self):
        """
        Sets this legend group's title font.

        The 'font' property is an instance of Font
        that may be specified as:
          - An instance of :class:`plotly.graph_objs.scatterpolar.legendgrouptitle.Font`
          - A dict of string/value properties that will be passed
            to the Font constructor

            Supported dict properties:

                color
                family
                    HTML font family - the typeface that will be
                    applied by the web browser. The web browser
                    will only be able to apply a font if it is
                    available on the system which it operates.
                    Provide multiple font families, separated by
                    commas, to indicate the preference in which to
                    apply fonts if they aren't available on the
                    system. The Chart Studio Cloud (at
                    https://chart-studio.plotly.com or on-premise)
                    generates images on a server, where only a
                    select number of fonts are installed and
                    supported. These include "Arial", "Balto",
                    "Courier New", "Droid Sans",, "Droid Serif",
                    "Droid Sans Mono", "Gravitas One", "Old
                    Standard TT", "Open Sans", "Overpass", "PT Sans
                    Narrow", "Raleway", "Times New Roman".
                size

        Returns
        -------
        plotly.graph_objs.scatterpolar.legendgrouptitle.Font
        """
        return self["font"]

    @font.setter
    def font(self, val):
        self["font"] = val

    # text
    # ----
    @property
    def text(self):
        """
        Sets the title of the legend group.

        The 'text' property is a string and must be specified as:
          - A string
          - A number that will be converted to a string

        Returns
        -------
        str
        """
        return self["text"]

    @text.setter
    def text(self, val):
        self["text"] = val

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        font
            Sets this legend group's title font.
        text
            Sets the title of the legend group.
        """

    def __init__(self, arg=None, font=None, text=None, **kwargs):
        """
        Construct a new Legendgrouptitle object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of :class:`plotly.graph_objs.scatterpolar.L
            egendgrouptitle`
        font
            Sets this legend group's title font.
        text
            Sets the title of the legend group.

        Returns
        -------
        Legendgrouptitle
        """
        super(Legendgrouptitle, self).__init__("legendgrouptitle")
        # Internal construction path: reuse the parent's state directly.
        if "_parent" in kwargs:
            self._parent = kwargs["_parent"]
            return

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.scatterpolar.Legendgrouptitle
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scatterpolar.Legendgrouptitle`"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        self._validate = kwargs.pop("_validate", True)

        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over keys in ``arg``.
        _v = arg.pop("font", None)
        _v = font if font is not None else _v
        if _v is not None:
            self["font"] = _v
        _v = arg.pop("text", None)
        _v = text if text is not None else _v
        if _v is not None:
            self["text"] = _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
|
plotly/plotly.py
|
packages/python/plotly/plotly/graph_objs/scatterpolar/_legendgrouptitle.py
|
Python
|
mit
| 4,732
|
from .credentials_decorator import preserve_credentials_state
from .load_yaml import load_yaml_file, load_yaml_directory
from .throttle_decorator import throttle
|
voxy/bluecanary
|
bluecanary/utilities/__init__.py
|
Python
|
mit
| 162
|
# Python 2 script (print statement below): plots the proportion of common
# nouns used in US presidential campaigns over time, split by party, and
# saves the figure as an SVG under <project root>/images.
import json
import matplotlib.pyplot as plt
import matplotlib as mpl
import os
from scipy import stats
import numpy
import commonfunctions as cf

# NOTE(review): assumes the script is run from a subdirectory of the project
# root, since root_directory is the parent of the current working directory.
root_directory = os.path.dirname(os.path.abspath(os.curdir))

# Precomputed results: a list of {"party": "r"|"d", "data": [years, values]}.
with open('complexity-time-party.json', 'r') as f:
    results = json.load(f)

r, d = [None] * 2

for party in results:
    if party['party'] == 'r':
        r = party['data']
    elif party['party'] == 'd':
        d = party['data']

# The graph plots on the Y axis the relative amount of common nouns
#
# This is optional code for linear regression information/lines
#
# linr = stats.linregress(results[0], results[1])
# print stats.linregress(results[0], results[1])
# x = numpy.linspace(1960,2020,10)
# y = [linr.intercept + linr.slope * x_ for x_ in x]

plt.style.use('ggplot')
fig = plt.figure(0)
ax = fig.gca()
# NOTE(review): grid(b=...) and set_axis_bgcolor are old matplotlib APIs,
# removed in matplotlib >= 2.x/3.x — this script targets an old version.
ax.grid(b=False)
ax.set_axis_bgcolor('white')
ax.set_xlim([1956, 2020])
ax.set_xticks(xrange(1960, 2020, 8))
ax.plot(r[0], r[1], label='Republican', lw=2.5)
ax.set_xlabel('Year')
ax.plot(d[0], d[1], label='Democrat', lw=2.5)
ax.legend()
ax.set_ylabel('Proportion of nouns in dictionary of 504 most common nouns')
ax.set_title('Occurrence of the most common 504 nouns in US presidential election campaigns',
             fontdict={'fontsize': 11,
                       'fontweight': mpl.rcParams['axes.titleweight'],
                       'verticalalignment': 'baseline',
                       'horizontalalignment': 'center'},
             y=1.05)

plt.savefig(os.path.join(root_directory, 'images', 'analysis-complexity-time-party.svg'), format='svg')
# Emits a rawgit embed snippet for the saved SVG (Python 2 print statement).
print cf.generate_rawgit_img_embed(os.path.join('images', 'analysis-complexity-time-party.svg'))
|
keelanfh/electionary
|
analysis/complexity-time-party-graph.py
|
Python
|
mit
| 1,683
|
import sys
'''
https://codility.com/demo/results/demoE4F8ST-A8T/
'''
def solution(A):
    """Return the smallest positive integer (>= 1) not present in A.

    Marks every value of A that falls in the candidate range [1, len(A)+1]
    in a boolean table, then returns the first unmarked candidate.
    O(N) time, O(N) space.

    Replaced Python-2-only ``xrange`` with iteration over the values and
    ``enumerate`` so the function runs unchanged on Python 2 and 3;
    behavior is identical.

    :param A: list of ints (may be empty, may contain values outside range)
    :return: the missing minimal positive integer; -1 is unreachable
             (len(A)+1 candidates cannot all be covered by len(A) values)
             but kept to preserve the original contract.
    """
    upper = len(A) + 1
    seen = [False] * upper
    for value in A:
        # Only values inside the candidate range can affect the answer.
        if 1 <= value <= upper:
            seen[value - 1] = True
    for candidate, flag in enumerate(seen, start=1):
        if not flag:
            return candidate
    return -1
def main(argv):
    # Python 2 print statement; demo run against the Codility sample input
    # (expected output: 5). argv is accepted for symmetry but unused.
    print solution([1, 3, 6, 4, 1, 2])


if __name__ == "__main__":
    main(sys.argv[1:])
|
cowboysmall/codility
|
src/main/python/lessons/counting_elements/missing_integer.py
|
Python
|
mit
| 424
|
# -*- coding: utf-8 -*-
import pytest
from tictail import Tictail
from tictail.client import DEFAULT_CONFIG
from tictail.resource import Cards
class TestClient(object):
    """Unit tests for the top-level Tictail client.

    NOTE(review): ``test_token`` and ``client`` are pytest fixtures defined
    in an unseen conftest.py — presumably a dummy access token and a
    pre-constructed Tictail instance; confirm against tests/conftest.py.
    """

    def test_construction(self, test_token, client):
        # A freshly built client carries the token, a transport, and the
        # default configuration.
        assert client.access_token == test_token
        assert client.transport is not None
        assert client.config is not None
        assert client.config == DEFAULT_CONFIG

    def test_make_transport(self, test_token, client):
        # The transport factory propagates both config and token.
        transport = client._make_transport()
        assert transport.config == DEFAULT_CONFIG
        assert transport.access_token == test_token

    def test_make_config(self, client):
        # Overrides are merged over the defaults; None yields the defaults.
        config = client._make_config({
            'version': 2,
            'base': 'test.foo.bar'
        })
        assert config['version'] == 2
        assert config['base'] == 'test.foo.bar'
        config = client._make_config(None)
        assert config == DEFAULT_CONFIG

    def test_config_override_via_constructor(self):
        # Constructor overrides behave like _make_config: partial merge.
        client = Tictail('test', {
            'version': 2
        })
        assert client.config['version'] == 2
        assert client.config['base'] == DEFAULT_CONFIG['base']

    def test_make_store_subresource(self, client):
        # A store id is mandatory for store-scoped subresources.
        with pytest.raises(ValueError):
            client._make_store_subresource(Cards, None)
        resource = client._make_store_subresource(Cards, 1)
        assert resource.uri == '/stores/1/cards'

    @pytest.mark.parametrize('method,expected_uri', [
        ('followers', '/stores/1/followers'),
        ('cards', '/stores/1/cards'),
        ('customers', '/stores/1/customers'),
        ('products', '/stores/1/products'),
        ('orders', '/stores/1/orders'),
    ])
    def test_default_factories(self, client, method, expected_uri):
        # Each shortcut method builds the matching store-scoped resource URI.
        shortcut = getattr(client, method)
        resource = shortcut(1)
        assert resource.uri == expected_uri
|
tictail/tictail-python
|
tests/unit/test_client.py
|
Python
|
mit
| 1,884
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import json
from frappe.utils import cstr
from frappe import _
from frappe.model.document import Document
from frappe.model.docfield import supports_translation
class CustomField(Document):
    """DocType controller for user-defined fields added to existing DocTypes.

    Handles fieldname derivation from the label, position (insert_after)
    resolution, schema updates on save, and Property Setter cleanup on delete.
    """

    def autoname(self):
        # Document name is "<doctype>-<fieldname>", e.g. "Sales Invoice-my_field".
        self.set_fieldname()
        self.name = self.dt + "-" + self.fieldname

    def set_fieldname(self):
        """Derive a lowercase, sanitized fieldname from the label if unset.

        Raises via frappe.throw when no label is available and the field is
        not a Section/Column Break (those get an auto label from idx).
        """
        if not self.fieldname:
            label = self.label
            if not label:
                if self.fieldtype in ["Section Break", "Column Break"]:
                    label = self.fieldtype + "_" + str(self.idx)
                else:
                    frappe.throw(_("Label is mandatory"))
            # Remove special characters from the fieldname.
            # FIX: the original predicate ended with `or '_'`, which is a
            # constant truthy string — filter() therefore kept every
            # character and nothing was ever stripped. The intent was to
            # keep alphanumerics and underscores only.
            self.fieldname = "".join(filter(lambda x: x.isdigit() or x.isalpha() or x == '_',
                cstr(label).replace(' ', '_')))

        # Fieldnames should be lowercase.
        self.fieldname = self.fieldname.lower()

    def validate(self):
        # Use an uncached meta so we see fields added in this session.
        meta = frappe.get_meta(self.dt, cached=False)
        fieldnames = [df.fieldname for df in meta.get("fields")]

        # 'append' is a sentinel meaning "after the last existing field".
        if self.insert_after == 'append':
            self.insert_after = fieldnames[-1]

        if self.insert_after and self.insert_after in fieldnames:
            self.idx = fieldnames.index(self.insert_after) + 1

        # Remember the persisted fieldtype so on_update can detect a change.
        self._old_fieldtype = self.db_get('fieldtype')

        if not self.fieldname:
            frappe.throw(_("Fieldname not set for Custom Field"))

        # Only some fieldtypes support translation; silently reset otherwise.
        if self.get('translatable', 0) and not supports_translation(self.fieldtype):
            self.translatable = 0

        if not self.flags.ignore_validate:
            # Local import to avoid a circular dependency with doctype.py.
            from frappe.core.doctype.doctype.doctype import check_if_fieldname_conflicts_with_methods
            check_if_fieldname_conflicts_with_methods(self.dt, self.fieldname)

    def on_update(self):
        frappe.clear_cache(doctype=self.dt)
        if not self.flags.ignore_validate:
            # Validate the full field set of the target DocType.
            from frappe.core.doctype.doctype.doctype import validate_fields_for_doctype
            validate_fields_for_doctype(self.dt)

        # Update the table schema, but only for table-backed (non-single)
        # DocTypes, and only when the column is new or its type changed.
        if not frappe.db.get_value('DocType', self.dt, 'issingle'):
            if (self.fieldname not in frappe.db.get_table_columns(self.dt)
                    or getattr(self, "_old_fieldtype", None) != self.fieldtype):
                from frappe.model.db_schema import updatedb
                updatedb(self.dt)

    def on_trash(self):
        # Delete property setter entries that reference this field.
        frappe.db.sql("""\
            DELETE FROM `tabProperty Setter`
            WHERE doc_type = %s
            AND field_name = %s""",
            (self.dt, self.fieldname))

        frappe.clear_cache(doctype=self.dt)

    def validate_insert_after(self, meta):
        # The anchor field must exist and must not be the field itself.
        if not meta.get_field(self.insert_after):
            frappe.throw(_("Insert After field '{0}' mentioned in Custom Field '{1}', with label '{2}', does not exist")
                .format(self.insert_after, self.name, self.label), frappe.DoesNotExistError)

        if self.fieldname == self.insert_after:
            frappe.throw(_("Insert After cannot be set as {0}").format(meta.get_label(self.insert_after)))
@frappe.whitelist()
def get_fields_label(doctype=None):
    """Return value/label option dicts for every field of *doctype*.

    Each entry is ``{"value": fieldname, "label": translated label}``,
    suitable for client-side select controls.
    """
    options = []
    for docfield in frappe.get_meta(doctype).get("fields"):
        options.append({
            "value": docfield.fieldname or "",
            "label": _(docfield.label or ""),
        })
    return options
def create_custom_field_if_values_exist(doctype, df):
    # Create the Custom Field record only when the underlying table already
    # has a column of that name AND at least one row holds a non-empty value
    # in it — i.e. resurrect metadata for an orphaned-but-populated column.
    # NOTE(review): fieldname/doctype are interpolated into the SQL string;
    # assumed to come from trusted (installer) code, not user input.
    df = frappe._dict(df)
    if df.fieldname in frappe.db.get_table_columns(doctype) and \
        frappe.db.sql("""select count(*) from `tab{doctype}`
            where ifnull({fieldname},'')!=''""".format(doctype=doctype, fieldname=df.fieldname))[0][0]:
        create_custom_field(doctype, df)
def create_custom_field(doctype, df):
    """Insert a Custom Field on *doctype* described by the dict *df*.

    Derives the fieldname from the label when missing; does nothing if a
    Custom Field with that fieldname already exists for the doctype.
    """
    df = frappe._dict(df)
    if not df.fieldname and df.label:
        df.fieldname = frappe.scrub(df.label)

    # Guard clause: skip insertion when the field is already registered.
    if frappe.db.get_value("Custom Field", {"dt": doctype, "fieldname": df.fieldname}):
        return

    field_doc = frappe.get_doc({
        "doctype": "Custom Field",
        "dt": doctype,
        "permlevel": df.permlevel or 0,
        "label": df.label,
        "fieldname": df.fieldname,
        "fieldtype": df.fieldtype or 'Data',
        "options": df.options,
        "insert_after": df.insert_after,
        "print_hide": df.print_hide,
        "hidden": df.hidden or 0
    })
    field_doc.insert()
def create_custom_fields(custom_fields):
    '''Add / update multiple custom fields

    :param custom_fields: example `{'Sales Invoice': [dict(fieldname='test')]}`'''
    for doctype, fields in custom_fields.items():
        # Normalize: a bare dict means a single field definition.
        field_defs = [fields] if isinstance(fields, dict) else fields
        for df in field_defs:
            existing = frappe.db.get_value(
                "Custom Field", {"dt": doctype, "fieldname": df["fieldname"]})
            if existing:
                # Field already registered — apply the new properties.
                custom_field = frappe.get_doc("Custom Field", existing)
                custom_field.update(df)
                custom_field.save()
            else:
                try:
                    create_custom_field(doctype, df)
                except frappe.exceptions.DuplicateEntryError:
                    # Raced with a concurrent insert; the field now exists.
                    pass
@frappe.whitelist()
def add_custom_field(doctype, df):
    """Whitelisted endpoint: *df* arrives as a JSON string from the client."""
    return create_custom_field(doctype, json.loads(df))
|
tundebabzy/frappe
|
frappe/custom/doctype/custom_field/custom_field.py
|
Python
|
mit
| 4,677
|
#!/usr/bin/env python3
'''
Given two strings, create a function that returns the total number of unique characters from the combined string.
'''
def count_unique(s1, s2):
    """Return the number of distinct characters appearing in either string."""
    distinct_chars = set(s1) | set(s2)
    return len(distinct_chars)
|
JLJTECH/TutorialTesting
|
Edabit/SimpleSetLength.py
|
Python
|
mit
| 198
|
# this model handles game status awareness of the engine
# Defines the game object class
### Begin GameVars Definition ###
class GameVars(object):
    """Holds the variables of the current (or an empty placeholder) game.

    Changes from the original:
    * Removed a verbatim duplicate copy of twelve methods (starveTimer
      through timePerFood) that appeared twice in the class body with
      identical code — the later definitions simply shadowed the earlier
      ones, so dropping them preserves behavior.
    * Fixed isGameActive/isGameUpcoming, whose comparisons were inverted:
      the original isGameActive required start_at > now AND end_at < now,
      which can never both hold for a game whose start precedes its end, so
      it always returned False; isGameUpcoming returned True only when both
      times were already in the past (i.e. the game was over).
    """

    def __init__(self, id=False, start_at=False, end_at=False, time_per_food=0, stun_timer=0, cure_timer=0,
                 bite_shares_per_food=0,
                 pause_starts_at=0, pause_ends_at=0, created=0, posttimeout=1, signup_start_at=0, signup_end_at=0,
                 game_name=''):
        # Defaults produce an "empty" GameVars used when no game is active.
        self.id = id
        self.start_at = start_at
        self.end_at = end_at
        self.time_per_food = time_per_food
        self.stun_timer = stun_timer
        self.cure_timer = cure_timer
        self.signup_start_at = signup_start_at
        self.signup_end_at = signup_end_at
        self.bite_shares_per_food = bite_shares_per_food
        self.pause_starts_at = pause_starts_at
        self.pause_ends_at = pause_ends_at
        self.created = created
        self.posttimeout = posttimeout
        self.game_name = game_name

    def getId(self):
        return self.id

    def gameStart(self):
        return self.start_at

    def gameEnd(self):
        return self.end_at

    def getName(self):
        return self.game_name

    def isGameActive(self):
        # A game is active while "now" lies between its start and end.
        # (Original comparisons were inverted and unreachable; mirrors the
        # direction used by checkReg below. TODO(review): confirm boundary
        # inclusivity against the rest of the app.)
        now = getEstNow()
        return self.start_at < now and now < self.end_at

    def isGameUpcoming(self):
        # A game is upcoming while its start time is still in the future.
        # (Original returned True only when start AND end were in the past.)
        return getEstNow() < self.start_at

    def starveTimer(self):
        return self.time_per_food

    def stunTime(self):
        return self.stun_timer

    def postTimer(self):
        return self.posttimeout

    # checks if registration is open and returns True or False
    def checkReg(self):
        now = getEstNow()
        return self.signup_start_at < now and now < self.signup_end_at

    # returns a new starve timer based on the current time per food
    def addFoodTimer(self):
        return getEstNow() + timedelta(seconds=self.time_per_food)

    # returns a starvetimer with only half of the time
    def shareBite(self):
        return getEstNow() + timedelta(seconds=(self.time_per_food / 2))

    # checks if a the cure timer has elapsed since a player was last bitten (takes a joined rows object)
    def checkInfection(self, player):
        bitecheck = db(
            (db.bite_event.human_id == player.game_part.id) & (db.bite_event.game_id == self.id)).select().last()
        if not bitecheck:
            return False
        # NOTE(review): .seconds ignores the days component of the delta;
        # presumably cure_timer is always < 1 day — confirm.
        inftimer = getEstNow() - bitecheck.created
        return self.cure_timer <= inftimer.seconds

    # This is called by the bitecodepg controller and returns a bitecode form based on current game variables
    def buildBiteForm(self, qrargs):
        if self.bite_shares_per_food > 0:
            maxshare = int((self.time_per_food * .85))
            form = SQLFORM.factory(Field("Bitecode", default=qrargs),
                                   Field("Lat", default='', writable=True, requires=IS_NOT_EMPTY()),
                                   Field("Long", writable=True, requires=IS_NOT_EMPTY()),
                                   Field("share", 'boolean', label="Share this bite?"),
                                   Field("timeshared", 'integer', default=maxshare, label="Time to share"),
                                   submit_button="Bite!")
        else:
            form = SQLFORM.factory(Field("Bitecode", default=qrargs),
                                   Field("Lat", default='', writable=True, requires=IS_NOT_EMPTY()),
                                   Field("Long", writable=True, requires=IS_NOT_EMPTY()),
                                   Field("share", 'boolean', default=False, writable=False, readable=True),
                                   submit_button="Bite!")
        return form

    def sharingActive(self):
        return self.bite_shares_per_food > 0

    # returns the maximum amout of a share, based on the current time_per_food
    def maxShare(self):
        return int((self.time_per_food * .85))

    # returns the minimum amout of a share, based on the current time_per_food
    def minShare(self):
        return int((self.time_per_food * .15))

    # returns the total time_per_food
    def timePerFood(self):
        return self.time_per_food
### end GameVars class definition ###
# Checks to see if there is an active game.
# If there is it will create a GameVars object with the vars.
# If not it creates an empty GameVars object.
try:
    # Load the most recent game row; results are cached in RAM for 30s since
    # this model file runs on every request.
    games = db(db.games).select(db.games.ALL, orderby=db.games.created, cache=(cache.ram, 30), cacheable=True)
    if getEstNow() < games.last().end_at:
        gameinfo = GameVars(games.last().id, games.last().start_at, games.last().end_at, games.last().time_per_food,
                            games.last().stun_timer, games.last().cure_timer, games.last().bite_shares_per_food,
                            games.last().pause_starts_at, games.last().pause_ends_at,
                            games.last().created, games.last().posttimeout, games.last().signup_start_at,
                            games.last().signup_end_at, games.last().game_name)
    else:
        gameinfo = GameVars()
except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not
    # swallowed; any query failure (e.g. no game rows yet, so .last() is
    # None) still falls back to an empty GameVars.
    gameinfo = GameVars()
|
evadestruco/pandemic2
|
models/dbpreloader.py
|
Python
|
mit
| 8,857
|
# -*- coding: utf-8 -*-
from collections import Counter
__author__ = "Sergey Aganezov"
__email__ = "aganezov(at)cs.jhu.edu"
__status__ = "production"
class KBreak(object):
    """ A generic object that can represent any k-break ( k>= 2)

    A notion of k-break arises from the bioinformatics combinatorial object BreakpointGraph and is first mentioned in http://home.gwu.edu/~maxal/ap_tcs08.pdf

    A k-break removes k edges of a given multicolor (spanning 2k vertices) and
    re-wires the same vertices with k new edges of the same multicolor, so the
    degree of every affected vertex is preserved. Construction validates that
    the supplied start/result edge sets satisfy this invariant.

    Attributes:
    * :attr:`KBreak.start_edges`: pairs of vertices whose edges are removed
    * :attr:`KBreak.result_edges`: pairs of vertices whose edges are created
    * :attr:`KBreak.multicolor`: the :class:`bg.multicolor.Multicolor` of the affected edges
    """

    def __init__(self, start_edges, result_edges, multicolor, data=None):
        """ Initialization of :class:`KBreak` object.

        Validates that every supplied edge is a pair of vertices and that the
        start/result sets preserve vertex degrees, raising ``ValueError``
        otherwise.

        :param start_edges: pairs of vertices whose edges are to be removed
        :param result_edges: pairs of vertices whose edges are to be created
        :param multicolor: multicolor of the affected edges
        :param data: optional payload dict; defaults to ``{"origin": None}``
        :raises: ``ValueError``
        """
        self.start_edges = start_edges
        self.result_edges = result_edges
        self.multicolor = multicolor
        self.data = self.create_default_data_dict() if data is None else data
        # Both edge lists must contain strictly pairwise entries.
        for edge_list, location in ((self.start_edges, "start"), (self.result_edges, "result")):
            for vertex_pair in edge_list:
                if len(vertex_pair) == 2:
                    continue
                raise ValueError("Expected edges in a form of pairs of vertices.\n "
                                 "Not a pair of vertices ({issue}) in {location} edges."
                                 "".format(issue=str(vertex_pair), location=location))
        if not KBreak.valid_kbreak_matchings(start_edges=self.start_edges,
                                             result_edges=self.result_edges):
            raise ValueError("Supplied sets of start and result edges do not correspond to "
                             "correct k-break operation (either the set of vertices is not consistent, or "
                             "the degrees of vertices change)")

    @property
    def is_a_two_break(self):
        # A 2-break is the classic DCJ-style operation on exactly two edges.
        return len(self.start_edges) == 2

    @property
    def is_a_fusion(self):
        # A fusion is a 2-break that creates at least one edge whose both
        # endpoints are irregular (telomere-like) vertices.
        if not self.is_a_two_break:
            return False
        return any(all(vertex.is_irregular_vertex for vertex in vertex_set)
                   for vertex_set in self.result_edges)

    @classmethod
    def create_default_data_dict(cls):
        # Fresh dict per call so instances never share mutable state.
        return {"origin": None}

    @staticmethod
    def valid_kbreak_matchings(start_edges, result_edges):
        """ Check that start and result edges preserve every vertex's degree.

        Counts how often each vertex occurs across the start edges and across
        the result edges (occurrence count == degree) and requires the two
        multisets to be identical.

        :return: True when degrees match, False otherwise
        :rtype: ``Boolean``
        """
        degrees_before = Counter()
        degrees_after = Counter()
        for vertex_pair in start_edges:
            degrees_before.update(vertex_pair)
        for vertex_pair in result_edges:
            degrees_after.update(vertex_pair)
        return degrees_before == degrees_after
|
aganezov/bg
|
bg/kbreak.py
|
Python
|
mit
| 5,637
|
# One-off import script: loads teacher records from a JSON dump and builds
# Teacher model instances (Python 2 — uses u'' literals and py2 unicode
# handling).
# NOTE(review): the constructed Teacher objects are never saved — presumably
# the Django-style .save()/bulk create step was done elsewhere or is missing;
# confirm before reuse.
from schedule.models import *
import json

# Hard-coded absolute path to the developer's machine; the file handle is
# intentionally left to be closed at interpreter exit.
json_data = open('C:\\Users\\simeon.COMSOFTL\\Desktop\\HackFMI-backend\\FMICalendar-REST\\FMI-Data-master\\teachers.json','rb')
data = json.load(json_data)
# Maps department short names (mojibake here — originally Cyrillic, saved in
# the wrong encoding) to Department primary keys.
departmentsMap={u'Àëã': 1, u'ÀÌ':2, u'ÂÎÈÑ':3, u'Ãåîì':4, u'ÄÓ':5, u'ÈÑ':6, u'ÈÒ':7, u'ÊÀÒ':8, u'ÊÈ':9, u'ÌËÏ':10, u'ÌÀ':11, u'ÎÌÈ':12, u'ÑÒ':13, u'×ÌÀ':14, u'ËÌÌÈ~~':15, u'Íåîïð':16}
for x in data:
    try:
        t = Teacher(id=x['teacherId'], name=x['teacherName'], title=x['teacherTitle'], email=x['teacherEmail'], position=x['teacherPosition'], department=Department.objects.get(id=departmentsMap[x['department']]))
    except UnicodeEncodeError:
        # Skip records whose names cannot be encoded (encoding mismatch in
        # the source data).
        continue
|
DeltaEpsilon-HackFMI2/FMICalendar-REST
|
maper.py
|
Python
|
mit
| 680
|
def tree_mirror(node):
    """Mirror a binary tree in place: swap left/right children of every node.

    Accepts None (or any falsy sentinel) and returns None; nodes are visited
    iteratively with an explicit stack instead of recursion.
    """
    pending = [node]
    while pending:
        current = pending.pop()
        if not current:
            continue
        current.left, current.right = current.right, current.left
        pending.append(current.left)
        pending.append(current.right)
|
yangshun/tech-interview-handbook
|
experimental/utilities/python/tree_mirror.py
|
Python
|
mit
| 160
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._subscription_operations import build_accept_ownership_request_initial, build_accept_ownership_status_request, build_cancel_request, build_enable_request, build_rename_request
T = TypeVar('T')
# Type of the optional `cls` response hook accepted by every operation:
# called with (pipeline_response, deserialized_result, response_headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class SubscriptionOperations:
    """SubscriptionOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.subscription.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Generated by AutoRest — regenerate rather than hand-edit.
    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace_async
    async def cancel(
        self,
        subscription_id: str,
        **kwargs: Any
    ) -> "_models.CanceledSubscriptionId":
        """The operation to cancel a subscription.

        :param subscription_id: Subscription Id.
        :type subscription_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CanceledSubscriptionId, or the result of cls(response)
        :rtype: ~azure.mgmt.subscription.models.CanceledSubscriptionId
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CanceledSubscriptionId"]
        # Map auth/not-found/conflict statuses to specific azure-core exceptions;
        # callers may extend via the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_cancel_request(
            subscription_id=subscription_id,
            template_url=self.cancel.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is a documented success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponseBody, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CanceledSubscriptionId', pipeline_response)

        # Optional caller-supplied hook replaces the default return value.
        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Subscription/cancel'}  # type: ignore
    @distributed_trace_async
    async def rename(
        self,
        subscription_id: str,
        body: "_models.SubscriptionName",
        **kwargs: Any
    ) -> "_models.RenamedSubscriptionId":
        """The operation to rename a subscription.

        :param subscription_id: Subscription Id.
        :type subscription_id: str
        :param body: Subscription Name.
        :type body: ~azure.mgmt.subscription.models.SubscriptionName
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RenamedSubscriptionId, or the result of cls(response)
        :rtype: ~azure.mgmt.subscription.models.RenamedSubscriptionId
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RenamedSubscriptionId"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the request body against the SubscriptionName model schema.
        _json = self._serialize.body(body, 'SubscriptionName')

        request = build_rename_request(
            subscription_id=subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.rename.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponseBody, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('RenamedSubscriptionId', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    rename.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Subscription/rename'}  # type: ignore
    @distributed_trace_async
    async def enable(
        self,
        subscription_id: str,
        **kwargs: Any
    ) -> "_models.EnabledSubscriptionId":
        """The operation to enable a subscription.

        :param subscription_id: Subscription Id.
        :type subscription_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EnabledSubscriptionId, or the result of cls(response)
        :rtype: ~azure.mgmt.subscription.models.EnabledSubscriptionId
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.EnabledSubscriptionId"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_enable_request(
            subscription_id=subscription_id,
            template_url=self.enable.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponseBody, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('EnabledSubscriptionId', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    enable.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Subscription/enable'}  # type: ignore
    async def _accept_ownership_initial(
        self,
        subscription_id: str,
        body: "_models.AcceptOwnershipRequest",
        **kwargs: Any
    ) -> None:
        """Fire the initial request of the accept-ownership long-running
        operation. Success is HTTP 202 with no body; polling state is carried
        in the Location/Retry-After response headers."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(body, 'AcceptOwnershipRequest')

        request = build_accept_ownership_request_initial(
            subscription_id=subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self._accept_ownership_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 202 Accepted is the only documented success status for the initial call.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Polling headers passed to the cls hook (there is no response body).
        response_headers = {}
        response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
        response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))

        if cls:
            return cls(pipeline_response, None, response_headers)

    _accept_ownership_initial.metadata = {'url': '/providers/Microsoft.Subscription/subscriptions/{subscriptionId}/acceptOwnership'}  # type: ignore
    @distributed_trace_async
    async def begin_accept_ownership(
        self,
        subscription_id: str,
        body: "_models.AcceptOwnershipRequest",
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Accept subscription ownership.

        :param subscription_id: Subscription Id.
        :type subscription_id: str
        :param body:
        :type body: ~azure.mgmt.subscription.models.AcceptOwnershipRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Without a continuation token, start the LRO with the initial 202 call;
        # the `cls=lambda x,y,z: x` hook keeps the raw pipeline response for the poller.
        if cont_token is None:
            raw_result = await self._accept_ownership_initial(
                subscription_id=subscription_id,
                body=body,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # This LRO has no body on completion; only the optional cls hook
            # can produce a value (otherwise the poller resolves to None).
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously saved poller instead of issuing a new initial call.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_accept_ownership.metadata = {'url': '/providers/Microsoft.Subscription/subscriptions/{subscriptionId}/acceptOwnership'}  # type: ignore
@distributed_trace_async
async def accept_ownership_status(
    self,
    subscription_id: str,
    **kwargs: Any
) -> "_models.AcceptOwnershipStatusResponse":
    """Accept subscription ownership status.

    Queries the current status of a pending accept-ownership operation.

    :param subscription_id: Subscription Id.
    :type subscription_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AcceptOwnershipStatusResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.subscription.models.AcceptOwnershipStatusResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.AcceptOwnershipStatusResponse"]
    handlers = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    handlers.update(kwargs.pop('error_map', {}))

    # Build and normalize the GET request for the status endpoint.
    req = build_accept_ownership_status_request(
        subscription_id=subscription_id,
        template_url=self.accept_ownership_status.metadata['url'],
    )
    req = _convert_request(req)
    req.url = self._client.format_url(req.url)

    pipeline_response = await self._client._pipeline.run(req, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    # Anything other than 200 is mapped to a typed error and raised.
    if http_response.status_code not in [200]:
        map_error(status_code=http_response.status_code, response=http_response, error_map=handlers)
        err = self._deserialize.failsafe_deserialize(_models.ErrorResponseBody, pipeline_response)
        raise HttpResponseError(response=http_response, model=err, error_format=ARMErrorFormat)

    status = self._deserialize('AcceptOwnershipStatusResponse', pipeline_response)
    if custom_cls:
        return custom_cls(pipeline_response, status, {})
    return status
accept_ownership_status.metadata = {'url': '/providers/Microsoft.Subscription/subscriptions/{subscriptionId}/acceptOwnershipStatus'}  # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/subscription/azure-mgmt-subscription/azure/mgmt/subscription/aio/operations/_subscription_operations.py
|
Python
|
mit
| 15,342
|
#!/usr/bin/env python
import argparse, csv, urllib2, json, base64, getpass
from hurry.filesize import size, si
parser = argparse.ArgumentParser(description="script that uses Binder's API to add AIP size to the granular ingest report")
parser.add_argument('-i', '--input', type=str, required=True, help='source data file.')
parser.add_argument('-o', '--output', type=str, required=False, help='where to put output')
parser.add_argument('-u', '--username', type=str, help='Binder username')
args = parser.parse_args()
if not (args.input):
parser.error('you did not specify a report file')
if not (args.username):
parser.error('you did not supply a username')
password = getpass.getpass("Enter your password: ")
firstline = True
with open(args.input, 'rb') as csvfile:
c = csv.writer(open(args.output, "wb"))
c.writerow(["ingest date","size","UUID"])
orig_report = csv.reader(csvfile, delimiter=',')
for row in orig_report:
if firstline:
firstline = False
continue
uuid = row[2]
request = urllib2.Request("http://drmc.museum.moma.org/api/aips/"+uuid)
base64string = base64.encodestring('%s:%s' % (args.username, password)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
result = urllib2.urlopen(request)
date = row[6]
date_trimmed = date[:-10]
data = json.load(result)
size = data['size']
print date_trimmed, size, uuid
c.writerow([date_trimmed,size,uuid])
|
finoradin/moma-utils
|
reporting-tools/bandwidth.py
|
Python
|
mit
| 1,439
|
class Solution:
    """LeetCode 141: detect whether a singly linked list contains a cycle."""

    def hasCycle(self, head):
        """Return True iff the list starting at *head* loops back on itself.

        Fixes in this version: the original referenced `none` (NameError),
        had `else return False` (SyntaxError), and dereferenced
        `head.next.next` without guards, crashing on empty or short lists.

        Uses Floyd's tortoise-and-hare: a fast pointer moving two nodes per
        step meets a slow pointer moving one node per step iff there is a
        cycle.  O(n) time, O(1) space.
        """
        slow = fast = head
        while fast is not None and fast.next is not None:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                return True
        return False
|
xlres/leetcode
|
141.py
|
Python
|
mit
| 305
|
import RPi.GPIO as GPIO
import os
import time
class ButtonMatrix():
    """Scans a 4x4 keypad wired to the Raspberry Pi's GPIO (BCM numbering).

    ROW / COLUMN hold the BCM pin numbers; BUTTON_MATRIX maps a
    (row, column) position to the reported button number 1-16.
    """
    BUTTON_MATRIX = [[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]]
    ROW = [22,27,18,17]
    COLUMN = [10,9,11,23]

    def __init__(self):
        GPIO.setwarnings(False)
        GPIO.setmode(GPIO.BCM)

    def buttonPressed(self):
        """Return the pressed button's number, or None if nothing is pressed."""
        # Drive every column low; a pressed button pulls its row input low.
        for i in range(len(self.COLUMN)):
            GPIO.setup(self.COLUMN[i], GPIO.OUT)
            GPIO.output(self.COLUMN[i], GPIO.LOW)
        for i in range(len(self.ROW)):
            GPIO.setup(self.ROW[i], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        row_value = None
        for i in range(len(self.ROW)):
            if GPIO.input(self.ROW[i]) == 0:
                row_value = i
        # FIX: the original tested `row_value < 0 or row_value > 3`, which
        # raises TypeError on Python 3 when no button is pressed (row_value
        # is None).  Test for None explicitly; behavior is unchanged.
        if row_value is None:
            self.exit()
            return
        # Reverse the scan: drive the detected row high, find which column
        # input is pulled high.
        for i in range(len(self.COLUMN)):
            GPIO.setup(self.COLUMN[i], GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
        GPIO.setup(self.ROW[row_value], GPIO.OUT)
        GPIO.output(self.ROW[row_value], GPIO.HIGH)
        column_value = None
        for i in range(len(self.COLUMN)):
            if GPIO.input(self.COLUMN[i]) == 1:
                column_value = i
        # FIX: same None-safety issue as the row scan above.
        if column_value is None:
            self.exit()
            return
        self.exit()
        return self.BUTTON_MATRIX[row_value][column_value]

    def exit(self):
        """Restore all pins to pulled-up inputs between scans."""
        for i in range(len(self.ROW)):
            GPIO.setup(self.ROW[i], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        for j in range(len(self.COLUMN)):
            GPIO.setup(self.COLUMN[j], GPIO.IN, pull_up_down=GPIO.PUD_UP)
if __name__ == '__main__':
    # Poll the keypad forever, forwarding each press to Pure Data via pdsend.
    matrix = ButtonMatrix()
    while True:
        pressed = matrix.buttonPressed()
        if pressed is not None:
            os.system("echo ' %d;' | pdsend 3000" % (pressed))
            # Buttons 13, 15 and 16 use a shorter debounce delay so they
            # can repeat faster; everything else waits a quarter second.
            time.sleep(0.05 if pressed in (15, 16, 13) else 0.25)
            pressed = None
|
akiress/music
|
ff/matrix.py
|
Python
|
mit
| 2,256
|
import dropbox
from django import forms
class ImportForm(forms.Form):
    """Form that lets a user pick top-level Dropbox folders to import."""

    def __init__(self, *args, **kwargs):
        # `token` and `client` are consumed from kwargs before the base
        # Form sees them (token is currently unused but must be popped).
        token = kwargs.pop('token')
        client = kwargs.pop('client')
        super(ImportForm, self).__init__(*args, **kwargs)
        root = client.metadata('/')
        folder_choices = [
            (entry.get('path'), entry.get('path'))
            for entry in root.get('contents')
            if entry.get('is_dir')
        ]
        self.fields['folders'] = forms.MultipleChoiceField(
            required=True,
            choices=[('select_all_option', 'Select All')] + folder_choices,
            widget=forms.CheckboxSelectMultiple(),
        )
|
phildini/bockus
|
books/forms.py
|
Python
|
mit
| 671
|
from spyre import server
import matplotlib.pyplot as plt
import numpy as np
class SimpleApp(server.App):
    """Spyre app: a frequency slider driving a sine-wave plot."""

    title = "Simple Sine App"
    inputs = [{
        "type": "slider",
        "key": "freq",
        "value": 5, "max": 10,
        "action_id": "sine_wave_plot"
    }]
    outputs = [{"type": "plot",
                "id": "sine_wave_plot"}]

    def getPlot(self, params):
        """Return a matplotlib figure of sin(freq * x) over [0, 2*pi)."""
        freq = float(params['freq'])
        xs = np.arange(0, 2 * np.pi, np.pi / 150)
        fig = plt.figure()
        axes = fig.add_subplot(1, 1, 1)
        axes.plot(xs, np.sin(freq * xs))
        return fig

    def getCustomCSS(self):
        """Return CSS giving the page a background image."""
        return (
            "body { background-image: "
            "url('https://github.com/adamhajari/spyre/blob/master/"
            "examples/screenshots/jungle-cruise-gallery06.jpg?raw=true');}"
        )
# Instantiate and serve the app (launch() blocks until the server stops).
app = SimpleApp()
app.launch()
|
adamhajari/spyre
|
examples/simple_sine_background_image_example.py
|
Python
|
mit
| 897
|
"""
WSGI config for kanjoos project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings (unless already set) and expose
# the WSGI callable that servers like gunicorn/uwsgi will import.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "kanjoos.settings")
application = get_wsgi_application()
|
vijayaganesh/Kanjoos-HackGT
|
django-webapp/kanjoos/kanjoos/wsgi.py
|
Python
|
mit
| 392
|
# vpnr 2
# Session script for subject vp02: a fixed, pre-randomized sequence of
# stimulus trials with self-paced breaks, ending with two movie clips.
# The trial order is part of the experimental design -- do not reorder.
run_trial(rws[22], duration=8.0)
run_trial(cm100, duration=8.0, speed=300)
run_trial(rws[12], duration=8.0)
run_trial(rbs[12], duration=8.0)
run_trial(msm2, duration=7.000, speed=200)
run_trial(msm1, duration=7.000, speed=200)
run_trial(cm100, duration=8.0, speed=150)
run_trial(msm1, duration=9.000, speed=150)
run_trial(rws[5], duration=8.0)
run_trial(mem2, duration=9.000, speed=150)
run_trial(rbs[5], duration=8.0)
run_trial(rws[19], duration=8.0)
run_trial(rbs[1], duration=8.0)
run_trial(rbs[8], duration=8.0)
run_trial(mem0, duration=2.500, speed=800)
run_trial(rws[23], duration=8.0)
run_trial(mem2, duration=4.000, speed=400)
run_trial(mem2, duration=2.500, speed=800)
run_trial(rws[10], duration=8.0)
run_trial(hori2, duration=1.500, speed=800)
run_trial(hori1, duration=3.000, speed=400)
# Self-paced break ("Take a short break. Continue with the space bar.")
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(hori0, duration=3.000, speed=400)
run_trial(mem1, duration=4.000, speed=400)
run_trial(rbs[13], duration=8.0)
run_trial(mem1, duration=2.500, speed=800)
run_trial(msm0, duration=5.000, speed=300)
run_trial(hori2, duration=3.000, speed=400)
run_trial(cm200, duration=8.0, speed=400)
run_trial(cm400, duration=8.0, speed=200)
run_trial(rbs[21], duration=8.0)
run_trial(mem2, duration=5.000, speed=300)
run_trial(hori1, duration=6.000, speed=200)
run_trial(hori0, duration=4.000, speed=300)
run_trial(cm200, duration=8.0, speed=200)
run_trial(mem0, duration=9.000, speed=150)
run_trial(cm400, duration=8.0, speed=400)
run_trial(rbs[16], duration=8.0)
run_trial(msm2, duration=5.000, speed=300)
run_trial(rbs[3], duration=8.0)
run_trial(mem1, duration=7.000, speed=200)
run_trial(rws[3], duration=8.0)
run_trial(rbs[15], duration=8.0)
# Self-paced break.
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(rbs[11], duration=8.0)
run_trial(rbs[6], duration=8.0)
run_trial(rbs[19], duration=8.0)
run_trial(rws[9], duration=8.0)
run_trial(rws[11], duration=8.0)
run_trial(mem2, duration=3.000, speed=600)
run_trial(msm1, duration=5.000, speed=300)
run_trial(rbs[18], duration=8.0)
run_trial(msm2, duration=9.000, speed=150)
run_trial(rbs[0], duration=8.0)
run_trial(rws[2], duration=8.0)
run_trial(hori2, duration=4.000, speed=300)
run_trial(msm2, duration=3.000, speed=600)
run_trial(mem1, duration=5.000, speed=300)
run_trial(cm400, duration=8.0, speed=150)
run_trial(msm2, duration=4.000, speed=400)
run_trial(rws[1], duration=8.0)
run_trial(rbs[4], duration=8.0)
run_trial(hori0, duration=8.000, speed=150)
run_trial(rbs[9], duration=8.0)
run_trial(msm0, duration=4.000, speed=400)
# Self-paced break.
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(msm0, duration=9.000, speed=150)
run_trial(hori1, duration=1.500, speed=800)
run_trial(rws[6], duration=8.0)
run_trial(rws[13], duration=8.0)
run_trial(rws[14], duration=8.0)
run_trial(hori2, duration=8.000, speed=150)
run_trial(msm0, duration=7.000, speed=200)
run_trial(mem0, duration=5.000, speed=300)
run_trial(mem1, duration=3.000, speed=600)
run_trial(msm2, duration=2.500, speed=800)
run_trial(hori0, duration=2.000, speed=600)
run_trial(rws[24], duration=8.0)
run_trial(cm200, duration=8.0, speed=800)
run_trial(mem2, duration=7.000, speed=200)
run_trial(hori0, duration=1.500, speed=800)
run_trial(hori1, duration=4.000, speed=300)
run_trial(hori1, duration=8.000, speed=150)
run_trial(hori0, duration=6.000, speed=200)
run_trial(cm400, duration=8.0, speed=600)
run_trial(cm100, duration=8.0, speed=200)
run_trial(msm0, duration=3.000, speed=600)
# Self-paced break.
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(rbs[20], duration=8.0)
run_trial(mem0, duration=7.000, speed=200)
run_trial(msm1, duration=3.000, speed=600)
run_trial(rws[15], duration=8.0)
run_trial(rbs[22], duration=8.0)
run_trial(rws[17], duration=8.0)
run_trial(rbs[24], duration=8.0)
run_trial(rbs[14], duration=8.0)
run_trial(cm100, duration=8.0, speed=600)
run_trial(rws[20], duration=8.0)
run_trial(msm1, duration=4.000, speed=400)
run_trial(cm400, duration=8.0, speed=800)
run_trial(cm400, duration=8.0, speed=300)
run_trial(msm1, duration=2.500, speed=800)
run_trial(hori2, duration=6.000, speed=200)
run_trial(rws[16], duration=8.0)
run_trial(rbs[2], duration=8.0)
run_trial(rws[7], duration=8.0)
run_trial(hori2, duration=2.000, speed=600)
run_trial(cm200, duration=8.0, speed=600)
run_trial(rws[8], duration=8.0)
# Self-paced break.
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(cm200, duration=8.0, speed=150)
run_trial(cm100, duration=8.0, speed=800)
run_trial(mem1, duration=9.000, speed=150)
run_trial(rws[0], duration=8.0)
run_trial(cm100, duration=8.0, speed=400)
run_trial(rws[4], duration=8.0)
run_trial(mem0, duration=4.000, speed=400)
run_trial(rbs[7], duration=8.0)
run_trial(rbs[17], duration=8.0)
run_trial(hori1, duration=2.000, speed=600)
run_trial(rws[21], duration=8.0)
run_trial(rbs[23], duration=8.0)
run_trial(rws[18], duration=8.0)
run_trial(mem0, duration=3.000, speed=600)
run_trial(cm200, duration=8.0, speed=300)
run_trial(msm0, duration=2.500, speed=800)
run_trial(rbs[10], duration=8.0)
# Closing videos: first with sound, then without.
run_movie(movie1audio, 'Jetzt folgt ein Video mit Ton.\n\nWeiter mit Leertaste')
run_movie(movie2noaudio, 'Jetzt folgt ein Video OHNE Ton.\n\nWeiter mit Leertaste')
|
derNarr/synchronicity
|
experiment/sessions/ses_vp02.py
|
Python
|
mit
| 5,331
|
import unittest
from example import main_1
class TestExample(unittest.TestCase):
    """Verifies that example.main_1 reads the fixture file as expected."""
    def test_example_read_file(self):
        # main_1 is expected to return the file's three lines as strings.
        self.assertEqual(main_1(), ['1', '2', '3'])
# Allow running this test module directly with `python test.py`.
if __name__ == '__main__':
    unittest.main()
|
berjc/code-complete
|
examples/open_file/test.py
|
Python
|
mit
| 224
|
from flask import (Flask,
render_template, session,
redirect, url_for, flash
)
# extensions (Flask eklentileri)
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired, Length
app = Flask(__name__)
# NOTE(review): the secret key and DEBUG=True are hard-coded -- acceptable
# for a tutorial, but move them to environment variables before deployment.
app.config.update(
    SECRET_KEY = "48fsdfs+dg5!423a-.das/",
    SQLALCHEMY_DATABASE_URI = "sqlite:///data.sqlite",
    SQLALCHEMY_TRACK_MODIFICATIONS = True,
    DEBUG=True,
)
# Extension instances bound to this application.
db = SQLAlchemy(app)
bootstrap = Bootstrap(app)
moment = Moment(app)
manager = Manager(app)
class UserForm(FlaskForm):
    """
    Form class corresponding to an HTML form.

    - Each form class maps to one HTML form in the templates.
    - `name` is required and must be 3-20 characters long.
    """
    name = StringField('İsmini gir', validators=[DataRequired(), Length(3,20)])
    submit = SubmitField('Gönder')
class User(db.Model):
    """
    SQLAlchemy model mapped to the `users` table.

    - Model classes derive from db.Model; each class maps to one table.
    - `username` must be unique across all rows.
    """
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), unique=True)
@app.route('/')
def index(name='Yabancı'):
    """Render the landing page, greeting the session's stored name if any.

    Falls back to the default 'Yabancı' ("Stranger") when no name has
    been saved in the session yet.
    """
    stored = session.get('name')
    if stored:
        name = stored
    return render_template('index.html', name=name)
@app.route('/kayıt', methods=['GET', 'POST'])
def register():
    """Registration view: store the submitted name in the session.

    On a valid POST, the form's `name` field is saved to the session and
    the user is redirected to the index view; otherwise the form is
    rendered (again).
    """
    form = UserForm()
    if not form.validate_on_submit():
        return render_template('register.html', form=form)
    session["name"] = form.name.data
    return redirect(url_for('index'))
@app.route('/veritabanına-kayıt', methods=['GET', 'POST'])
def register_todb():
    """Persist the submitted name as a User row and remember it in the session."""
    form = UserForm()
    if not form.validate_on_submit():
        return render_template('registertodb.html', form=form)
    new_user = User(username=form.name.data)
    db.session.add(new_user)
    db.session.commit()
    session["name"] = new_user.username
    flash("Kullanıcı veritabanına kaydedildi.")
    return redirect(url_for('index'))
@app.route('/user/<username>')
def user(username):
    """Show the profile page for *username*.

    Passes the first matching User row (or None when no row matches)
    to the template.
    """
    match = User.query.filter_by(username=username).first()
    return render_template('user.html', user=match)
# Flask-Script entry point, e.g. `python hello.py runserver`.
if __name__ == '__main__':
    manager.run()
|
metmirr/wwwmtndmrcom
|
flask_egitimi/partone/app/hello.py
|
Python
|
mit
| 2,920
|
import os
import vision_main
import serial
import commands
import time
import jenga_logic
def versus_human_smart():
ser = serial.Serial('/dev/ttyS0', 9600);
print ("serial successful");
t = jenga_logic.create_tower();
currentxyz =[];
while(jenga_logic.current_state(t)): #while tower is still up
t_full = vision_main.get_tower_from_vision();
t = t_full[0];
print("robot thinks the tower is");
print(t);
next_move = jenga_logic.make_best_move(t);
in_str = str(next_move[0]) + ' ';
in_str += str(next_move[1]) + ' ';
in_str += str(next_move[2]) + ' ';
in_str += str(next_move[3]) + ' 1 >waypoints.txt';
in_str = './motorpath '+ in_str;
os.system(in_str);
vision_mode = 'take'
way_point_file = open('./waypoints.txt');
for line in way_point_file:
print (line);
print("line was printed");
inpt = line;
if (line[0] == 'P'):
if (currentxyz == []):
assert(False);
startr, startc, endr, endc = next_move;
if (vision_mode == 'take'):
r_goal = startr;
c_goal = startc;
else:
r_goal = endr;
c_goal = endc;
error = vision_main.distance_from_goal(r_goal, c_goal, t_full);
print('error is');
print(error);
i = raw_input("test_error")
new_goal = [];
for i in range(3):
new_goal.append(current_xyz[i] + error[i]);
lin = [];
for i in range(3):
st = str(new_goal[i]).zfill(5);
lin.append(theta_servo);
lin = ','.join(lin);
lin = 'M,' + lin + '\n';
print('lin is now');
print(lin);
i = raw_input('testing error correction');
if (line[0] == 'M'):
inpt = inpt[2::];#get rid of the M,
inpt = inpt.split(',');
theta_goal = int(float(inpt[-1])); #save the theta goal
inpt = inpt[:-1];
inpt = ' '.join(inpt);
cmd_str = "./jenga_test "+inpt;
lin = commands.getoutput(cmd_str);
lin = lin.split(',');
xyz_temp = lin[:-1];
currentxyz = map(int, xyz_temp);
theta_ee = int(float(lin[-1]));
lin = lin[:-1];
theta_servo = theta_goal - theta_ee;
theta_servo = theta_servo + 100;#should be 90
theta_servo = str(theta_servo).zfill(4);
#print(theta_ee);
lin.append(theta_servo);
lin = ','.join(lin);
lin = lin.replace(' ', '');
lin = 'M,' + lin + '\n';
else:
lin = line;
print (lin)
print("lin was printed");
time.sleep(1)
ser.setDTR(level=0)
time.sleep(1)
temp = ser.write(lin);
result = ser.readline();
print result;
print('move was')
print(next_move);
print(tower_is);
way_point_file.close();
jenga_logic.make_full_move(t, next_move[0], next_move[1], next_move[2], next_move[3]);
print (t);
ser.close()
return;
def versus_human():
    """Versus-human mode without live vision correction (Python 2).

    The human types in their move (1-indexed), then the robot plans and
    executes its own move by streaming ./motorpath waypoints over serial.
    The triple-quoted blocks are vision / kinematics error-correction
    code kept inactive for reference.
    """
    ser = serial.Serial('/dev/ttyS0', 9600);
    #coms_ser = serial.Serial('/dev/ttyUSB0', 115200);
    print ("serial successful");
    t = jenga_logic.create_tower();
    currentxyz =[];
    while(jenga_logic.current_state(t)): #while tower is still up
        #t_full = vision_main.get_tower_from_vision();
        """if (t_full[0] != t):
            print("tower seen is different from tower stored");
            print(t);
            print('tower seen is');
            print(t_full[0]);
            t_full[0] = t;"""
        # The human reports their move in 1-indexed coordinates; converted
        # to 0-indexed before updating the logical tower.
        opp_move1 = raw_input('what row are you taking your block from?');
        opp_move2 = raw_input("what about the col?");
        opp_move3 = raw_input("what row are you placing it to?");
        opp_move4 = raw_input("what about the col?");
        jenga_logic.make_full_move(t, int(opp_move1)-1, int(opp_move2)-1, int(opp_move3)-1, int(opp_move4)-1);
        print("tower is now");
        print(t);
        next_move = jenga_logic.make_best_move(t);
        print ("our move is (in 0 index system)");
        print(next_move);
        in_str = str(next_move[0]) + ' ';
        in_str += str(next_move[1]) + ' ';
        in_str += str(next_move[2]) + ' ';
        in_str += str(next_move[3]) + ' 0 >waypoints.txt';
        in_str = './motorpath '+ in_str;
        os.system(in_str);
        vision_mode = 'take'
        way_point_file = open('./waypoints.txt');
        for line in way_point_file:
            print (line);
            print("line was printed");
            inpt = line;
            if (line[0] == 'P'):
                """if (currentxyz == []):
                    assert(False);
                startr, startc, endr, endc = next_move;
                if (vision_mode == 'take'):
                    r_goal = startr;
                    c_goal = startc;
                else:
                    r_goal = endr;
                    c_goal = endc;
                error = vision_main.distance_from_goal(r_goal, c_goal, t_full);
                print('error is');
                print(error);"""
                # Pause waypoint: the operator may type a manual 'M'
                # correction command to send instead.
                i = raw_input("correct for error")
                if (i != ''):
                    if (i[0] == 'M'):
                        print("fixing error");
                        lin = i;
                        print(lin);
                """new_goal = [];
                for i in range(3):
                    new_goal.append(current_xyz[i] + error[i]);
                lin = [];
                for i in range(3):
                    st = str(new_goal[i]).zfill(5);
                lin.append(theta_servo);
                lin = ','.join(lin);
                lin = 'M,' + lin + '\n';
                print('lin is now');
                print(lin);
                i = raw_input('testing error correction');"""
            elif (line[0] == 'M'):
                # Motion waypoints are forwarded unchanged (the inverse-
                # kinematics post-processing below is disabled).
                lin = line;
                """inpt = inpt[2::];#get rid of the M,
                inpt = inpt.split(',');
                theta_goal = int(float(inpt[-1])); #save the theta goal
                inpt = inpt[:-1];
                inpt = ' '.join(inpt);
                cmd_str = "./jenga_test "+inpt;
                lin = commands.getoutput(cmd_str);
                lin = lin.split(',');
                xyz_temp = lin[:-1];
                currentxyz = map(int, xyz_temp);
                theta_ee = int(float(lin[-1]));
                lin = lin[:-1];
                theta_servo = theta_goal - theta_ee;
                theta_servo = theta_servo + 100;#should be 90
                theta_servo = str(theta_servo).zfill(4);
                #print(theta_ee);
                lin.append(theta_servo);
                lin = ','.join(lin);
                lin = lin.replace(' ', '');
                lin = 'M,' + lin + '\n';"""
            else:
                lin = line;
            print (lin)
            print("lin was printed");
            time.sleep(1)
            ser.setDTR(level=0)
            time.sleep(.1)
            temp = ser.write(lin);
            result = ser.readline();
            print result;
        print('move was')
        print(next_move);
        print("tower_is");
        way_point_file.close();
        jenga_logic.make_full_move(t, next_move[0], next_move[1], next_move[2], next_move[3]);
        #coms_test.send_move_ser(coms_ser, next_move);
        print (t);
    ser.close()
    return;
def one_player_main():
    """Autonomous (single-player) mode: the robot plays alone (Python 2).

    Each turn it picks the best move for the current logical tower,
    generates waypoints with ./motorpath, and streams them to the arm
    over serial.  The triple-quoted blocks are vision / kinematics
    error-correction code kept inactive for reference.
    """
    ser = serial.Serial('/dev/ttyS0', 9600);
    #coms_ser = serial.Serial('/dev/ttyUSB0', 115200);
    print ("serial successful");
    t = jenga_logic.create_tower();
    currentxyz =[];
    while(jenga_logic.current_state(t)): #while tower is still up
        #t_full = vision_main.get_tower_from_vision();
        """if (t_full[0] != t):
            print("tower seen is different from tower stored");
            print(t);
            print('tower seen is');
            print(t_full[0]);
            t_full[0] = t;"""
        next_move = jenga_logic.make_best_move(t);
        print(next_move);
        in_str = str(next_move[0]) + ' ';
        in_str += str(next_move[1]) + ' ';
        in_str += str(next_move[2]) + ' ';
        in_str += str(next_move[3]) + ' 0 >waypoints.txt';
        in_str = './motorpath '+ in_str;
        os.system(in_str);
        vision_mode = 'take'
        way_point_file = open('./waypoints.txt');
        for line in way_point_file:
            print (line);
            print("line was printed");
            inpt = line;
            if (line[0] == 'P'):
                """if (currentxyz == []):
                    assert(False);
                startr, startc, endr, endc = next_move;
                if (vision_mode == 'take'):
                    r_goal = startr;
                    c_goal = startc;
                else:
                    r_goal = endr;
                    c_goal = endc;
                error = vision_main.distance_from_goal(r_goal, c_goal, t_full);
                print('error is');
                print(error);"""
                # Pause waypoint: the operator may type a manual 'M'
                # correction command to send instead.
                i = raw_input("correct for error")
                if (i != ''):
                    if (i[0] == 'M'):
                        print("fixing error");
                        lin = i;
                        print(lin);
                """new_goal = [];
                for i in range(3):
                    new_goal.append(current_xyz[i] + error[i]);
                lin = [];
                for i in range(3):
                    st = str(new_goal[i]).zfill(5);
                lin.append(theta_servo);
                lin = ','.join(lin);
                lin = 'M,' + lin + '\n';
                print('lin is now');
                print(lin);
                i = raw_input('testing error correction');"""
            elif (line[0] == 'M'):
                # Motion waypoints are forwarded unchanged (the inverse-
                # kinematics post-processing below is disabled).
                lin = line;
                """inpt = inpt[2::];#get rid of the M,
                inpt = inpt.split(',');
                theta_goal = int(float(inpt[-1])); #save the theta goal
                inpt = inpt[:-1];
                inpt = ' '.join(inpt);
                cmd_str = "./jenga_test "+inpt;
                lin = commands.getoutput(cmd_str);
                lin = lin.split(',');
                xyz_temp = lin[:-1];
                currentxyz = map(int, xyz_temp);
                theta_ee = int(float(lin[-1]));
                lin = lin[:-1];
                theta_servo = theta_goal - theta_ee;
                theta_servo = theta_servo + 100;#should be 90
                theta_servo = str(theta_servo).zfill(4);
                #print(theta_ee);
                lin.append(theta_servo);
                lin = ','.join(lin);
                lin = lin.replace(' ', '');
                lin = 'M,' + lin + '\n';"""
            else:
                lin = line;
            print (lin)
            print("lin was printed");
            time.sleep(1)
            ser.setDTR(level=0)
            time.sleep(.1)
            temp = ser.write(lin);
            result = ser.readline();
            print result;
        print('move was')
        print(next_move);
        print("tower_is");
        way_point_file.close();
        jenga_logic.make_full_move(t, next_move[0], next_move[1], next_move[2], next_move[3]);
        #coms_test.send_move_ser(coms_ser, next_move);
        print (t);
    ser.close()
    return;
def callibrate():
    """Interactive serial console: forward raw commands to the arm.

    Reads commands from the operator and writes them to the serial port
    until the operator types 'break'.
    """
    ser = serial.Serial('/dev/ttyS0', 9600)
    print ("serial successful")
    while (True):
        lin = raw_input("what input should I do?")
        print('lin is ' + lin)
        if (lin == 'break'):
            return
        # Pulse DTR, give the board a moment, then send and echo the reply.
        ser.setDTR(level=0)
        time.sleep(.1)
        temp = ser.write(lin)
        result = ser.readline()
        print(result)
# Script entry: run interactive calibration first, then autonomous play.
callibrate()
one_player_main()
#versus_human();
|
Robbbb/Jenga
|
main_first_demo.py
|
Python
|
mit
| 12,229
|
import numpy as np
class SamplingPlannerBase(object):
    """
    Defines the basic functionalities that a sampling based planner
    should have.

    All methods are intentionally no-op stubs; concrete planners
    (e.g. RRT, PRM) are expected to override them.
    """
    def __init__(self, collision_checker, state_sampler):
        # collision_checker: used to validate candidate states/edges.
        # state_sampler: source of random candidate states.
        pass
    def set_goal(self, goal_state):
        """Set the goal state the planner should try to reach."""
        pass
    def set_init(self, init_state):
        """Set the initial (start) state of the planning problem."""
        pass
    def plan(self, max_iter):
        """Run the planner for at most *max_iter* iterations."""
        pass
    def get_path(self):
        """Return the planned path (format defined by subclasses)."""
        pass
|
yuhangc/planning_algorithms
|
algorithms/sampling_based/planner_base.py
|
Python
|
mit
| 410
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the nullable `city` FK to Shop."""

    def forwards(self, orm):
        # Adding field 'Shop.city'
        db.add_column(u'shop_shop', 'city',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=None, related_name='city_charity', null=True, to=orm['city.City']),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Shop.city'
        db.delete_column(u'shop_shop', 'city_id')

    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in history -- auto-generated, do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'city.city': {
            'Meta': {'object_name': 'City'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'latitude': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '10'}),
            'longitude': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '10'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'play.player': {
            'Meta': {'object_name': 'Player'},
            'city': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'city'", 'null': 'True', 'to': u"orm['city.City']"}),
            'experience': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '5', 'decimal_places': '0'}),
            'facebook_pic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '4', 'decimal_places': '0'}),
            'picture_url': ('django.db.models.fields.CharField', [], {'default': "'/static/img/avatar-1.png'", 'max_length': '400', 'null': 'True'}),
            'score': ('django.db.models.fields.DecimalField', [], {'default': '20', 'null': 'True', 'max_digits': '4', 'decimal_places': '0'}),
            'token': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'shop.coupon': {
            'Meta': {'object_name': 'Coupon'},
            'buyers': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'to': u"orm['play.Player']", 'null': 'True', 'symmetrical': 'False'}),
            'coupons_released': ('django.db.models.fields.DecimalField', [], {'default': '10', 'max_digits': '4', 'decimal_places': '0'}),
            'description': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
            'picture_url': ('django.db.models.fields.CharField', [], {'default': "'/static/img/stanford.png'", 'max_length': '200', 'null': 'True'}),
            'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '0'}),
            'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Shop']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
        },
        u'shop.shop': {
            'Meta': {'object_name': 'Shop'},
            'city': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'city_charity'", 'null': 'True', 'to': u"orm['city.City']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
            'picture_url': ('django.db.models.fields.CharField', [], {'default': "'/static/img/stanford.png'", 'max_length': '200', 'null': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'default': "'Super shop!'", 'max_length': '100', 'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        }
    }

    complete_apps = ['shop']
|
fraferra/PlayCity
|
shop/migrations/0002_auto__add_field_shop_city.py
|
Python
|
mit
| 7,562
|
try:
    from setuptools import setup
except ImportError:
    # Fall back to distutils when setuptools is unavailable.
    from distutils.core import setup

import lua_call

# Package metadata; the version and long description come from the
# lua_call module itself so they are defined in exactly one place.
setup(
    name="lua_call",
    version=lua_call.VERSION,
    author="Josiah Carlson",
    author_email="josiah.carlson@gmail.com",
    url="http://github.com/josiahcarlson/lua-call/",
    download_url="http://pypi.python.org/pypi/lua_call/",
    py_modules=["lua_call"],
    description="Call Lua scripts from other Lua scripts in Redis",
    long_description=lua_call.__doc__,
    license="MIT",
    classifiers=[
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Topic :: Software Development :: Libraries",
        "Topic :: Utilities",
    ]
)
|
josiahcarlson/lua-call
|
setup.py
|
Python
|
mit
| 860
|
# Tutorial script demonstrating the btnlib/ledlib helpers: cycles LEDs,
# then mirrors button presses onto LEDs while the switch is on.
#the following lines will allow you to use buttons and leds
import btnlib as btn
import ledlib as led
import time
#the led.startup() function cycles through the leds
led.startup()
time.sleep(1)
print("All on and off")
#to turn on all leds, use the led.turn_on_all() function:
led.turn_on_all()
time.sleep(2)
#to turn off all:
led.turn_off_all()
time.sleep(1)
print("Orange on and off")
#to turn on a single led, use a command like this:
led.turn_on(led.orange)
#your choices for leds are led.orange, led.blue, led.green, led.red
# NOTE(review): led.purple is used further down, so the list above is
# probably incomplete — confirm against ledlib.
time.sleep(2)
#to turn it off:
led.turn_off(led.orange)
time.sleep(1)
print("Blue with isOn test")
#the led.isOn(led) function tells you if a particular led is currently on
if led.isOn(led.blue):
    print("Blue is on")
else:
    print("Blue is off")
time.sleep(2)
led.turn_on(led.blue)
if led.isOn(led.blue):
    # Fixed: this branch reports the BLUE led (the original printed
    # "Yellow is on" here, contradicting the else branch below).
    print("Blue is on")
else:
    print("Blue is off")
time.sleep(5)
led.turn_off(led.blue)
time.sleep(9)
print("Green and purple switch")
#the led.switch(led) function knows whether an led is on or off and switches its value
led.turn_on(led.green)
time.sleep(3)
led.switch(led.green)
led.switch(led.purple)
time.sleep(1)
led.switch(led.purple)
time.sleep(2)
print("If switch is on, press orange for orange and purple for purple")
#the btn.isOn(btn) function tells you if a particular button is being pressed or if a switch is on
#your choices for buttons are currently btn.orange, btn.blue, btn.switch
# NOTE(review): the loop below reads btn.red and btn.yellow, which are not
# in the list above — confirm the available button names against btnlib.
while btn.isOn(btn.switch):
    if btn.isOn(btn.red):
        led.switch(led.orange)
    if btn.isOn(btn.yellow):
        led.switch(led.purple)
    time.sleep(0.50) #this line keeps it from querying too fast and mistaking a long press for multiple presses
print("Goodbye")
btn.GPIO.cleanup()
|
majikpig/ubtech
|
adrianathomas.py
|
Python
|
mit
| 1,727
|
# -*- coding: utf-8 -*-
"""
WSGI config for Beniamino_Nobile project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# The settings module must be registered before the application is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web.settings")
application = get_wsgi_application()
|
beniaminonobile/www2017.beniamino_nobile.it
|
web/wsgi.py
|
Python
|
mit
| 421
|
from Module import AbstractModule
class Module(AbstractModule):
    def __init__(self):
        AbstractModule.__init__(self)
    def run(
        self, network, antecedents, out_attributes, user_options, num_cores,
        out_path):
        # Merge per-sample FASTQ fragments into one file per sample (and per
        # read pair), preserving the order given in the sample group file,
        # then verify that paired output files stay aligned read-for-read.
        import os
        from genomicode import filelib
        from genomicode import parallel
        from Betsy import module_utils
        # This is I/O heavy, don't use so many cores.
        MAX_CORES = 2
        fastq_node, group_node = antecedents
        fastq_path = fastq_node.identifier
        sample_group_file = group_node.identifier
        filelib.safe_mkdir(out_path)
        metadata = {}
        module_utils.assert_sample_group_file(sample_group_file, fastq_path)
        x = module_utils.read_sample_group_file(group_node.identifier)
        x = module_utils.fix_sample_group_filenames(x, fastq_path)
        sample_groups = x
        # For merging, the order of the files in the sample_group_file
        # must be maintained. Otherwise, will be merged out of order.
        # The new files should be named:
        # <Sample>.fastq # if single end
        # <Sample>_<Pair>.fastq # if paired end
        jobs = []
        for x in sample_groups:
            in_filename, sample, pair = x
            #in_filename = os.path.join(fastq_path, file_)
            assert os.path.exists(in_filename)
            out_file = "%s.fastq" % sample
            if pair:
                out_file = "%s_%s.fastq" % (sample, pair)
            out_filename = os.path.join(out_path, out_file)
            x = in_filename, sample, pair, out_filename
            jobs.append(x)
        # Group the input fragments by their destination file.
        out2ins = {} # out_filename -> list of in_filenames
        for x in jobs:
            in_filename, sample, pair, out_filename = x
            if out_filename not in out2ins:
                out2ins[out_filename] = []
            out2ins[out_filename].append(in_filename)
        commands = []
        for out_filename, in_filenames in out2ins.iteritems():
            # Debugging. Don't merge again if it already exists.
            if os.path.exists(out_filename):
                continue
            args = in_filenames, out_filename
            keywds = {}
            x = merge_or_symlink_files, args, keywds
            commands.append(x)
        commands.sort()
        nc = min(MAX_CORES, num_cores)
        parallel.pyfun(commands, nc)
        metadata["num_cores"] = nc
        # If the files are paired, make sure they are paired
        # correctly.
        sample2outfiles = {} # sample -> list of out filenames
        for x in jobs:
            in_filename, sample, pair, out_filename = x
            if sample not in sample2outfiles:
                sample2outfiles[sample] = []
            if out_filename not in sample2outfiles[sample]:
                sample2outfiles[sample].append(out_filename)
        commands = []
        all_samples = sorted(sample2outfiles)
        for sample in all_samples:
            out_filenames = sorted(sample2outfiles[sample])
            if len(out_filenames) == 1:
                continue
            # Make sure they are aligned.
            x = check_fastq_alignment, (sample, out_filenames), {}
            commands.append(x)
        commands.sort()
        retvals = parallel.pyfun(commands, nc)
        assert len(retvals) == len(commands)
        # check_fastq_alignment returns None on success, error string on failure.
        errors = [x for x in retvals if x]
        assert not errors, "\n".join(errors)
        return metadata
    def name_outfile(self, antecedents, user_options):
        # Fixed output filename for this module's merged result.
        return "merged.fastq"
def merge_or_symlink_files(in_filenames, out_filename):
    """Concatenate in_filenames into out_filename, or symlink when possible.

    out_filename must not already exist.  With a single input file whose
    extension marks it as uncompressed, a symlink is created instead of
    copying the data.
    """
    import os
    from genomicode import filelib
    CHUNK_SIZE = 1024*1024
    assert not os.path.exists(out_filename)
    # If only one file, and it's not compressed, then just symlink it.
    # NOTE(review): the extension test accepts .fa/.fasta even though this
    # module merges FASTQ files — confirm the intended extensions.
    if len(in_filenames) == 1:
        in_filename = in_filenames[0]
        x, ext = os.path.splitext(in_filename)
        if ext.lower() in [".fa", ".fasta"]:
            os.symlink(in_filename, out_filename)
            return
    # Create an empty outfile that can be appended to.
    open(out_filename, 'w').close()
    # Append the files in order.  The original left every handle for the
    # garbage collector; closing explicitly avoids leaking descriptors.
    for in_filename in in_filenames:
        in_handle = filelib.openfh(in_filename)
        try:
            out_handle = open(out_filename, 'a')
            try:
                while True:
                    chunk = in_handle.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    out_handle.write(chunk)
            finally:
                out_handle.close()
        finally:
            in_handle.close()
def check_fastq_alignment(sample, fastq_filenames):
    # Verify that paired FASTQ files list the same reads in the same order.
    # Returns None when aligned, otherwise a short error message string.
    # (Python 2 only: uses iterator.next() and "except E, x" syntax.)
    from genomicode import genomelib
    # Only compare up to the first space.
    # @NS500195:257:HJ2F7BGXY:1:11101:19771:1064 1:N:0:GTACGCAA
    # @NS500195:257:HJ2F7BGXY:1:11101:19771:1064 2:N:0:GTACGCAA
    alignment = None
    iters = [genomelib.read_fastq(x) for x in fastq_filenames]
    nit = 0  # iteration counter; incremented below but otherwise unused
    while alignment is None:
        # Advance every file by one record in lockstep.
        fastq_recs = [None] * len(iters)
        stopped = [False] * len(iters)
        for i in range(len(iters)):
            try:
                fastq_recs[i] = iters[i].next()
            except StopIteration, x:
                stopped[i] = True
        # If all the files are done, then stop this.
        if stopped == [True]*len(iters):
            break
        # If some of the files are done, then they are not aligned.
        # Different lengths.
        if True in stopped:
            alignment = "Files different lengths: %s" % sample
            break
        # Pull out the titles.
        x = [x[0] for x in fastq_recs]
        x = [x.split()[0] for x in x]
        titles = x
        # Make sure each of the titles are the same.
        for i in range(1, len(titles)):
            if titles[i] != titles[0]:
                alignment = "Title mismatch: %s %s %s" % (
                    sample, titles[0], titles[i])
        nit += 1
    return alignment
|
jefftc/changlab
|
Betsy/Betsy/modules/merge_reads.py
|
Python
|
mit
| 5,930
|
# pieces by height
raw = """
1 2 211 2 2
11 21 1 11 1
111 1 1 11 """.lstrip()
raw_pieces = ["\n".join(line[4*i:4*i+3] for line in raw.splitlines()) for i in range(5)]
import numpy
def from_heights(s):
    """Build a 3x3x3 occupancy array from a 2D string of column heights."""
    grid = numpy.zeros((3, 3, 3), int)
    for y, row in enumerate(s.splitlines()):
        for x, ch in enumerate(row):
            try:
                height = int(ch)
            except ValueError:
                # Non-digit characters (blanks) mean no cubes in this column.
                continue
            grid[x, y, :height] = 1
    return grid
# Build the five puzzle pieces as 3x3x3 occupancy arrays.
pieces = [from_heights(s) for s in raw_pieces]
# to do: sort largest to smallest
def rotations24(polycube):
    """Yield the 24 proper (orientation-preserving) rotations of a 3D array."""
    # imagine shape is pointing in axis 0 (up)
    # 4 rotations about axis 0
    yield from rotations4(polycube, 0)
    # rotate 180 about axis 1, now shape is pointing down in axis 0
    # 4 rotations about axis 0
    yield from rotations4(rot90(polycube, 2, axis=1), 0)
    # rotate 90 or 270 about axis 1, now shape is pointing in axis 2
    # 8 rotations about axis 2
    yield from rotations4(rot90(polycube, axis=1), 2)
    yield from rotations4(rot90(polycube, -1, axis=1), 2)
    # rotate about axis 2, now shape is pointing in axis 1
    # 8 rotations about axis 1
    yield from rotations4(rot90(polycube, axis=2), 1)
    yield from rotations4(rot90(polycube, -1, axis=2), 1)
def rotations4(polycube, axis):
    """Yield the four successive quarter-turn rotations of polycube about axis."""
    for quarter_turns in range(4):
        yield rot90(polycube, quarter_turns, axis)
def rot90(m, k=1, axis=2):
    """Rotate m by k quarter-turns counter-clockwise about the given axis."""
    # Move the target axis into numpy.rot90's rotation plane, rotate, move back.
    moved = numpy.swapaxes(m, 2, axis)
    turned = numpy.rot90(moved, k)
    return numpy.swapaxes(turned, 2, axis)
def solve(progress, pieces):
    """Recursively place pieces[0] in every legal position.

    progress : occupancy grid accumulated so far (int array)
    pieces   : list where pieces[i] holds the candidate placements of piece i
    Returns a list of placement arrays (one per piece) on success,
    or False when no combination fits.
    """
    if not pieces:
        # Every piece has been placed.
        return []
    for possible in pieces[0]:
        # Overlap check: any cell occupied twice would exceed 1.
        if numpy.max(progress + possible) > 1:
            continue
        attempt = solve(progress + possible, pieces[1:])
        # `is not False`: an empty list [] is a valid, complete solution
        # and must not be confused with failure (the original compared
        # a list against False with !=).
        if attempt is not False:
            return [possible] + attempt
    return False
def transformations(polycube):
    """List all distinct placements: every rotation of every translation."""
    candidates = [rotated
                  for shifted in translations(polycube)
                  for rotated in rotations24(shifted)]
    return distinct(candidates)
def distinct(arrays):
    """Return arrays with element-wise-equal duplicates removed, order kept."""
    unique = []
    for candidate in arrays:
        if not any(numpy.array_equal(candidate, seen) for seen in unique):
            unique.append(candidate)
    return unique
def translations(polycube):
    """Yield every translate of polycube that stays inside the 3x3x3 grid."""
    # The piece is anchored at the origin, so only non-negative shifts matter.
    assert polycube[0,0,0] != 0 # so don't have to bother with negative translations
    extents = tuple(coords.max() for coords in numpy.nonzero(polycube))
    for dx in range(3 - extents[0]):
        for dy in range(3 - extents[1]):
            for dz in range(3 - extents[2]):
                yield numpy.roll(polycube, (dx, dy, dz), axis=(0, 1, 2))
def pretty(solution):
    """Combine placements into one array whose cells hold 1-based piece indices."""
    return sum(index * placement for index, placement in enumerate(solution, start=1))
if __name__ == "__main__":
    # NOTE(review): `two` and `three` are defined but never used below —
    # presumably leftovers from testing; confirm before removing.
    two = numpy.array([[[1, 0],
                        [1, 0]],
                       [[1, 1],
                        [0, 0]]])
    three = numpy.array([[[1, 1, 0],
                          [1, 1, 0],
                          [0, 0, 0]],
                         [[0, 0, 0],
                          [1, 0, 0],
                          [1, 0, 0]],
                         [[0, 0, 0],
                          [0, 0, 0],
                          [0, 0, 0]]])
    # Precompute every distinct placement of each piece, then search.
    transformations_by_piece = [transformations(piece) for piece in pieces]
    solution = solve(numpy.zeros((3,3,3),int), transformations_by_piece)
    print(pretty(solution))
|
hickford/soma-cube-solver
|
soma-cube-solver.py
|
Python
|
mit
| 3,625
|
"""
This module allows to manipulate binary packed objects as classes.
Similarly to the standard library struct module, objects can be or unpacked.
"""
import struct
__author__ = 'David Berthelot'
def StructClass(name, format, fields, defaults=None):
    """Create a class whose instances pack/unpack as a fixed struct layout.

    :name: The name of the class to create
    :format: The struct format (refer to the standard struct module documentation)
    :fields: The list of field names corresponding to the struct entries
    :defaults: An optional dictionary of default value for some or all of the fields
    """
    # Normalize here instead of using a mutable ``{}`` default argument,
    # which would be a single dict shared across every call of the factory.
    if defaults is None:
        defaults = {}

    class _Struct(object):
        _FORMAT = format
        _FIELDS = fields

        def __init__(self, **entries):
            # Fields missing from ``entries`` fall back to ``defaults``
            # (or None); unknown keys in ``entries`` are silently ignored.
            for field in self._FIELDS:
                self.__dict__[field] = entries.get(field, defaults.get(field))

        def pack(self):
            """Serialize the instance to bytes using the class format."""
            return struct.pack(self._FORMAT,
                               *[self.__dict__[field] for field in self._FIELDS])

        @classmethod
        def unpack(cls, input_bytes):
            """Unpacks input bytes, returns the unpacked object and the remaining bytes from the input"""
            packsize = struct.calcsize(cls._FORMAT)
            values = struct.unpack(cls._FORMAT, input_bytes[:packsize])
            entries = dict(zip(cls._FIELDS, values))
            return cls(**entries), input_bytes[packsize:]

        def __repr__(self):
            args = ', '.join(field + '=' + repr(self.__dict__[field])
                             for field in self._FIELDS)
            return self.__class__.__name__ + '(' + args + ')'

    # Give the generated class the caller's chosen name (the inner class
    # no longer shadows this factory's own name).
    _Struct.__name__ = name
    return _Struct
|
david-berthelot/StructClass
|
structclass/StructClass.py
|
Python
|
mit
| 1,541
|
class Node(object):
    """A single singly-linked-list cell: a value plus a next pointer."""

    def __init__(self, val, next=None):
        """Store the value and an optional reference to the following node."""
        self.val = val
        self.next = next
class LinkedList(object):
    """Singly linked list with head/tail pointers and an element count."""

    def __init__(self):
        """Start empty: no nodes, zero length."""
        self.head = None
        self.tail = None
        self.length = 0

    def add_to_head(self, val):
        """Prepend val in O(1)."""
        if self.head is None:
            self.head = self.tail = Node(val)
        else:
            self.head = Node(val, self.head)
        self.length += 1

    def add_to_tail(self, val):
        """Append val in O(1)."""
        if self.tail is None:
            self.head = self.tail = Node(val)
        else:
            new_node = Node(val)
            self.tail.next = new_node
            self.tail = new_node
        self.length += 1

    def delete_from_head(self):
        """Remove and return the head node (None when the list is empty)."""
        removed = self.head
        if removed is not None:
            self.head = removed.next
            self.length -= 1
        if self.length == 0:
            self.tail = None
        return removed

    def insert_in_sorted(self, val):
        """Insert val, keeping the list in non-decreasing order."""
        if self.head is None:
            # Empty list: the new node is both head and tail.
            self.head = self.tail = Node(val)
        elif val <= self.head.val:
            # New smallest value becomes the head.
            self.head = Node(val, self.head)
        else:
            # Walk to the last node whose successor is still smaller.
            node = self.head
            while node.next is not None and node.next.val < val:
                node = node.next
            node.next = Node(val, node.next)
            if node.next.next is None:
                self.tail = node.next
        self.length += 1

    def print_list(self):
        """Return the values as a plain Python list, head first."""
        values = []
        node = self.head
        while node is not None:
            values.append(node.val)
            node = node.next
        return values
def sum_2_helper(cur1, cur2, llist):
    """Sum two equal-length digit lists (most-significant digit first) into llist.

    cur1, cur2 : head nodes of the two digit lists
    llist      : an (empty) LinkedList that receives the result digits
    Returns llist.

    The original recursive version had three defects: it passed Node
    objects to add_to_head (which wraps them in another Node), it returned
    None when the final digit sum was below 10, and it could not tell a
    carry marker apart from a genuine result digit of 1.  This iterative
    version carries explicitly instead.
    """
    # Collect the digits of both numbers; the original advanced both
    # cursors in lockstep, so equal length is assumed here as well.
    digits1, digits2 = [], []
    while cur1 is not None:
        digits1.append(cur1.val)
        cur1 = cur1.next
    while cur2 is not None:
        digits2.append(cur2.val)
        cur2 = cur2.next
    # Add from the least-significant end, prepending each result digit.
    carry = 0
    for d1, d2 in zip(reversed(digits1), reversed(digits2)):
        carry, digit = divmod(d1 + d2 + carry, 10)
        llist.add_to_head(digit)
    if carry:
        llist.add_to_head(carry)
    return llist
def sum_2_lists(l1, l2):
    """Return a LinkedList holding the digit-wise sum of lists l1 and l2."""
    return sum_2_helper(l1.head, l2.head, LinkedList())
|
tanyaweaver/code-katas
|
src/insert_in_sorted_ll.py
|
Python
|
mit
| 2,551
|
# global variables
import libtcodpy as lib
from bearlibterminal import terminal
from Classes.Entity import Entity
from Classes.Tile import Tile
from Classes.Fighter import Fighter
from helpers.player_death import player_death
# Screen / UI layout (in console cells).
SCREEN_WIDTH = 80
SCREEN_HEIGHT = 50
MAP_WIDTH = 80
MAP_HEIGHT = 43
PANEL_HEIGHT = 7
PANEL_Y = SCREEN_HEIGHT - PANEL_HEIGHT
BAR_WIDTH = 20
MSG_X = BAR_WIDTH + 2
MSG_WIDTH = SCREEN_WIDTH - BAR_WIDTH - 2
MSG_HEIGHT = PANEL_HEIGHT - 1
# create list of game messages and colors, starts empty
game_msgs = []
# Dungeon generation parameters (rectangular-room algorithm).
ROOM_MAX_SIZE = 10
ROOM_MIN_SIZE = 6
MAX_ROOMS = 30
FOV_ALGO = 0 # default FOV algorithm
FOV_LIGHT_WALLS = True
TORCH_RADIUS = 10
inventory = []
INVENTORY_WIDTH = 50
# Item / spell effect constants.
HEAL_AMOUNT = 40
LIGHTNING_DAMAGE = 40
LIGHTNING_RANGE = 5
CONFUSE_NUM_TURNS = 10
CONFUSE_RANGE = 8
FIREBALL_RADIUS = 3
FIREBALL_DAMAGE = 25
# experience and level-ups
LEVEL_UP_BASE = 200
LEVEL_UP_FACTOR = 150
LEVEL_SCREEN_WIDTH = 40
# character screen
CHARACTER_SCREEN_WIDTH = 30
# Entities
# None placeholders below are populated elsewhere during game setup.
fighter_component = None
player = None
stairs = None
entities = None
dungeon_level = None
# map stuff
# NOTE(review): "map" shadows the builtin; renaming would touch all users.
map = None
fov_map = None
fov_recompute = True
# bsp map stuff
DEPTH = 10
MIN_SIZE = 5
FULL_ROOMS = True
bsp_rooms = []
game_state = 'playing'
player_action = None
# options
old_school_tiles = False
bsp_map_gen = False
graphical_tiles = False
# character
player_char = '@' # '[0xE02C]'
|
nuzcraft/RLTut
|
helpers/variables.py
|
Python
|
mit
| 1,399
|
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import math
from collections import deque
__author__ = 'ecialo'
def cut(size):
    """Return a function that crops a 2D frame to its centred size x size square."""
    def crop(frame):
        h, w = frame.shape
        # // keeps the slice indices integral: the original / yields floats
        # under Python 3 and breaks the indexing (identical result on
        # Python 2 for ints).
        return frame[(h - size) // 2:(h + size) // 2,
                     (w - size) // 2:(w + size) // 2]
    return crop
def resize_and_cut(size, scale):
    """Return a function that rescales a 2D frame by `scale`, then crops the
    centred size x size square."""
    def transform(frame):
        h, w = frame.shape
        h, w = int(h * scale), int(w * scale)
        resized = cv2.resize(frame, (w, h))
        # // keeps slice indices integral (plain / breaks on Python 3,
        # where true division produces float indices).
        return resized[(h - size) // 2:(h + size) // 2,
                       (w - size) // 2:(w + size) // 2]
    return transform
def make_rolling_frame(size, roughness):
    """Return a compositor that takes band i (of `roughness` rows) from frame i,
    stacks the bands, and zero-pads the result to `size` rows."""
    def compose(frames):
        bands = [frame[roughness * i:roughness * (i + 1), :]
                 for i, frame in enumerate(frames)]
        head = np.vstack(bands)
        try:
            h, w = head.shape
        except ValueError:
            # Fallback kept from the original: reshape when the stacked
            # result does not unpack as 2D — presumably a degenerate band
            # shape; confirm which inputs trigger this.
            head = head.reshape((roughness, head.shape[0]))
            h, w = head.shape
        tail = np.zeros((size - h, w), head.dtype)
        return np.concatenate((head, tail))
    return compose
def add_frame_to_frames(size):
    """Return an appender that pushes frames onto a bounded deque."""
    def append(frames, frame):
        # Drop the oldest frame first so the buffer never exceeds `size`.
        if len(frames) >= size:
            frames.popleft()
        frames.append(frame)
    return append
def configure_tools(frame, roughness, scale, maxsize, framerate, result_filename):
    # Derive the output geometry from the first frame and build the helper
    # closures, per-channel buffers, and the output video writer.
    maxsize = maxsize or int(min(frame.shape)*scale)
    stack_size = int(math.ceil(float(maxsize)/roughness))
    appender = add_frame_to_frames(stack_size)
    composer = make_rolling_frame(maxsize, roughness)
    cutter = resize_and_cut(maxsize, scale)
    r, g, b = deque(), deque(), deque()
    result_video = cv2.VideoWriter(
        result_filename,
        cv2.cv.FOURCC(*'mp4v'),
        # cv2.cv.FOURCC(*'xvid'),
        # cv2.cv.FOURCC(*'MJPG'),
        # cv2.cv.FOURCC('P','I','M','1'),
        framerate,
        (maxsize, maxsize), # Breaks on non-square videos because of an opencv bug
    )
    return maxsize, appender, composer, cutter, r, g, b, result_video
def unblockable_rolling_shutter(filename, roughness=1, scale=1.0, maxsize=None, framerate=25, result_filename=None):
    """
    Applies a rolling-shutter effect to a video and saves the result.
    Each band of `roughness` pixel rows of every processed frame lags the
    previous band by one source frame.
    :param filename: name of the video file
    :type filename: str
    :param roughness: size of the delay band, in rows
    :type roughness: int
    :param scale: spatial scale factor applied to the frames
    :type scale: float
    :param maxsize: maximum frame size
    :type maxsize: int
    :return:
    :rtype:
    """
    video = cv2.VideoCapture(filename)
    result_filename = result_filename or filename.split(".")[0] + "_lagged.avi"
    result_video = None
    r, g, b = None, None, None
    is_configured = False
    frame_counter = 0
    cutter, appender, composer = None, None, None
    while True:
        is_success, frame = video.read()
        frame_counter += 1
        if frame_counter % 100 == 0:
            print frame_counter
        if is_success:
            channels = cv2.split(frame)
            # Lazily configure on the first frame, once geometry is known.
            if not is_configured:
                maxsize, appender, composer, cutter, r, g, b, result_video = configure_tools(
                    channels[0], roughness, scale, maxsize, framerate, result_filename
                )
                is_configured = True
            new_b, new_g, new_r = map(cutter, channels)
            appender(b, new_b)
            appender(g, new_g)
            appender(r, new_r)
            new_frame = cv2.merge([
                composer(b),
                composer(g),
                composer(r)
            ])
            result_video.write(new_frame)
            yield
        else:
            break
    # Flush: keep feeding the last captured frame until every band rolls out.
    for i in xrange(int(math.ceil(float(maxsize)/roughness))):
        frame_counter += 1
        if frame_counter % 100 == 0:
            print "+"
        appender(b, new_b)
        appender(g, new_g)
        appender(r, new_r)
        new_frame = cv2.merge([
            composer(b),
            composer(g),
            composer(r)
        ])
        result_video.write(new_frame)
        yield
    video.release()
    result_video.release()
def rolling_shutter(filename, roughness=1, scale=1.0, maxsize=None, framerate=25, result_filename=None):
    """Blocking wrapper: run the rolling-shutter generator to completion."""
    # Simply drain the generator; the original reduce(lambda _, __: None, ...)
    # did the same thing less clearly and is not Python 3 compatible
    # (the builtin reduce moved to functools).
    for _ in unblockable_rolling_shutter(filename, roughness, scale, maxsize,
                                         framerate, result_filename):
        pass
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("filename")
    parser.add_argument("-r", "--roughness", type=int, default=1)
    parser.add_argument("-s", "--scale", type=float, default=1.0)
    parser.add_argument("-m", "--maxsize", type=int, default=None)
    # NOTE(review): the stray trailing comma below creates a throwaway
    # tuple — harmless but likely unintended.
    parser.add_argument("-f", "--framerate", type=int, default=25),
    parser.add_argument("-rf", "--result_filename", default=None)
    args = parser.parse_args()
    # print args
    # args.__dict__ maps each option name onto the matching keyword argument.
    rolling_shutter(**args.__dict__)
|
Ecialo/imsoflexible
|
rolling_shutter.py
|
Python
|
mit
| 5,186
|
from os import path
class ConfigReader:
    """Thin wrapper over a configparser-style mapping.

    Reads properties from one section (DEFAULT when none is given) and can
    optionally strip surrounding whitespace from every value returned.
    """
    __CONFIG_LIST_SEPARATOR = ','
    DEFAULT_SECTION = "DEFAULT"

    def __init__(self, config, section_name=None, strip_properties=False):
        """Initializes an instance of Config Reader that wraps python configReader.

        If no section_name is provided then the default section is used.
        """
        self.__config = config
        self.__section_name = section_name or ConfigReader.DEFAULT_SECTION
        self.__strip_properties = strip_properties

    def get_config_property(self, property_name):
        """Return the (optionally stripped) value stored under property_name.

        :param property_name: property name in *.ini file
        :return: string with value for the property
        """
        raw_value = self.__config[self.__section_name][property_name]
        return self.__strip_property(raw_value)

    def get_config_property_value_list(self, property_name):
        """Return the comma-separated values under property_name as a list.

        :param property_name: property name in *.ini file
        :return: stripped list of values split on the list separator
        """
        joined = self.get_config_property(property_name)
        if not joined:
            raise IndexError('Error when reading configuration file')
        return [self.__strip_property(part)
                for part in joined.split(ConfigReader.__CONFIG_LIST_SEPARATOR)]

    def __strip_property(self, txt):
        # Stripping is applied only when requested at construction time.
        return txt.strip() if self.__strip_properties else txt
class ResourcesReader(ConfigReader):
    """Config reader that also resolves the game's resources folder on disk."""

    __RESOURCES_FOLDER = "resourcesFolderName"

    def __init__(self, game_folder, config, section_name):
        """Read settings from section_name and locate the resources folder."""
        ConfigReader.__init__(self, config, section_name, True)
        folder_name = config[ConfigReader.DEFAULT_SECTION][ResourcesReader.__RESOURCES_FOLDER]
        self.__game_folder = path.join(game_folder, folder_name)

    @property
    def resources_folder(self):
        """Path of the resources directory inside the game folder."""
        return self.__game_folder
|
Superzer0/pyRiverRaid
|
objects/resources/ConfigReader.py
|
Python
|
mit
| 2,072
|
# -*- coding: UTF-8 -*-
__author__ = 'ARA'
__all__ = ['computeLocalID', 'computeGlobalID']
import numpy as np
# ...
def initLocalID(faces, n, base):
    """Dispatch local id numbering to the 1D or 2D implementation."""
    if len(n) == 1:
        return initLocalID_1D(faces, n, base)
    if len(n) == 2:
        return initLocalID_2D(faces, n, base)
def initLocalID_1D(faces, n, base):
    """Number the non-Dirichlet nodes of a 1D patch.

    faces : iterable of Dirichlet face ids (0 raises the left bound,
            1 lowers the right bound)
    n     : [n0] grid size
    base  : id offset from previously numbered patches
    Returns (ids, new_base): Dirichlet nodes hold 0, interior nodes hold
    consecutive ids starting at base + 1.
    """
    # dtype=int replaces the np.int alias, which NumPy 1.24 removed;
    # np.int was documented as identical to the builtin int.
    ids = -np.ones(n, dtype=int)
    ib, ie = 0, n[0] - 1
    for face in faces:
        if face == 0:
            ib += 1
        if face == 1:
            ie -= 1
    for i in range(ib, ie + 1):
        ids[i] = (i - ib) + base
    # Shift everything up by one so that untouched (-1) nodes become 0.
    ids += 1
    base += ie - ib + 1
    return ids, base
def initLocalID_2D(faces, n, base):
    """Number the non-Dirichlet nodes of a 2D patch, i fastest then j.

    faces : iterable of Dirichlet face ids; per the adjustments below,
            0 raises jb, 2 lowers je, 1 raises ib, 3 lowers ie.
    n     : [n0, n1] grid size
    base  : id offset from previously numbered patches
    Returns (ids, new_base): Dirichlet nodes hold 0, interior nodes hold
    consecutive ids starting at base + 1.
    """
    # dtype=int replaces the np.int alias removed in NumPy 1.24.
    ids = -np.ones(n, dtype=int)
    ib, ie = 0, n[0] - 1
    jb, je = 0, n[1] - 1
    for face in faces:
        if face == 0:
            jb += 1
        if face == 2:
            je -= 1
        if face == 1:
            ib += 1
        if face == 3:
            ie -= 1
    ne = ie - ib + 1
    for j in range(jb, je + 1):
        for i in range(ib, ie + 1):
            ids[i, j] = (j - jb) * ne + (i - ib) + base
    # Shift up by one so that untouched (-1) nodes become 0.
    ids += 1
    base += (je - jb) * ne + ie - ib + 1
    return ids, base
# ...
def print_id(id):
    """Pretty-print a local id array, dispatching on its dimension."""
    if id.ndim == 1:
        print_id_1D(id)
    elif id.ndim == 2:
        print_id_2D(id)
def print_id_1D(id):
    """Print a 1D id array (via a copy, so the input stays untouched)."""
    display = np.zeros_like(id)
    n, = id.shape
    for i in range(n):
        display[i] = id[i]
    print(display.transpose())
def print_id_2D(id):
    """Print a 2D id array with the j axis reversed, then its flat layout."""
    display = np.zeros_like(id)
    n, m = id.shape
    for j in range(m):
        for i in range(n):
            display[i, j] = id[i, -j - 1]
    print(display.transpose())
    print(id.transpose().reshape(id.size))
# ...
# ...
def isDuplicata(patch_id, face, DuplicataPatchs):
    """Tell whether the (patch_id, face) pair appears in the duplicata list."""
    return any(entry[0] == patch_id and entry[1] == face
               for entry in DuplicataPatchs)
# ...
# ...
def get_ij_1D(n, f):
    """Index list of boundary face f of a 1D patch (0: left node, 1: right node)."""
    if f == 0:
        indices = [0]
    if f == 1:
        indices = [n[0] - 1]
    return indices
def get_ij_2D(n, f):
    """(i, j) index lists along face f of an n[0] x n[1] patch.

    Per the assignments below: f=0 runs along j=0, f=1 along i=0,
    f=2 along j=n[1]-1, f=3 along i=n[0]-1.
    """
    if f == 0:
        list_i, list_j = list(range(n[0])), [0] * n[0]
    if f == 1:
        list_i, list_j = [0] * n[1], list(range(n[1]))
    if f == 2:
        list_i, list_j = list(range(n[0])), [n[1] - 1] * n[0]
    if f == 3:
        list_i, list_j = [n[0] - 1] * n[1], list(range(n[1]))
    return list_i, list_j
# ...
def updateDuplicated_1D(n_m, n_s, list_id, p_m, f_m, p_s, f_s):
    """Copy master-face ids onto the matching slave-face entries (1D).

    p_m : master patch
    f_m : master face
    p_s : slave patch
    f_s : slave face
    """
    master_ids = list_id[p_m]
    slave_ids = list_id[p_s]
    for i_m, i_s in zip(get_ij_1D(n_m, f_m), get_ij_1D(n_s, f_s)):
        slave_ids[i_s] = master_ids[i_m]
    return list_id
def updateDuplicated_2D(n_m, n_s, list_id, p_m, f_m, p_s, f_s):
    """Copy master-face ids onto the matching slave-face entries (2D).

    p_m : master patch
    f_m : master face
    p_s : slave patch
    f_s : slave face
    """
    list_i_m, list_j_m = get_ij_2D(n_m, f_m)
    list_i_s, list_j_s = get_ij_2D(n_s, f_s)
    master_ids = list_id[p_m]
    slave_ids = list_id[p_s]
    for i_m, j_m, i_s, j_s in zip(list_i_m, list_j_m, list_i_s, list_j_s):
        slave_ids[i_s, j_s] = master_ids[i_m, j_m]
    return list_id
def updateDuplicated(n_m, n_s, list_id, p_m, f_m, p_s, f_s):
    """Dispatch duplicated-face id propagation by patch dimension."""
    if len(n_m) == 1:
        return updateDuplicated_1D(n_m, n_s, list_id, p_m, f_m, p_s, f_s)
    if len(n_m) == 2:
        return updateDuplicated_2D(n_m, n_s, list_id, p_m, f_m, p_s, f_s)
# ...
# ...
def computeLocalID(list_n, DirFaces, DuplicatedFaces, DuplicataFaces):
    # Number local ids patch by patch, treating duplicata (slave) faces
    # like Dirichlet faces during numbering, then copy the master-face ids
    # onto the matching slave faces.
    dim = len(list_n[0])
    npatchs = len(list_n)
    AllFaces = list(range(0,2 * dim))
    AllPatchs = list(range(0,npatchs))
    BasePatchs = [0]
    DuplicatedPatchs = list(np.unique(np.array([data[0] for data in DuplicatedFaces])))
    DuplicataPatchs = list(np.unique(np.array([data[0] for data in DuplicataFaces])))
    base = 0
    list_id = []
    for i in range(0, npatchs):
        list_id.append([])
    for patch_id,faces in enumerate(DirFaces):
        _faces = [f for f in faces]
        # ... update faces, adding the duplicated faces
        if patch_id in DuplicataPatchs:
            list_faces = [f for f in AllFaces if f not in faces]
            for f in list_faces:
                if isDuplicata(patch_id, f, DuplicataFaces):
                    _faces.append(f)
        id, base = initLocalID(_faces, list_n[patch_id], base)
        list_id[patch_id] = id
    # print "-------------- INIT ------------------"
    # for i,id in enumerate(list_id):
    # print "...... patch id : ", i, " ......"
    # print_id(id)
    for data_m, data_s in zip(DuplicatedFaces, DuplicataFaces):
        p_m = data_m[0] ; f_m = data_m[1]
        p_s = data_s[0] ; f_s = data_s[1]
        n_m = list_n[p_m] ; n_s = list_n[p_s]
        list_id = updateDuplicated(n_m, n_s, list_id, p_m, f_m, p_s, f_s)
    return list_id
# ...
# ...
def computeGlobalID(list_id):
    """Flatten the per-patch id arrays (transposed order) into one global list."""
    flat = []
    for patch_ids in list_id:
        flat.extend(patch_ids.transpose().reshape(patch_ids.size))
    return flat
# ...
if __name__ == '__main__':
    # NOTE(review): the entire demo below is disabled by `if False:`;
    # flip it to True to run the timed 2D four-patch example.
    if False:
        from time import time
        t_start = time()
        PRINT = True
        # PRINT = False
        ## list_n = [[4]]*3
        # list_n = [[1024]]*3
        # DirFaces = [[0],[],[1]]
        # DuplicatedFaces = [[0,1],[1,1]]
        # DuplicataFaces = [[1,0],[2,0]]
        list_n = [[3,3]]*4
        # list_n = [[1024,1024]]*4
        DirFaces = [[1,2],[2,3],[0,3],[0,1]]
        DuplicatedFaces = [[0,3],[1,0],[2,1],[0,0]]
        DuplicataFaces = [[1,1],[2,2],[3,3],[3,2]]
        list_id = computeLocalID(list_n, DirFaces, DuplicatedFaces, DuplicataFaces)
        ID = computeGlobalID(list_id)
        if PRINT :
            print("-------------- FINAL ------------------")
            for i,id in enumerate(list_id):
                print("...... patch id : ", i, " ......")
                print_id(id)
            print("-------------- ID ------------------")
            print(ID)
        t_end = time()
        print("Elapsed time ", t_end - t_start)
|
ratnania/pigasus
|
python/fem/idutils.py
|
Python
|
mit
| 6,230
|
# Sum, over N intervals given as "SH:SM EH:EM", the minutes from start to
# end, wrapping past midnight when the end time precedes the start time.
N = int(input())
ans = 0
for _ in range(N):
    sh, sm, eh, em = map(int, input().replace(':', ' ').split())
    minutes = (eh * 60 + em) - (sh * 60 + sm)
    if minutes < 0:
        minutes += 24 * 60
    ans += minutes
print(ans)
|
knuu/competitive-programming
|
yukicoder/yuki070.py
|
Python
|
mit
| 210
|
# -*- coding: utf-8 -*-
"""Read encoder and print position value to LCD."""
from machine import sleep_ms
from pyb_encoder import Encoder
from hd44780 import HD44780
class STM_LCDShield(HD44780):
    """HD44780 display wired to the STM32 LCD shield's fixed GPIO pins."""
    # Pin order is presumably what HD44780 expects (RS, E, D4-D7) — confirm
    # against the hd44780 driver before rewiring.
    _default_pins = ('PD2', 'PD1', 'PD6', 'PD5', 'PD4', 'PD3')
def main():
    """Poll the encoder and redraw its value on the LCD whenever it changes."""
    lcd.set_string("Value: ")
    shown = 0
    while True:
        current = enc.value
        if current != shown:
            shown = current
            lcd.set_cursor(6, 0)
            for ch in "%3i" % current:
                lcd.send_byte(ch)
        # Decay the encoder's acceleration between polls.
        enc.cur_accel = max(0, enc.cur_accel - enc.accel)
        sleep_ms(50)
if __name__ == '__main__':
    # Hardware setup: LCD on the shield's fixed pins, rotary encoder on
    # A0/A1 capped at 999 with acceleration step 5.
    lcd = STM_LCDShield()
    enc = Encoder('A0', 'A1', max_value=999, accel=5)
    main()
|
SpotlightKid/micropython-stm-lib
|
encoder/examples/encoder_lcd.py
|
Python
|
mit
| 715
|
import _plotly_utils.basevalidators
class ShapeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    # Auto-generated validator for the `scattercarpet.line.shape` property:
    # restricts values to "linear" or "spline".
    def __init__(self, plotly_name="shape", parent_name="scattercarpet.line", **kwargs):
        super(ShapeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # pop() lets the caller override any of these defaults via kwargs.
            edit_type=kwargs.pop("edit_type", "plot"),
            role=kwargs.pop("role", "style"),
            values=kwargs.pop("values", ["linear", "spline"]),
            **kwargs
        )
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scattercarpet/line/_shape.py
|
Python
|
mit
| 514
|
# MIT License
#
# Copyright (C) IBM Corporation 2019
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Base classes for differential privacy mechanisms.
"""
import abc
from copy import copy
import inspect
from numbers import Real
import secrets
class DPMachine(abc.ABC):
    """
    Parent class for :class:`.DPMechanism` and :class:`.DPTransformer`, providing and specifying basic functionality.
    """
    @abc.abstractmethod
    def randomise(self, value):
        """Randomise `value` with the mechanism.
        Parameters
        ----------
        value : int or float or str or method
            The value to be randomised.
        Returns
        -------
        int or float or str or method
            The randomised value, same type as `value`.
        """
    def copy(self):
        """Produces a copy of the class.
        Returns
        -------
        self : class
            Returns the copy.
        """
        # copy.copy: a shallow copy — referenced objects are shared,
        # not duplicated.
        return copy(self)
class DPMechanism(DPMachine, abc.ABC):
r"""Abstract base class for all mechanisms. Instantiated from :class:`.DPMachine`.
Parameters
----------
epsilon : float
Privacy parameter :math:`\epsilon` for the mechanism. Must be in [0, ∞].
delta : float
Privacy parameter :math:`\delta` for the mechanism. Must be in [0, 1]. Cannot be simultaneously zero with
``epsilon``.
"""
def __init__(self, *, epsilon, delta):
self.epsilon, self.delta = self._check_epsilon_delta(epsilon, delta)
self._rng = secrets.SystemRandom()
def __repr__(self):
attrs = inspect.getfullargspec(self.__class__).kwonlyargs
attr_output = []
for attr in attrs:
attr_output.append(attr + "=" + repr(self.__getattribute__(attr)))
return str(self.__module__) + "." + str(self.__class__.__name__) + "(" + ", ".join(attr_output) + ")"
@abc.abstractmethod
def randomise(self, value):
"""Randomise `value` with the mechanism.
Parameters
----------
value : int or float or str or method
The value to be randomised.
Returns
-------
int or float or str or method
The randomised value, same type as `value`.
"""
def bias(self, value):
"""Returns the bias of the mechanism at a given `value`.
Parameters
----------
value : int or float
The value at which the bias of the mechanism is sought.
Returns
-------
bias : float or None
The bias of the mechanism at `value` if defined, `None` otherwise.
"""
raise NotImplementedError
def variance(self, value):
"""Returns the variance of the mechanism at a given `value`.
Parameters
----------
value : int or float
The value at which the variance of the mechanism is sought.
Returns
-------
bias : float or None
The variance of the mechanism at `value` if defined, `None` otherwise.
"""
raise NotImplementedError
def mse(self, value):
"""Returns the mean squared error (MSE) of the mechanism at a given `value`.
Parameters
----------
value : int or float
The value at which the MSE of the mechanism is sought.
Returns
-------
bias : float or None
The MSE of the mechanism at `value` if defined, `None` otherwise.
"""
return self.variance(value) + (self.bias(value)) ** 2
    @classmethod
    def _check_epsilon_delta(cls, epsilon, delta):
        """Validate the privacy budget and coerce it to a pair of floats.

        Requires numeric ``epsilon >= 0`` and ``delta`` in [0, 1], and the two
        must not be simultaneously zero (that would mean no randomisation).

        Raises
        ------
        TypeError
            If either parameter is not numeric.
        ValueError
            If a parameter lies outside its permitted range.
        """
        if not isinstance(epsilon, Real) or not isinstance(delta, Real):
            raise TypeError("Epsilon and delta must be numeric")
        if epsilon < 0:
            raise ValueError("Epsilon must be non-negative")
        if not 0 <= delta <= 1:
            raise ValueError("Delta must be in [0, 1]")
        if epsilon + delta == 0:
            raise ValueError("Epsilon and Delta cannot both be zero")
        return float(epsilon), float(delta)
    def _check_all(self, value):
        """Re-validate the stored privacy budget before randomising `value`."""
        del value  # unused in the base class; subclasses may inspect it
        self._check_epsilon_delta(self.epsilon, self.delta)
        return True
class TruncationAndFoldingMixin:
    """Mixin that lets a :class:`.DPMechanism` truncate or fold its outputs.

    Parameters
    ----------
    lower : float
        The lower bound of the mechanism.

    upper : float
        The upper bound of the mechanism.

    """
    def __init__(self, *, lower, upper):
        # Truncation/folding only makes sense attached to a DP mechanism.
        if not isinstance(self, DPMechanism):
            raise TypeError("TruncationAndFoldingMachine must be implemented alongside a :class:`.DPMechanism`")

        self.lower, self.upper = self._check_bounds(lower, upper)

    @classmethod
    def _check_bounds(cls, lower, upper):
        """Validate that (lower, upper) is a well-ordered numeric interval."""
        if not isinstance(lower, Real) or not isinstance(upper, Real):
            raise TypeError("Bounds must be numeric")

        if lower > upper:
            raise ValueError("Lower bound must not be greater than upper bound")

        return lower, upper

    def _check_all(self, value):
        """Re-validate the bounds; `value` itself is not inspected here."""
        del value
        self._check_bounds(self.lower, self.upper)

        return True

    def _truncate(self, value):
        """Clamp `value` into the closed interval [lower, upper]."""
        return min(max(value, self.lower), self.upper)

    def _fold(self, value):
        """Reflect `value` at the violated bound until it lies within range."""
        if value < self.lower:
            return self._fold(2 * self.lower - value)

        if value > self.upper:
            return self._fold(2 * self.upper - value)

        return value
def bernoulli_neg_exp(gamma, rng=None):
    """Draw one sample from Bernoulli(exp(-gamma)).

    Adapted from "The Discrete Gaussian for Differential Privacy", Canonne, Kamath, Steinke, 2020.
    https://arxiv.org/pdf/2004.00010v2.pdf

    Parameters
    ----------
    gamma : float
        Non-negative parameter; the success probability is ``exp(-gamma)``.

    rng : Random number generator, optional
        Source of uniform variates.  Defaults to a fresh ``secrets.SystemRandom``.

    Returns
    -------
    int
        1 with probability ``exp(-gamma)``, otherwise 0.

    """
    if gamma < 0:
        raise ValueError(f"Gamma must be non-negative, got {gamma}.")

    if rng is None:
        rng = secrets.SystemRandom()

    # Reduce to gamma <= 1 via exp(-gamma) = exp(-1) * exp(-(gamma - 1)):
    # bail out as soon as one unit-rate factor comes up 0.
    while gamma > 1:
        gamma -= 1
        if bernoulli_neg_exp(1, rng) == 0:
            return 0

    # Count how many uniforms satisfy U_k <= gamma / k; the resulting count K
    # is odd with probability exactly exp(-gamma).
    k = 1
    while rng.random() <= gamma / k:
        k += 1

    return k % 2
|
IBM/differential-privacy-library
|
diffprivlib/mechanisms/base.py
|
Python
|
mit
| 7,783
|
"""Create simple maximum dbz composites for a given UTC date
"""
import datetime
import os
import time
import sys
import subprocess
import requests
import osgeo.gdal as gdal
from osgeo import gdalconst
import numpy as np
from pyiem.util import get_dbconn, logger, utc
LOG = logger()
# Long-lived read-only connection to the mesosite database, used for the
# radar composite colour tables.
PGCONN = get_dbconn("mesosite", user="nobody")
CURSOR = PGCONN.cursor()
# Base URL of the IEM radar map service used to render the overview images.
URLBASE = "http://iem.local/GIS/radmap.php?width=1280&height=720&"
def get_colortable(prod):
    """Build the GDAL colour table for a composite product.

    Args:
        prod (str): product to get the table for (``n0r`` or ``n0q``)

    Returns:
        gdal.ColorTable: table populated from the mesosite database.
    """
    CURSOR.execute(
        "select r,g,b from iemrasters_lookup l JOIN iemrasters r on "
        "(r.id = l.iemraster_id) WHERE r.name = %s ORDER by l.coloridx ASC",
        ("composite_" + prod,),
    )
    table = gdal.ColorTable()
    # Each row is one palette slot, in colour-index order; alpha is opaque.
    for idx, (red, green, blue) in enumerate(CURSOR):
        table.SetColorEntry(idx, (red, green, blue, 255))
    return table
def run(prod, sts):
    """Create and distribute a daily maximum-reflectivity composite.

    Computes the pixel-wise maximum over every 5-minute archived frame for
    the UTC day starting at `sts`, inserts the result (plus world file) into
    LDM, then renders Iowa and CONUS overview maps via the radmap service.

    Args:
        prod (str): Product to run for, either n0r or n0q
        sts (datetime): date to run for (start of the UTC day)
    """
    ets = sts + datetime.timedelta(days=1)
    interval = datetime.timedelta(minutes=5)

    n0rct = get_colortable(prod)

    # Loop over our archived files and do what is necessary
    maxn0r = None
    now = sts
    while now < ets:
        fn = now.strftime(
            (
                "/mesonet/ARCHIVE/data/%Y/%m/%d/"
                "GIS/uscomp/" + prod + "_%Y%m%d%H%M.png"
            )
        )
        if not os.path.isfile(fn):
            LOG.info("missing file: %s", fn)
            now += interval
            continue
        n0r = gdal.Open(fn, 0)
        n0rd = n0r.ReadAsArray()
        LOG.debug(
            "%s %s %s %s", now, n0rd.dtype, np.shape(n0rd), n0r.RasterCount
        )
        if maxn0r is None:
            maxn0r = n0rd
        # Element-wise running maximum over all frames seen so far.
        maxn0r = np.where(n0rd > maxn0r, n0rd, maxn0r)
        now += interval

    if maxn0r is None:
        # No archived frames at all for this date: previously this fell
        # through to a NameError on `n0r`; nothing sensible to produce.
        LOG.info("no %s frames found for %s, aborting", prod, sts)
        return

    out_driver = gdal.GetDriverByName("gtiff")
    outdataset = out_driver.Create(
        "max.tiff",
        n0r.RasterXSize,
        n0r.RasterYSize,
        n0r.RasterCount,
        gdalconst.GDT_Byte,
    )
    # Set output color table to match input
    outdataset.GetRasterBand(1).SetRasterColorTable(n0rct)
    outdataset.GetRasterBand(1).WriteArray(maxn0r)
    # Deleting the dataset flushes it to disk (GDAL idiom).
    del outdataset

    subprocess.call("convert max.tiff max.png", shell=True)
    # Insert into LDM
    cmd = (
        "pqinsert -p 'plot a %s0000 bogus GIS/uscomp/max_%s_0z0z_%s.png "
        "png' max.png"
    ) % (sts.strftime("%Y%m%d"), prod, sts.strftime("%Y%m%d"))
    subprocess.call(cmd, shell=True)

    # Create tmp world file; grid spacing differs between products.
    wldfn = "/tmp/tmpwld%s.wld" % (sts.strftime("%Y%m%d"),)
    with open(wldfn, "w") as out:
        if prod == "n0r":
            out.write(
                """0.01
0.0
0.0
-0.01
-126.0
50.0"""
            )
        else:
            out.write(
                """0.005
0.0
0.0
-0.005
-126.0
50.0"""
            )
    # Insert world file as well
    cmd = (
        "pqinsert -i -p 'plot a %s0000 bogus GIS/uscomp/max_%s_0z0z_%s.wld "
        "wld' %s"
    ) % (sts.strftime("%Y%m%d"), prod, sts.strftime("%Y%m%d"), wldfn)
    subprocess.call(cmd, shell=True)
    # cleanup
    os.remove("max.tiff")
    os.remove("max.png")
    os.remove(wldfn)

    # Sleep for a bit so LDM can ingest the composite before we render maps.
    time.sleep(60)

    # Iowa
    png = requests.get(
        "%slayers[]=uscounties&layers[]=%s&ts=%s"
        % (
            URLBASE,
            "nexrad_tc" if prod == "n0r" else "n0q_tc",
            sts.strftime("%Y%m%d%H%M"),
        )
    )
    with open("tmp.png", "wb") as fp:
        fp.write(png.content)
    cmd = (
        "pqinsert -p 'plot ac %s0000 summary/max_%s_0z0z_comprad.png "
        "comprad/max_%s_0z0z_%s.png png' tmp.png"
    ) % (sts.strftime("%Y%m%d"), prod, prod, sts.strftime("%Y%m%d"))
    subprocess.call(cmd, shell=True)

    # US
    png = requests.get(
        ("%ssector=conus&layers[]=uscounties&" "layers[]=%s&ts=%s")
        % (
            URLBASE,
            "nexrad_tc" if prod == "n0r" else "n0q_tc",
            sts.strftime("%Y%m%d%H%M"),
        )
    )
    with open("tmp.png", "wb") as fp:
        fp.write(png.content)
    cmd = (
        "pqinsert -p 'plot ac %s0000 summary/max_%s_0z0z_usrad.png "
        "usrad/max_%s_0z0z_%s.png png' tmp.png"
    ) % (sts.strftime("%Y%m%d"), prod, prod, sts.strftime("%Y%m%d"))
    subprocess.call(cmd, shell=True)
    os.remove("tmp.png")
def main(argv):
    """Entry point: pick the target date and run both radar products.

    Defaults to yesterday (UTC, midnight); a ``year month day`` triple on the
    command line overrides the date.
    """
    target = utc() - datetime.timedelta(days=1)
    target = target.replace(hour=0, minute=0, second=0, microsecond=0)
    if len(argv) == 4:
        year, month, day = (int(token) for token in argv[1:4])
        target = target.replace(year=year, month=month, day=day)
    for product in ("n0r", "n0q"):
        run(product, target)
if __name__ == "__main__":
    # Invoked from the command line; optional argv is [script, year, month, day].
    main(sys.argv)
|
akrherz/iem
|
scripts/summary/max_reflect.py
|
Python
|
mit
| 4,865
|
from socketio.namespace import BaseNamespace
from socketio.mixins import BroadcastMixin
class PointerNamespace(BaseNamespace, BroadcastMixin):
    """Socket.IO namespace that shares live mouse-pointer positions between clients."""
    # Number of currently connected clients; class attribute so it is shared
    # across every per-connection namespace instance.
    user_count = 0

    def recv_connect(self):
        # A client joined: bump the counter and notify everyone.
        PointerNamespace.user_count += 1
        self.broadcast_event('update_count', PointerNamespace.user_count)

    def on_moved(self, coordinates):
        # Fan out the new pointer position to every other client; the session
        # id (prefixed with '_') identifies which remote cursor to move.
        print coordinates
        self.broadcast_event_not_me('move', {"user": '_' + self.socket.sessid, "x": coordinates['x'], "y": coordinates['y']})

    def recv_disconnect(self):
        # A client left: decrement, notify the rest, and close quietly.
        PointerNamespace.user_count -= 1
        self.broadcast_event('update_count', PointerNamespace.user_count)
        self.disconnect(silent=True)
|
mburst/gevent-socketio-starterkit
|
pointer/realtime.py
|
Python
|
mit
| 696
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
from .html_parser import HTMLParser
from .html_parser import _is_str
from .html_parser import _is_dict
from .html_parser import _is_iterable
# Variables ===================================================================
# Functions & classes =========================================================
class HTMLQuery(HTMLParser):
    """Query layer over :class:`HTMLParser` elements: matching and searching."""

    def containsParamSubset(self, params):
        """
        Test whether this element contains at least all `params`, or more.

        Args:
            params (dict/SpecialDict): Subset of parameters.

        Returns:
            bool: True if all `params` are contained in this element.
        """
        for key in params.keys():
            if key not in self.params:
                return False

            if params[key] != self.params[key]:
                return False

        return True

    def isAlmostEqual(self, tag_name, params=None, fn=None,
                      case_sensitive=False):
        """
        Compare element with given `tag_name`, `params` and/or by lambda
        function `fn`.

        Lambda function is same as in :meth:`find`.

        Args:
            tag_name (str): Compare just name of the element.
            params (dict, default None): Compare also parameters.
            fn (function, default None): Function which will be used for
                matching.
            case_sensitive (default False): Use case sensitive matching of the
                `tag_name`.

        Returns:
            bool: True if two elements are almost equal.
        """
        # Comparing against another element: recurse on its name/params.
        if isinstance(tag_name, self.__class__):
            return self.isAlmostEqual(
                tag_name.getTagName(),
                tag_name.params if tag_name.params else None
            )

        # search by lambda function
        if fn and not fn(self):
            return False

        # compare case sensitive?
        comparator = self._tagname  # we need to make self._tagname lower
        if not case_sensitive and tag_name:
            tag_name = tag_name.lower()
            comparator = comparator.lower()

        # compare tagname
        if tag_name and tag_name != comparator:
            return False

        # None params = don't use parameters to compare equality
        if params is None:
            return True

        # compare parameters
        if params == self.params:
            return True

        # test whether `params` dict is subset of self.params
        if not self.containsParamSubset(params):
            return False

        return True

    def find(self, tag_name, params=None, fn=None, case_sensitive=False):
        """
        Same as :meth:`findAll`, but without `endtags`.

        You can always get them from :attr:`endtag` property.
        """
        return [
            x for x in self.findAll(tag_name, params, fn, case_sensitive)
            if not x.isEndTag()
        ]

    def findB(self, tag_name, params=None, fn=None, case_sensitive=False):
        """
        Same as :meth:`findAllB`, but without `endtags`.

        You can always get them from :attr:`endtag` property.
        """
        return [
            x for x in self.findAllB(tag_name, params, fn, case_sensitive)
            if not x.isEndTag()
        ]

    def findAll(self, tag_name, params=None, fn=None, case_sensitive=False):
        """
        Search for elements by their parameters using `Depth-first algorithm
        <http://en.wikipedia.org/wiki/Depth-first_search>`_.

        Args:
            tag_name (str): Name of the tag you are looking for. Set to "" if
                you wish to use only `fn` parameter.
            params (dict, default None): Parameters which have to be present
                in tag to be considered matching.
            fn (function, default None): Use this function to match tags.
                Function expects one parameter which is HTMLElement instance.
            case_sensitive (bool, default False): Use case sensitive search.

        Returns:
            list: List of :class:`HTMLElement` instances matching your \
                  criteria.
        """
        output = []

        if self.isAlmostEqual(tag_name, params, fn, case_sensitive):
            output.append(self)

        for el in self.childs:
            matches = el.findAll(tag_name, params, fn, case_sensitive)

            if matches:
                output.extend(matches)

        return output

    def findAllB(self, tag_name, params=None, fn=None, case_sensitive=False):
        """
        Simple search engine using `Breadth-first algorithm
        <http://en.wikipedia.org/wiki/Breadth-first_search>`_.

        Args:
            tag_name (str): Name of the tag you are looking for. Set to "" if
                you wish to use only `fn` parameter.
            params (dict, default None): Parameters which have to be present
                in tag to be considered matching.
            fn (function, default None): Use this function to match tags.
                Function expects one parameter which is HTMLElement instance.
            case_sensitive (bool, default False): Use case sensitive search.

        Returns:
            list: List of :class:`HTMLElement` instances matching your \
                  criteria.
        """
        output = []

        if self.isAlmostEqual(tag_name, params, fn, case_sensitive):
            output.append(self)

        # BUGFIX: work on a copy -- the queue is extended while iterating, and
        # extending ``self.childs`` directly grafted every descendant onto
        # this element, permanently mutating the parsed DOM.
        breadth_search = list(self.childs)
        for el in breadth_search:
            if el.isAlmostEqual(tag_name, params, fn, case_sensitive):
                output.append(el)

            if el.childs:
                breadth_search.extend(el.childs)

        return output

    def wfind(self, tag_name, params=None, fn=None, case_sensitive=False):
        """
        This methods works same as :meth:`find`, but only in one level of the
        :attr:`childs`.

        This allows to chain :meth:`wfind` calls::

            >>> dom = dhtmlparser.parseString('''
            ... <root>
            ...     <some>
            ...         <something>
            ...             <xe id="wanted xe" />
            ...         </something>
            ...         <something>
            ...             asd
            ...         </something>
            ...         <xe id="another xe" />
            ...     </some>
            ...     <some>
            ...         else
            ...         <xe id="yet another xe" />
            ...     </some>
            ... </root>
            ... ''')
            >>> xe = dom.wfind("root").wfind("some").wfind("something").find("xe")
            >>> xe
            [<dhtmlparser.htmlelement.HTMLElement object at 0x8a979ac>]
            >>> str(xe[0])
            '<xe id="wanted xe" />'

        Args:
            tag_name (str): Name of the tag you are looking for. Set to "" if
                you wish to use only `fn` parameter.
            params (dict, default None): Parameters which have to be present
                in tag to be considered matching.
            fn (function, default None): Use this function to match tags.
                Function expects one parameter which is HTMLElement instance.
            case_sensitive (bool, default False): Use case sensitive search.

        Returns:
            obj: Blank HTMLElement with all matches in :attr:`childs` property.

        Note:
            Returned element also have set :attr:`_container` property to True.
        """
        childs = self.childs
        if self._container:  # container object: search one level deeper
            childs = map(
                lambda x: x.childs,
                filter(lambda x: x.childs, self.childs)
            )
            childs = sum(childs, [])  # flattern the list

        el = self.__class__()  # HTMLElement()
        el._container = True
        for child in childs:
            if child.isEndTag():
                continue

            if child.isAlmostEqual(tag_name, params, fn, case_sensitive):
                el.childs.append(child)

        return el

    def match(self, *args, **kwargs):
        """
        :meth:`wfind` is nice function, but still kinda long to use, because
        you have to manually chain all calls together and in the end, you get
        :class:`HTMLElement` instance container.

        This function recursively calls :meth:`wfind` for you and in the end,
        you get list of matching elements::

            xe = dom.match("root", "some", "something", "xe")

        is alternative to::

            xe = dom.wfind("root").wfind("some").wfind("something").wfind("xe")

        You can use all arguments used in :meth:`wfind`::

            dom = dhtmlparser.parseString('''
                <root>
                    <div id="1">
                        <div id="5">
                            <xe id="wanted xe" />
                        </div>
                        <div id="10">
                            <xe id="another wanted xe" />
                        </div>
                        <xe id="another xe" />
                    </div>
                    <div id="2">
                        <div id="20">
                            <xe id="last wanted xe" />
                        </div>
                    </div>
                </root>
            ''')

            xe = dom.match(
                "root",
                {"tag_name": "div", "params": {"id": "1"}},
                ["div", {"id": "5"}],
                "xe"
            )

            assert len(xe) == 1
            assert xe[0].params["id"] == "wanted xe"

        Args:
            *args: List of :meth:`wfind` parameters.
            absolute (bool, default None): If true, first element will be
                     searched from the root of the DOM. If None,
                     :attr:`_container` attribute will be used to decide value
                     of this argument. If False, :meth:`find` call will be run
                     first to find first element, then :meth:`wfind` will be
                     used to progress to next arguments.

        Returns:
            list: List of matching elements (empty list if no matching element\
                  is found).
        """
        if not args:
            return self.childs

        # pop one argument from argument stack (tuples, so .pop() won't work)
        act = args[0]
        args = args[1:]

        # this is used to define relative/absolute root of the first element
        def wrap_find(*args, **kwargs):
            """
            Find wrapper, to allow .wfind() to be substituted with .find()
            call, which normally returns blank array instead of blank
            `container` element.
            """
            el = self.__class__()  # HTMLElement()
            el.childs = self.find(*args, **kwargs)
            return el

        # if absolute is not specified (ie - next recursive call), use
        # self._container, which is set to True by .wfind(), so next search
        # will be absolute from the given element
        absolute = kwargs.get("absolute", None)
        if absolute is None:
            absolute = self._container

        find_func = self.wfind if absolute else wrap_find

        result = None
        if _is_iterable(act):
            result = find_func(*act)
        elif _is_dict(act):
            result = find_func(**act)
        elif _is_str(act):
            result = find_func(act)
        else:
            raise KeyError(
                "Unknown parameter type '%s': %s" % (type(act), act)
            )

        if not result.childs:
            return []

        match = result.match(*args)

        # just to be sure return always blank array, when the match is
        # False/None and so on (it shouldn't be, but ..)
        return match if match else []
|
Bystroushaak/pyDHTMLParser
|
src/dhtmlparser/htmlelement/html_query.py
|
Python
|
mit
| 12,034
|
#encoding:utf-8
import json
import string
from random import choice
from functools import wraps
import time
import datetime
import paramiko
from dbutils import MySQLConnection as SQL
from dbutils import md5_str
from flask import session,redirect
class User(object):
    """Account model backed by raw SQL: login, CRUD and password management."""
    def __init__(self,username,password,age,telphone,email):
        # NOTE(review): this assigns the *builtin* ``id`` function, not a
        # numeric id -- looks like a latent bug; instances are never
        # constructed in this module, so it has gone unnoticed.
        self.id = id
        self.username = username
        self.password = password
        self.age = age
        self.telphone = telphone
        self.email = email
    @classmethod
    # Decorator factory: redirect to the login page unless a session exists.
    def login_check(cls,func):
        @wraps(func) # preserve the wrapped view's metadata under stacked decorators
        def check(*args, **kwargs):
            if session.get('username') is None:
                return redirect('/')
            rt = func(*args, **kwargs)
            return rt # return the wrapped view's result
        return check # return the inner checking function
    @classmethod
    def validate_login(cls,username, password):
        """Check credentials; return {'id', 'username'} on success, else None."""
        _columns = ('id','username')
        _sql = 'select * from user where username = %s and password = %s'
        args = (username, md5_str(password))
        sql_count, rt_list = SQL.excute_sql(_sql, args)
        return dict(zip(_columns,rt_list[0])) if sql_count != 0 else None
    @classmethod
    def get_list(cls):
        """Return every user row as a list of dicts."""
        colloens = ('id', 'username', 'password', 'age', 'telphone', 'email')
        _sql = 'select * from user'
        rt = []
        sql_count, rt_list = SQL.excute_sql(_sql) # run the query
        for i in rt_list:
            rt.append(dict(zip(colloens, i)))
        return rt
    @classmethod
    def get_alone_user(cls,id):
        """Return the (0- or 1-element) list of users with the given id."""
        users = cls.get_list()
        return [i for i in users if i.get('id') == id ]
    @classmethod
    def user_add(cls,params):
        """Insert a new user unless the username is already taken."""
        username = params.get('username')
        password = params.get('password')
        age = params.get('age')
        telphone = params.get('telphone')
        email = params.get('email')
        _sql_select = 'select * from user where username = %s'
        _sql_insert = 'insert into user(username,password,age,telphone,email) values(%s,%s,%s,%s,%s)'
        agrs1 = (username,)
        _sql_count, rt_list = SQL.excute_sql(_sql_select, agrs1)
        if _sql_count != 0:
            return False, username + '已存在,请尝试其他的名字'
        args2 = (username, md5_str(password), age, telphone, email)
        SQL.excute_sql(_sql_insert, args2)
        return True, '添加成功'
    @classmethod
    def user_update(cls,params):
        """Update age/telphone/email for the user matching id and username."""
        username = params.get('username')
        id = params.get('id')
        age = params.get('age')
        telphone = params.get('telphone')
        email = params.get('email')
        _sql = 'update user set age=%s ,telphone=%s ,email=%s where id=%s and username=%s'
        args = (age, telphone, email, id, username)
        _sql_count, rt_list = SQL.excute_sql(_sql, args)
        if _sql_count != 0:
            return True, '更新成功'
        return False, '更新失败'
    @classmethod
    def user_del(cls,id, username):
        """Delete the user matching both id and username; True on success."""
        _sql = 'delete from user where id=%s and username=%s'
        args = (id, username)
        _sql_count, rt_list = SQL.excute_sql(_sql, args)
        if _sql_count != 0:
            return True
        return False
    @classmethod
    def valid_change_passwd(cls,uid, upass, muser, mpass):
        """Validate a password change: admin credentials, target user, length."""
        if not cls.validate_login(muser, mpass):
            return False, '管理员密码错误'
        if cls.get_alone_user(uid): # NOTE(review): condition looks inverted -- reports "user not found" when the lookup *succeeds*; flagged by the original author too
            return False, '用户不存在'
        if len(upass) < 6:
            return False, '密码长度小于6个字符'
        return True, '验证成功'
    @classmethod
    def change_passwd(cls,uid, upass):
        """Set a new (md5-hashed) password for the given user id."""
        _sql = 'update user set password = %s where id = %s'
        _args = (md5_str(upass), uid)
        _sql_count, rt_list = SQL.excute_sql(_sql, _args)
        if _sql_count:
            return True, '修改成功'
        return False, '修改失败'
    @classmethod
    def user_reset(cls,id, username):
        """Reset the user's password to a random 8-char string; return it."""
        _sql = 'update user set password = %s where id=%s and username=%s'
        newpassword = ''.join([choice(string.ascii_letters + string.digits) for i in range(8)])
        args = (md5_str(newpassword), id, username)
        _sql_count, rt_list = SQL.excute_sql(_sql, args)
        if _sql_count != 0:
            return True, '重置成功', newpassword
        return False, '重置失败', newpassword
    @classmethod
    def validate_mpass(cls,params):
        """After verifying the admin password, run the submitted commands over SSH."""
        mgrpass = params.get('mgrpass')
        mgruser = 'admin'
        ip = params.get('ip')
        cmd = params.get('cmd').split('\n')
        _sql = 'select * from user where username=%s and password=%s'
        _args = (mgruser,md5_str(mgrpass))
        _sql_count,rt_list = SQL.excute_sql(_sql,_args)
        if _sql_count != 0 :
            _ssh = Ssh_cmd(ip,cmd)
            return _ssh.ssh_cmd()
        return False,'管理员密码验证失败'
class Ssh_cmd(object):
    """Run a list of shell commands on a remote host via SSH (paramiko)."""
    def __init__(self,ip,cmd=[]):
        # NOTE(review): mutable default argument; harmless here because the
        # list is only read, but a classic Python pitfall nonetheless.
        self.ip = ip
        self.cmd = cmd
        # SECURITY(review): hard-coded SSH credentials in source code --
        # these should come from configuration or a secrets store.
        self.username = 'jcui'
        self.password = '6522123'
        self.port = 22
        self._ssh = None
        self.__conn()
    def __conn(self):
        # Establish the SSH connection; on failure the error is returned but
        # the caller (``__init__``) ignores it, leaving self._ssh half-set.
        try:
            self._ssh = paramiko.SSHClient()
            self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self._ssh.connect(self.ip, self.port, self.username,self.password)
        except BaseException as e:
            return False,e
    def ssh_cmd(self):
        """Execute each command; return (True, [[stdout, stderr], ...]) or (False, msg)."""
        _rt_list = []
        if self._ssh:
            for _cmd in self.cmd:
                stdin, stdout, stderr = self._ssh.exec_command(_cmd.strip('\r'))
                # _rt_list.append([_cmd, stdout.readlines(), stderr.readlines()])
                _rt_list.append([stdout.readlines(), stderr.readlines()])
            self._ssh.close()
            return True,_rt_list
        return False,'执行失败'
    def ssh_sftp(self):
        # Placeholder: SFTP support was never implemented.
        pass
class Logs(object):
    """Web access-log analysis: parse, import to SQL and query top hits."""
    @classmethod
    def log_anslysis(cls,sfile):
        """Count (ip, url, status) occurrences in *sfile*; return sorted items.

        Returns '' (empty string) if the file cannot be read.
        """
        file_dict = {}
        try:
            files = open(sfile, 'r')
            for i in files:
                i = i.split()
                # Columns 0/6/8 of a combined-format access log:
                # client ip, request path, HTTP status code.
                x, y, z = i[0], i[6], i[8]
                file_dict[(x, y, z)] = file_dict.get((x, y, z), 0) + 1
        except BaseException as e:
            print e
            return ''
        finally:
            # NOTE(review): if open() itself failed, `files` is unbound here
            # and this raises NameError -- TODO confirm/guard.
            if files:
                files.close()
        return sorted(file_dict.items(), key=lambda x: x[1], reverse=False)
    @classmethod
    def logs_import_sql(cls,logs_path):
        """Parse *logs_path* and bulk-insert the counts into access_logs."""
        # logs_path = '/home/op/test/www_access_20140823.log'
        # logs_path = '/home/jcui/files/www_access_20140823.log'
        log_list = cls.log_anslysis(logs_path)
        _sql = 'insert into access_logs(ip,url,code,nums) values(%s,%s,%s,%s)'
        if SQL.excute_log_sql(_sql, log_list):
            return True
        return False
    @classmethod
    def log_access(cls,top=10):
        """Return the *top* most frequent access rows as a list of dicts."""
        colloens = ('id', 'ip', 'url', 'code', 'nums')
        _sql = 'select * from access_logs order by nums desc limit %s'
        args = (top,)
        rt = []
        _sql_count, rt_list = SQL.excute_sql(_sql, args)
        for x in rt_list:
            rt.append(dict(zip(colloens, x)))
        return rt
class Assets(object):
    """Server inventory records (table ``assets``) with CRUD via raw SQL.

    Rows with status=0 are active; delete() soft-deletes by setting status=1.
    """
    def __init__(self,id,sn,ip,hostname,os,cpu,ram,disk,idc_id,admin,business,purchase_date,warranty,vendor,model,status):
        self.id = id
        self.sn = sn
        self.ip = ip
        self.hostname = hostname
        self.os = os
        self.cpu = cpu
        self.ram = ram
        self.disk = disk
        self.idc_id = idc_id
        self.admin = admin
        self.business = business
        self.purchase_date = purchase_date
        self.warranty = warranty
        self.vendor = vendor
        self.model = model
        self.status = status
    @classmethod
    def get_list(cls):
        """List all active assets joined with their IDC name, as dicts."""
        _column = 'id,sn,ip,hostname,os,cpu,ram,disk,idc_name,admin,business,purchase_date,warranty,vendor,model,status'
        _columns = _column.split(',')
        _sql = 'select {column} from assets,idc_name where assets.status=0 and assets.idc_id = idc_name.idc_id;'.format(column=_column)
        _cnt,_rt_list = SQL.excute_sql(_sql)
        return [dict(zip(_columns,i)) for i in _rt_list ]
    @classmethod
    def get_by_id(cls,aid):
        """Fetch one asset by id; purchase_date rendered as 'YYYY-MM-DD'.

        Returns '' (empty string) when no row matches.
        """
        _column = 'id,sn,ip,hostname,os,cpu,ram,disk,idc_id,admin,business,purchase_date,warranty,vendor,model,status'
        # _sql = 'select {coll} from assets,idc_name where assets.status=0 and assets.idc_id = idc_name.idc_id and id = %s'.format(coll=_column)
        _sql = 'select {coll} from assets where id = %s'.format(coll=_column)
        _args = (aid,)
        _cnt, _rt_list = SQL.excute_sql(_sql, _args)
        rt = []
        if _cnt != 0:
            for x in range(len(_column.split(','))):
                if _column.split(',')[x] == 'purchase_date':
                    # Dates must be stringified for the template/JSON layer.
                    rt.append((_column.split(',')[x], _rt_list[0][x].strftime("%Y-%m-%d")))
                    continue
                rt.append((_column.split(',')[x], _rt_list[0][x]))
            return dict(rt)
        return ''
    @classmethod
    def get_idc_name(cls):
        """Return all (idc_id, idc_name) pairs for the IDC select box."""
        _sql = 'select idc_id,idc_name from idc_name'
        rt = []
        _cnt, _rt_list = SQL.excute_sql(_sql)
        for i in _rt_list:
            rt.append(i)
        return rt
    @classmethod
    def delete(cls,id):
        """Soft-delete an asset by flipping its status to 1."""
        _sql = 'update assets set status = 1 where id=%s'
        _args = (id,)
        _cnt, _rtlist = SQL.excute_sql(_sql, _args)
        if _cnt != 0:
            return True, '删除成功'
        return False, '删除失败'
    @classmethod
    def ip_check(cls,ip):
        """Return True when *ip* is a syntactically valid dotted-quad IPv4."""
        q = ip.split('.')
        return len(q) == 4 and len(filter(lambda x: x >= 0 and x <= 255, \
            map(int, filter(lambda x: x.isdigit(), q)))) == 4
    @classmethod
    def validate_create(cls,params):
        """Validate a new-asset form (required fields, unique SN/IP/hostname) then create."""
        collent = params.keys()
        result = {}
        for i in collent:
            if params[i] == '':
                result[i] = '%s 不能为空' % i
        # SN must be unique among active assets and at least 6 chars long
        sn = params.get('sn').strip()
        if len(sn) >= 6:
            _sql = 'select * from assets where sn = %s and status = 0'
            _args = (sn,)
            _cnt, rt_list = SQL.excute_sql(_sql, _args)
            if _cnt != 0:
                result['sn'] = 'SN编码已存在'
        else:
            result['sn'] = 'SN编码太短'
        # IP must be well-formed and unique among active assets
        ip = params.get('ip').strip()
        if cls.ip_check(ip):
            _sql = 'select * from assets where ip = %s and status = 0'
            _args = (ip,)
            _cnt, rt_list = SQL.excute_sql(_sql, _args)
            if _cnt != 0:
                result['ip'] = 'IP地址已存在'
        else:
            result['ip'] = 'IP地址不合法'
        # hostname must be unique among active assets
        hostname = params.get('hostname').strip()
        _sql = 'select * from assets where hostname = %s and status = 0'
        _args = (hostname,)
        _cnt, rt_list = SQL.excute_sql(_sql, _args)
        if _cnt != 0:
            result['hostname'] = '主机名已存在'
        if not result:
            return cls.create(params)
        return False, result.values()
    @classmethod
    def create(cls,params):
        """Insert a new asset row built from the validated form dict."""
        _collent = []
        _values = []
        for k, v in params.items():
            _collent.append(k)
            _values.append(v)
        _sql = 'insert into assets({coll}) values%s'.format(coll=','.join(_collent))
        _args = (tuple(_values),)
        # print tuple(_values)
        _cnt, _rtlist = SQL.excute_sql(_sql, _args)
        if _cnt != 0:
            return True, '添加成功'
        return False, '入库失败'
    @classmethod
    def validate_update(cls,params):
        """Require every submitted field to be non-empty, then update."""
        collent = params.keys()
        result = {}
        for i in collent:
            if params[i] == '':
                result[i] = '%s 不能为空' % i
        if not result:
            return cls.update(params)
        return False, result.values()
    @classmethod
    def update(cls,params):
        """Update every editable column of the asset identified by params['id']."""
        _column = 'sn,ip,hostname,os,cpu,ram,disk,idc_id,admin,business,purchase_date,warranty,vendor,model'
        id = params.get('id')
        rt_set = []
        _args = []
        for i in _column.split(','):
            # rt_set.append(i+'='+'\'%s\'' % params[i])  # old approach; parameterised queries need no manual quoting
            rt_set.append('{collens}=%s'.format(collens=i))
            _args.append(params.get(i))
        _args.append(id)
        _sql = 'update assets set {coll} where id = %s'.format(coll=','.join(rt_set))
        # _args = (id,)
        _cnt, _rtlist = SQL.excute_sql(_sql, _args)
        if _cnt != 0:
            return True, '更新成功'
        return False, '更新失败'
class Performs(object):
    """Host performance samples (cpu/ram over time) for the monitoring charts."""
    @classmethod
    def add(cls,req):
        """Insert one sample taken from the request dict (ip, cpu, ram, time)."""
        _ip = req.get('ip')
        _cpu = req.get('cpu')
        _ram = req.get('ram')
        _time = req.get('time')
        _sql = 'insert into performs(ip,cpu,ram,time) VALUES (%s,%s,%s,%s)'
        SQL.excute_sql(_sql,(_ip,_cpu,_ram,_time),False)
    @classmethod
    def get_list(cls,ip):
        """Return (times, cpus, rams) for *ip* over the last 30 minutes, oldest first."""
        _sql = 'select cpu,ram,time from performs where ip=%s and time >=%s ORDER by time asc'
        # Window start: now minus 30 minutes, formatted for SQL comparison.
        _args = (ip,time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time() - 60*30)))
        _cnt,_rt_list = SQL.excute_sql(_sql,_args)
        cpu_list = []
        ram_list = []
        time_list = []
        for _cpu,_ram,_time in _rt_list:
            cpu_list.append(_cpu)
            ram_list.append(_ram)
            time_list.append(_time.strftime('%H:%M'))
        # time_list.reverse()
        # cpu_list.reverse()
        # ram_list.reverse()
        return time_list,cpu_list,ram_list
|
51reboot/actual_09_homework
|
10/jcui/cmdb/users/modules/modules.py
|
Python
|
mit
| 13,864
|
from distutils.core import setup
import os
import afpproxy
def read(filename):
    """Return the contents of *filename* resolved relative to this script.

    Used to embed README.rst as the package long description.  The file is
    opened in a ``with`` block so the handle is closed promptly (the original
    leaked it until garbage collection).
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    with open(path) as fh:
        return fh.read()
# Distutils packaging metadata; the version comes from the package itself and
# the long description is read from README.rst via read() above.
setup(
    name = 'afpproxy',
    author = 'David Buxton',
    author_email = 'david@gasmark6.com',
    version = afpproxy.__version__,
    license = 'MIT',
    url = 'https://github.com/davidwtbuxton/afpproxy',
    description = 'proxy for the AFP (AppleShare) protocol',
    long_description = read('README.rst'),
    classifiers = [
        'Development Status :: 3 - Alpha',
        'Framework :: Twisted',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Topic :: Internet :: Proxy Servers',
    ],
    scripts = ['bin/afpproxy'],
    packages = ['afpproxy'],
)
|
davidwtbuxton/afpproxy
|
setup.py
|
Python
|
mit
| 782
|
import json
from django.http import HttpResponse
from tagging.models import TaggedItem
from problem.models import Problem
def return_problems(request):
    """JSON endpoint: list the ids of problems carrying the requested tag.

    Reads ``tag`` from the query string (default ``"tree"``) and responds
    with ``{"problems": [...]}``.  Non-GET requests fall through and yield
    ``None``, matching the original behaviour.
    """
    if request.method != "GET":
        return None
    tag = request.GET.get("tag", default="tree")
    tagged = TaggedItem.objects.get_by_model(Problem, tag)
    payload = {"problems": list(tagged.values_list("id", flat=True))}
    return HttpResponse(json.dumps(payload), content_type="application/json")
|
ultmaster/eoj3
|
api/views/tag.py
|
Python
|
mit
| 460
|
import cProfile
import functools
import pstats
def profile(func):
    """Decorator: profile each call of *func* with cProfile.

    Every invocation dumps raw stats to ``<func name>.prof`` in the working
    directory and prints the ten most time-consuming entries, then returns
    the wrapped function's result unchanged.
    """
    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped function
    def wrapper(*args, **kwargs):
        profile_filename = func.__name__ + '.prof'
        profiler = cProfile.Profile()
        result = profiler.runcall(func, *args, **kwargs)
        profiler.dump_stats(profile_filename)
        stats = pstats.Stats(profile_filename)
        stats.sort_stats('time').print_stats(10)
        return result
    return wrapper
|
Alerion/fantasy_map
|
fantasy_map/utils/profile.py
|
Python
|
mit
| 451
|
from .editDialog import EditDialog
from .payDialog import PayDialog
|
coinbox/coinbox-mod-sales
|
cbmod/sales/views/dialogs/__init__.py
|
Python
|
mit
| 68
|
import flask
from hasalog import app
@app.route('/login', methods=['GET'])
def login():
    """Render the login form, or skip straight to the list for signed-in members."""
    if flask.g.member:
        return flask.redirect('/list')
    return flask.render_template('login.html')
@app.route('/logout', methods=['GET'])
def logout():
    """Destroy the member's server-side session and clear client-side state."""
    if flask.g.member:
        session_id = flask.session['sid']
        # Delete the server-side session row so the cookie cannot be replayed.
        with flask.g.db.cursor() as cur:
            cur.execute("""
            DELETE FROM
                m_session
            WHERE
                session_id = %s
            """, [session_id])
            flask.g.db.commit()
    flask.session.clear()
    # Drop any cached query tied to the old session cookie.
    flask.g.cookie['query'] = None
    return flask.redirect('/login')
|
fujimotos/hasalog
|
hasalog/views/login.py
|
Python
|
mit
| 662
|
# -*- coding: utf-8 -*-
import os
import json
import argparse
import urllib2
from . import bottle
self_dir = os.path.dirname(__file__)
# Directory that the file-serving helpers resolve paths against; the hosting
# application assigns this at startup (it stays None until then).
root_dir = None
def _to_json(val):
    """Serialise *val* deterministically: ASCII-escaped with sorted keys."""
    return json.dumps(val, ensure_ascii=True, sort_keys=True)
def _remove_keys_copy(d, *keys):
    """Return a shallow copy of *d* without *keys*; missing keys are ignored."""
    copied = d.copy()
    for key in keys:
        copied.pop(key, None)
    return copied
def load_text(filename):
    """Read and return the text of *filename* resolved against ``root_dir``.

    Uses a ``with`` block so the file handle is closed promptly (the
    original left it open until garbage collection).
    """
    with open(os.path.join(root_dir, filename)) as fh:
        return fh.read()
def load_json(filename):
    """Return a JSON string for *filename*.

    ``*.py`` files are executed (with ``request`` and ``helpers`` in scope)
    and must define a ``JSON`` variable, which is serialised; any other file
    is returned verbatim as already-encoded JSON.
    """
    _, ext = os.path.splitext(filename)
    if ext == '.py':
        l = {}
        # SECURITY: executes the file's content; only ever serve scripts from
        # a trusted directory.  The script reports back via its JSON variable.
        exec (load_text(filename), {'request': bottle.request, 'helpers': helpers}, l)
        return _to_json(l.get('JSON', None))
    else:
        return load_text(filename)
def proxy(url, **kwargs):
    """Fetch *url* and return ``(status_code, headers_dict, body)``.

    The extra keyword arguments are currently unused.
    """
    # TODO: Method, Header, Body
    resp = None
    try:
        resp = urllib2.urlopen(url)
        return resp.getcode(), dict(resp.info()), resp.read()
    finally:
        # Guard: urlopen may raise before resp is bound; calling close() on
        # None would mask the original error with an AttributeError.
        if resp is not None:
            resp.close()
def static_file(filename, download=False):
    """Serve *filename* from ``root_dir``.

    ``download=True`` forces a download under the file's own basename; a
    string value forces a download under that name; False serves inline.
    """
    if not download:
        download_name = False
    elif download is True:
        download_name = os.path.basename(filename)
    else:
        download_name = download
    return bottle.static_file(filename, root_dir, download=download_name)
def render(body, status=200, header=None, content_type=None, cookie=None):
    """Apply status, headers, content type and cookies to the current bottle
    response, then return *body* (calling it first when it is callable)."""
    bottle.response.status = status
    for key, val in (header or {}).items():
        bottle.response.set_header(key, str(val))
    if content_type:
        bottle.response.content_type = content_type
    if cookie:
        # Accept a single Cookie or any iterable of them.
        for ck in Cookie.as_list(cookie):
            bottle.response.set_cookie(ck.name, ck.value, secret=ck.secret, **ck.options)
    return body() if callable(body) else body
class Cookie(object):
    """A cookie to set on a response: name, value, optional signing secret and
    any extra keyword options forwarded to ``bottle.response.set_cookie``."""

    def __init__(self, name, val, secret=None, **options):
        self.name = name
        self.value = val
        self.secret = secret
        self.options = options

    @staticmethod
    def as_list(cookie_or_cookies):
        """Normalise a single Cookie or an iterable of Cookies to a list."""
        if isinstance(cookie_or_cookies, Cookie):
            return [cookie_or_cookies]
        return list(cookie_or_cookies)
class Response(object):
    """A canned response: bundles the arguments later passed to render()."""

    def __init__(self, body, status=200, header=None, content_type=None, cookie=None):
        self.body = body
        self.status = status
        self.header = header
        self.content_type = content_type
        self.cookie = cookie

    def render(self):
        """Emit this response through the module-level render() helper."""
        return render(self.body,
                      status=self.status,
                      header=self.header,
                      content_type=self.content_type,
                      cookie=self.cookie)
class Route(object):
    """Declarative wrapper around ``bottle.route``.

    A Route is created unbound (method + path) and then given behaviour
    either by using it directly as a decorator, or via one of the canned
    helpers (``response``/``redirect``/``static_file``/``proxy``/content
    shortcuts), each of which registers :meth:`handle` as the callback.
    """
    def __init__(self, method, path):
        self._method, self._path = method, path
        self._response = None

    def handle(self, *args, **kwargs):
        # Bottle callback used by the canned-response helpers below.
        if self._response is not None:
            return self._response.render()
        else:
            bottle.response.status = 500
            bottle.response.content_type = 'text/plain'
            return 'Missing response'

    def _setup_route(self, fn):
        # 'ANY' registers without a method filter (decorator form); other
        # verbs are bound explicitly via positional arguments.
        if self._method == 'ANY':
            return bottle.route(self._path)(fn)
        else:
            return bottle.route(self._path, self._method, fn)

    def __call__(self, fn):
        # Lets a Route instance be used directly as a decorator.
        return self._setup_route(fn)

    # Response
    def response(self, *args, **kwargs):
        """Register a canned :class:`Response` (args as for Response.__init__)."""
        self._setup_route(self.handle)
        self._response = Response(*args, **kwargs)
        return self

    # Redirect
    def redirect(self, url):
        """Respond with an HTTP 302 pointing at *url*."""
        return self.response(url, status=302, header={'Location': url})

    # Static file
    def static_file(self, filename, download=False, **kwargs):
        """Serve *filename* via the module helper, resolved lazily per request."""
        return self.response(lambda: static_file(filename, download=download), **kwargs)

    # proxy
    def proxy(self, url, **proxy_args):
        """Fetch *url* on each request and relay its status/content type/body."""
        def do_proxy():
            status, header, body = proxy(url, **proxy_args)
            return render(body, status=status, content_type=header.get('content-type'))
        self(do_proxy)
        return self

    # Content
    def _make_content_resp(self, val, content_type, **kwargs):
        # All content-type shortcuts funnel through here; a caller-supplied
        # content_type kwarg is dropped in favour of the fixed one.
        return self.response(val, content_type=content_type, **_remove_keys_copy(kwargs, 'content_type'))

    def text(self, val, **kwargs):
        return self._make_content_resp(val, 'text/plain', **kwargs)

    def html(self, val, **kwargs):
        return self._make_content_resp(val, 'text/html', **kwargs)

    def js(self, val, **kwargs):
        return self._make_content_resp(val, 'text/javascript', **kwargs)

    def json(self, val, **kwargs):
        return self._make_content_resp(_to_json(val), 'application/json', **kwargs)

    def load_text(self, filename, **kwargs):
        return self._make_content_resp(lambda: load_text(filename), 'text/plain', **kwargs)

    def load_html(self, filename, **kwargs):
        return self._make_content_resp(lambda: load_text(filename), 'text/html', **kwargs)

    def load_js(self, filename, **kwargs):
        return self._make_content_resp(lambda: load_text(filename), 'text/javascript', **kwargs)

    def load_json(self, filename, **kwargs):
        return self._make_content_resp(lambda: load_json(filename), 'application/json', **kwargs)
def route(method, path):
    """Create a Route for *method* (an HTTP verb or 'ANY') at *path*."""
    return Route(method, path)


def _make_route(method):
    # Factory for the verb shortcuts below, e.g. GET('/x').json(...).
    return lambda path: route(method, path)


# Verb shortcuts: each is a function taking a path and returning a Route.
ANY = _make_route('ANY')
GET = _make_route('GET')
POST = _make_route('POST')
PUT = _make_route('PUT')
DELETE = _make_route('DELETE')
OPTIONS = _make_route('OPTIONS')
HEAD = _make_route('HEAD')
class _Helpers(dict):
    """Dict whose keys can also be read as attributes (helpers.name)."""

    def __getattr__(self, name):
        # Missing helpers raise KeyError, same as item access.
        return self[name]


helpers = _Helpers()


def helper(name=None):
    """Register a function in ``helpers``.

    Usable bare (@helper, registered under the function's own name) or
    with an explicit alias (@helper('alias')).
    """
    if callable(name):
        # Bare @helper usage: *name* is actually the decorated function.
        helpers[name.__name__] = name
        return name

    def decorator(fn):
        helpers[name or fn.__name__] = fn
        return fn
    return decorator
def run(port=None, root=None):
    """Start the mock server.

    Port and content root come from the arguments if given, else from the
    -p/--port and -d/--dir command-line flags, else 7928 and the cwd.
    """
    global root_dir
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--port', help="Port")
    parser.add_argument('-d', '--dir', help="Content directory")
    args = parser.parse_args()
    port = port or args.port or 7928
    root_dir = root or args.dir or os.getcwd()
    # Built-in help page, served at /+mookoo from the package directory.
    GET('/+mookoo').load_html(os.path.join(self_dir, 'help.html'))
    # Development-friendly: debug output and auto-reload enabled.
    bottle.run(host='', port=port, debug=True, reloader=True)
def cli_entry():
    """Console entry point: scaffold a mock project directory.

    Creates <proj_dir>/mock.py containing a minimal example server,
    unless that file already exists.
    """
    import errno

    def mkdir_p(dirname):
        # mkdir -p: tolerate an already-existing directory (py2-safe idiom).
        try:
            os.makedirs(dirname)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(dirname):
                pass
            else:
                raise

    def write_text_file(filename, text):
        with open(filename, 'w') as f:
            f.write(text)

    mock_templates = """
# -*- coding: utf-8 -*-
from mookoo import *
GET('/hello').json({"message": "Hello, I am MooKoo!"})
run()
"""
    parser = argparse.ArgumentParser()
    # NOTE(review): 'default' is ignored for a required positional argument;
    # consider nargs='?' if an optional proj_dir was intended — confirm.
    parser.add_argument('proj_dir', help="Mock project directory", default='')
    args = parser.parse_args()
    proj_dir = os.path.abspath(args.proj_dir if args.proj_dir else os.getcwd())
    mkdir_p(proj_dir)
    mock_filename = os.path.join(proj_dir, 'mock.py')
    # Never overwrite an existing mock.py; write with platform line endings.
    if not os.path.exists(mock_filename):
        write_text_file(mock_filename, mock_templates.replace('\n', os.linesep))
|
gaorx/mookoo
|
mookoo.py
|
Python
|
mit
| 7,230
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: makes Journal.sender optional
    # (nullable, blankable, defaulting to None).

    dependencies = [
        ('barsystem', '0029_person_special'),
    ]

    operations = [
        migrations.AlterField(
            model_name='journal',
            name='sender',
            field=models.ForeignKey(related_name='sender', to='barsystem.Person', null=True, blank=True, default=None),
        ),
    ]
|
TkkrLab/barsystem
|
barsystem/src/barsystem/migrations/0030_auto_20150717_1557.py
|
Python
|
mit
| 470
|
'''
Kodi video capturer for Hyperion
Copyright (c) 2013-2016 Hyperion Team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import xbmc
import xbmcaddon
from misc import log
class MyMonitor (xbmc.Monitor):
    '''Class to capture changes in settings and screensaver state

    Mirrors Kodi callbacks onto flags of the shared Settings object.
    '''
    def __init__(self, settings):
        xbmc.Monitor.__init__(self)
        self.__settings = settings
        # Seed the flags with the current Kodi state.
        self.__settings.screensaver = xbmc.getCondVisibility("System.ScreenSaverActive")
        self.__settings.abort = xbmc.abortRequested

    def onAbortRequested(self):
        # Bug fix: Kodi is shutting down, so the abort flag must be raised.
        # The previous code set it to False, which kept consumers running.
        self.__settings.abort = True

    def onSettingsChanged(self):
        # Re-read the add-on settings whenever the user changes them.
        self.__settings.readSettings()

    def onScreensaverDeactivated(self):
        self.__settings.screensaver = False

    def onScreensaverActivated(self):
        self.__settings.screensaver = True
class Settings:
    '''Class which contains all addon settings and xbmc state items of interest
    '''
    def __init__(self):
        '''Constructor
        '''
        # rev is bumped on every readSettings() so consumers can detect changes.
        self.rev = 0
        # The monitor also seeds self.screensaver and self.abort.
        self.__monitor = MyMonitor(self)
        self.__player = xbmc.Player()
        self.readSettings()

    def __del__(self):
        '''Destructor
        '''
        del self.__monitor
        del self.__player

    def readSettings(self):
        '''(Re-)read all settings
        '''
        log("Reading settings")
        addon = xbmcaddon.Addon()
        self.enable = addon.getSetting('hyperion_enable').lower() == 'true'
        self.enableScreensaver = addon.getSetting('screensaver_enable').lower() == 'true'
        self.address = addon.getSetting("hyperion_ip")
        self.port = int(addon.getSetting("hyperion_port"))
        self.priority = int(addon.getSetting("hyperion_priority"))
        self.timeout = int(addon.getSetting("reconnect_timeout"))
        self.capture_width = int(addon.getSetting("capture_width"))
        self.capture_height = int(addon.getSetting("capture_height"))
        # Hack around Kodi's settings readout limitations
        self.useDefaultDelay = addon.getSetting('use_default_delay').lower() == 'true'
        self.delay = int(addon.getSetting("delay"))
        self.delay24 = int(addon.getSetting("delay24"))
        self.delay25 = int(addon.getSetting("delay25"))
        self.delay50 = int(addon.getSetting("delay50"))
        self.delay59 = int(addon.getSetting("delay59"))
        self.delay60 = int(addon.getSetting("delay60"))
        # Reset on each settings change; presumably consumed elsewhere to
        # show a connection error only once — confirm against caller.
        self.showErrorMessage = True
        self.rev += 1

    def grabbing(self):
        '''Check if we grabbing is requested based on the current state and settings
        '''
        # Grab only while video is playing, and respect the screensaver
        # unless grabbing during the screensaver was explicitly enabled.
        return self.enable and self.__player.isPlayingVideo() \
            and (self.enableScreensaver or not self.screensaver)
|
poljvd/script.service.hyperion
|
resources/lib/settings.py
|
Python
|
mit
| 3,422
|
from __future__ import unicode_literals
from django.contrib.staticfiles.finders import find
from django.core.files.base import ContentFile
from django.utils.encoding import smart_str
from pipeline.compilers import Compiler
from pipeline.compressors import Compressor
from pipeline.conf import settings
from pipeline.exceptions import PackageNotFound
from pipeline.glob import glob
from pipeline.signals import css_compressed, js_compressed
from pipeline.storage import default_storage
class Package(object):
    """One pipeline package: wraps its config dict and resolves source files."""

    def __init__(self, config):
        self.config = config
        self._sources = []

    @property
    def sources(self):
        """Expand 'source_filenames' globs into a deduplicated, ordered list.

        Only paths the staticfiles finders can locate are kept; the result
        is cached on first access.
        """
        if not self._sources:
            paths = []
            seen = set()
            for pattern in self.config.get('source_filenames', []):
                for path in glob(pattern):
                    # Set membership instead of the old O(n) list scan;
                    # find() is still only called for unseen paths
                    # (short-circuit order preserved).
                    if path not in seen and find(path):
                        seen.add(path)
                        paths.append(str(path))
            self._sources = paths
        return self._sources

    @property
    def paths(self):
        # Every source that is NOT a JS template.
        return [path for path in self.sources
                if not path.endswith(settings.PIPELINE_TEMPLATE_EXT)]

    @property
    def templates(self):
        # Only the JS template sources.
        return [path for path in self.sources
                if path.endswith(settings.PIPELINE_TEMPLATE_EXT)]

    @property
    def output_filename(self):
        return self.config.get('output_filename')

    @property
    def extra_context(self):
        return self.config.get('extra_context', {})

    @property
    def template_name(self):
        return self.config.get('template_name')

    @property
    def variant(self):
        return self.config.get('variant')

    @property
    def manifest(self):
        # Defaults to True: packages participate in the manifest.
        return self.config.get('manifest', True)

    @property
    def source_map_filename(self):
        return self.config.get('source_map_filename')
class Packager(object):
    """Compiles and compresses the configured CSS/JS packages to storage."""

    def __init__(self, storage=default_storage, verbose=False, css_packages=None, js_packages=None):
        self.storage = storage
        self.verbose = verbose
        self.compressor = Compressor(storage=storage, verbose=verbose)
        self.compiler = Compiler(verbose=verbose)
        # Fall back to the settings-defined package configurations.
        if css_packages is None:
            css_packages = settings.PIPELINE_CSS
        if js_packages is None:
            js_packages = settings.PIPELINE_JS
        self.packages = {
            'css': self.create_packages(css_packages),
            'js': self.create_packages(js_packages),
        }

    def package_for(self, kind, package_name):
        """Return the Package for *kind* ('css'/'js') and *package_name*.

        Raises PackageNotFound when either key is unknown.
        """
        try:
            return self.packages[kind][package_name]
        except KeyError:
            raise PackageNotFound(
                "No corresponding package for %s package name : %s" % (
                    kind, package_name
                )
            )

    def individual_url(self, filename):
        # Storage-resolved URL for a single (uncompressed) asset.
        return self.storage.url(filename)

    def pack_stylesheets(self, package, **kwargs):
        return self.pack(package, self.compressor.compress_css, css_compressed,
                         output_filename=package.output_filename,
                         variant=package.variant, **kwargs)

    def compile(self, paths, force=False):
        return self.compiler.compile(paths, force=force)

    def pack(self, package, compress, signal, **kwargs):
        """Compile then compress *package*; return the list of files written.

        *signal* is sent after saving so listeners can post-process.
        """
        output_filename = package.output_filename
        if self.verbose:
            print("Saving: %s" % output_filename)
        paths = self.compile(package.paths, force=True)
        paths_written = [output_filename]
        if package.source_map_filename:
            # compress() returns (code, source map) when a map is requested.
            (output_content, source_map_content) = compress(paths, source_map_filename=package.source_map_filename, **kwargs)
            self.save_file(output_filename, output_content)
            self.save_file(package.source_map_filename, source_map_content)
            paths_written.append(package.source_map_filename)
        else:
            content = compress(paths, **kwargs)[0]
            self.save_file(output_filename, content)
        signal.send(sender=self, package=package, **kwargs)
        return paths_written

    def pack_javascripts(self, package, **kwargs):
        return self.pack(package, self.compressor.compress_js, js_compressed, templates=package.templates, **kwargs)

    def pack_templates(self, package):
        return self.compressor.compile_templates(package.templates)

    def save_file(self, path, content):
        return self.storage.save(path, ContentFile(smart_str(content)))

    def create_packages(self, config):
        """Map a {name: config-dict} mapping to {name: Package}; {} for falsy."""
        packages = {}
        if not config:
            return packages
        for name in config:
            packages[name] = Package(config[name])
        return packages
|
zapier/django-pipeline
|
pipeline/packager.py
|
Python
|
mit
| 4,663
|
"""
`Cargo SQL Numeric and Float Fields`
--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--
The MIT License (MIT) © 2015 Jared Lunde
http://github.com/jaredlunde/cargo-orm
"""
import copy
import decimal
import babel.numbers
from vital.debug import preprX
from cargo.etc.types import *
from cargo.expressions import *
from cargo.fields.field import Field
from cargo.validators import NumericValidator
__all__ = ('Decimal', 'Float', 'Double', 'Currency', 'Money')
class BaseDecimal(Field, NumericLogic):
    """Common base for DECIMAL-style fields backed by :class:decimal.Decimal.

    Values are created through a decimal.Context, so precision and rounding
    are configurable per field; an optional fixed number of decimal places
    is enforced via quantize(). Subclasses/mixins supply self._to_dec.
    """
    __slots__ = ('field_name', 'primary', 'unique', 'index', 'not_null',
                 'value', 'validator', '_alias', 'default', 'minval', 'maxval',
                 'table', '_context', 'decimal_places', '_quantize', 'locale',
                 'digits')
    # Signed 64-bit integer bounds, expressed as floats.
    MINVAL = -9223372036854775808.0
    MAXVAL = 9223372036854775807.0

    def __init__(self, decimal_places=None, digits=-1, minval=MINVAL,
                 maxval=MAXVAL, context=None, rounding=decimal.ROUND_UP,
                 locale=babel.numbers.LC_NUMERIC, validator=NumericValidator,
                 **kwargs):
        # digits None or negative means "unlimited" significant digits.
        prec = decimal.MAX_PREC if digits is None or digits < 0 else digits
        self.digits = digits
        self._context = context or decimal.Context(prec=prec,
                                                   rounding=rounding)
        # -1 marks "no fixed number of decimal places".
        self.decimal_places = decimal_places if decimal_places is not None \
            else -1
        self._quantize = None
        if self.decimal_places > -1:
            # e.g. 2 places -> Decimal('0.01'); used by __call__ to quantize.
            self._quantize = decimal.Decimal(10) ** (-1 * self.decimal_places)
        self.locale = locale
        super().__init__(validator=validator, **kwargs)
        self.minval = minval
        self.maxval = maxval

    def __call__(self, value=Field.empty):
        """Setter/getter: coerce *value* to Decimal and store it."""
        if value is not Field.empty:
            if value is not None:
                if isinstance(value, float):
                    # Exact conversion from the float's binary value.
                    value = self._context.create_decimal_from_float(value)
                else:
                    value = self._context.create_decimal(self._to_dec(value))
                if self._quantize is not None:
                    value = value.quantize(self._quantize)
            self.value = value
        return self.value

    def for_json(self):
        """:see::meth:Field.for_json"""
        # JSON has no decimal type, so emit a float (or None when unset).
        if self.value_is_not_null:
            return float(self)
        return None

    def clear_copy(self, *args, **kwargs):
        # Copy configuration (not the value); the Context is copied too.
        return Field.clear_copy(self, *args, decimal_places=self.decimal_places,
                                minval=self.minval, maxval=self.maxval,
                                locale=self.locale, context=self._context.copy(),
                                **kwargs)
class BaseDecimalFormat(object):
    """Mixin adding babel-based locale formatting and lenient string parsing."""
    __slots__ = tuple()

    def format(self, locale=None):
        """ @locale: (#str) LC locale, .e.g., |en_DE|,
                see::class:babel.core.Locale
            -> (#str) formatted decimal ('' when the value is null)
        """
        if self.value_is_null:
            return ''
        return babel.numbers.format_decimal(self.value,
                                            locale=locale or self.locale)

    def to_percent(self, format=None, locale=None):
        # '' for null values, mirroring format() above.
        if self.value_is_null:
            return ''
        return babel.numbers.format_percent(self.value,
                                            format=format,
                                            locale=locale or self.locale)

    def to_scientific(self, format=None, locale=None):
        if self.value_is_null:
            return ''
        return babel.numbers.format_scientific(self.value,
                                               format=format,
                                               locale=locale or self.locale)

    @staticmethod
    def _to_dec(value):
        # Decimals and floats pass through; anything else is stringified and
        # stripped down to digits, '.' and ',' before babel parses it.
        # NOTE(review): this also strips a leading '-', so negative amounts
        # parse as positive — confirm whether that is intended.
        if isinstance(value, (decimal.Decimal, float)):
            return value
        value = ''.join(digit if digit.isdigit() or digit in {'.', ','} else ''
                        for digit in str(value))
        return babel.numbers.parse_decimal(value)

    def from_string(self, value):
        """ Fills the field with @value in its numeric/decimal form, removing
            any currency symbols or punctuation.
        """
        return self.__call__(self._to_dec(value))
class Decimal(BaseDecimal, BaseDecimalFormat):
    """ ======================================================================
        Field object for the PostgreSQL field type |DECIMAL|. Use this
        field as opposed to :class:Float or :class:Double when exact digits
        is necessary.
    """
    __slots__ = BaseDecimal.__slots__
    OID = DECIMAL
    # Decimal('0.01'): two-decimal-place quantizer (also used by Money).
    TWOPLACES = decimal.Decimal(10) ** -2

    def __init__(self, decimal_places=None, digits=-1,
                 minval=BaseDecimal.MINVAL, maxval=BaseDecimal.MAXVAL,
                 context=None, rounding=decimal.ROUND_UP, **kwargs):
        """`Decimal`
            ==================================================================
            @decimal_places: (#int) number of fixed decimal_places to quantize
                :class:decimal.Decimal to, will also be used as |scale| in
                the db if @digits is provided
            @digits: (#int) the total count of significant digits in the
                whole number, i.e., the number of digits to both sides of the
                decimal point, i.e. |52.01234| has |7| significant digits.
            @context: (:class:decimal.Context)
            @rounding: (:attr:decimal.ROUND_UP or :attr:decimal.ROUND_DOWN)
            @locale: (#str) LC locale, .e.g., |en_DE|,
                see::class:babel.core.Locale, defaults to |en_US|
            ==================================================================
            :see::meth:SmallInt.__init__
        """
        super().__init__(minval=minval, maxval=maxval, context=context,
                         rounding=rounding, digits=digits,
                         decimal_places=decimal_places, **kwargs)

    def quantize(self, *args, **kwargs):
        """Delegate to decimal.Decimal.quantize on the current value."""
        return self.value.quantize(*args, **kwargs)
class Float(Field, NumericLogic, BaseDecimalFormat):
    """ ======================================================================
        Field object for the PostgreSQL field type |FLOAT4|
    """
    __slots__ = ('field_name', 'primary', 'unique', 'index', 'not_null',
                 'value', 'validator', '_alias', 'default', 'minval', 'maxval',
                 'decimal_places', 'table', 'locale')
    OID = FLOAT
    MINVAL = BaseDecimal.MINVAL
    MAXVAL = BaseDecimal.MAXVAL

    def __init__(self, decimal_places=6, minval=MINVAL, maxval=MAXVAL,
                 locale=babel.numbers.LC_NUMERIC, validator=NumericValidator,
                 **kwargs):
        """`Float`
            ==================================================================
            @decimal_places: (#int) number of digits after the decimal point to
                round to
            @locale: (#str) LC locale, .e.g., |en_DE|,
                see::class:babel.core.Locale, defaults to |en_US|
            ==================================================================
            :see::meth:SmallInt.__init__
        """
        self.decimal_places = decimal_places
        self.minval = minval
        self.maxval = maxval
        self.locale = locale
        super().__init__(validator=validator, **kwargs)

    def __call__(self, value=Field.empty):
        """Setter/getter: coerce *value* to a Python float, rounding if set."""
        if value is not Field.empty:
            if value is not None:
                value = self._to_dec(value)
                # NOTE(review): decimal_places=0 is falsy and skips rounding
                # entirely — confirm that is intended.
                if self.decimal_places and self.decimal_places != -1:
                    value = round(float(value), self.decimal_places)
                else:
                    value = float(value)
            self.value = value
        return self.value

    # Reuse Decimal's JSON conversion (float or None).
    for_json = Decimal.for_json

    def clear_copy(self, *args, **kwargs):
        return Field.clear_copy(self, *args, minval=self.minval, maxval=self.maxval,
                                decimal_places=self.decimal_places,
                                locale=self.locale, **kwargs)
class Double(Float):
    """ ======================================================================
        Field object for the PostgreSQL field type |FLOAT8|
    """
    __slots__ = Float.__slots__
    OID = DOUBLE

    def __init__(self, decimal_places=15, **kwargs):
        """`Double`
            ==================================================================
            :see::meth:Float.__init__

            Identical to Float except for the wider default precision.
        """
        super().__init__(decimal_places=decimal_places, **kwargs)
class Currency(BaseDecimal):
    """ ======================================================================
        Field object for the PostgreSQL field type |DECIMAL| with
        currency formatting options. There is no strict fixed 2-digit
        digits with this field type, the digits can be mutable.
    """
    __slots__ = ('field_name', 'primary', 'unique', 'index', 'not_null',
                 'value', 'validator', '_alias', 'default', 'minval', 'maxval',
                 'table', '_context', 'decimal_places', '_quantize', 'locale',
                 'code')
    OID = CURRENCY
    # Fixed value bounds for this field type.
    MINVAL = -92233720368547758.08
    MAXVAL = 92233720368547758.07

    def __init__(self, decimal_places=2, digits=-1, code='USD',
                 minval=MINVAL, maxval=MAXVAL, **kwargs):
        """`Currency`
            ==================================================================
            @code: (#str) the currency code e.g., |BTC| for Bitcoin
                or |GBP| British pounds.
            ==================================================================
            :see::meth:Decimal.__init__
        """
        self.code = code
        super().__init__(minval=minval, maxval=maxval, digits=digits,
                         decimal_places=decimal_places, **kwargs)

    __repr__ = preprX('name', 'code', 'str', keyless=True)

    def __str__(self):
        # str(field) yields the locale-formatted currency string.
        return self.format()

    @property
    def str(self):
        return self.format()

    @property
    def symbol(self):
        # e.g. '$' for USD in an en_US locale.
        return babel.numbers.get_currency_symbol(self.code, self.locale)

    def format(self, currency=None, format=None, locale=None,
               currency_digits=True, format_type="standard"):
        """ :see::func:babel.numbers.format_currency

            Returns '' when no value has been set.
        """
        if self.value is None or self.value is self.empty:
            return ''
        return babel.numbers.format_currency(self.value,
                                             currency or self.code,
                                             format=format,
                                             locale=locale or self.locale,
                                             currency_digits=currency_digits,
                                             format_type=format_type)

    # Lenient parser shared with BaseDecimalFormat (digits, '.' and ',').
    _to_dec = staticmethod(BaseDecimalFormat._to_dec)

    def from_string(self, value):
        """ Fills the field with @value in its numeric/decimal form, removing
            any currency symbols or punctuation.
        """
        return self.__call__(self._to_dec(value))
class Money(Currency):
    """ ======================================================================
        Field object for the PostgreSQL field type |MONEY| with
        currency formatting options. There is a strict fixed 2-digit
        digits with this field type.
    """
    __slots__ = Currency.__slots__
    OID = MONEY

    def __init__(self, *args, **kwargs):
        """`Money`
            ==================================================================
            @currency: (#str) the currency code e.g., |BTC| for Bitcoin
                or |GBP| British pounds.
            @rounding: (:attr:decimal.ROUND_DOWN or :attr:decimal.ROUND_UP)
            @locale: (#str) LC locale, .e.g., |en_DE|,
                see::class:babel.core.Locale, defaults to |en_US|
            ==================================================================
            :see::meth:SmallInt.__init__
        """
        # decimal_places is forced to 2, matching the fixed scale of MONEY.
        super().__init__(*args, decimal_places=2, **kwargs)

    @staticmethod
    def _to_dec(value):
        # Like BaseDecimalFormat._to_dec but always quantized to 2 places.
        # NOTE(review): a leading '-' is stripped here too — confirm negative
        # amounts are handled as intended.
        if isinstance(value, decimal.Decimal):
            return value
        value = ''.join(digit if digit.isdigit() or digit in {'.', ','} else ''
                        for digit in str(value))
        return babel.numbers.parse_decimal(value).quantize(Decimal.TWOPLACES)

    @staticmethod
    def to_python(value, cur):
        # DB-API typecaster: raw column value -> Decimal (None passes through).
        if value is None:
            return value
        return Money._to_dec(value)

    @staticmethod
    def register_adapter():
        # reg_type/reg_array_type and MONEYARRAY come from cargo.etc.types
        # (star-imported at module top) — registers the MONEY typecasters.
        MONEYTYPE = reg_type('MONEYTYPE', MONEY, Money.to_python)
        reg_array_type('MONEYARRAYTYPE', MONEYARRAY, MONEYTYPE)

    def clear_copy(self, *args, **kwargs):
        return Field.clear_copy(self,
                                *args,
                                minval=self.minval,
                                maxval=self.maxval,
                                locale=self.locale,
                                context=self._context.copy(),
                                **kwargs)
|
jaredlunde/cargo-orm
|
cargo/fields/numeric.py
|
Python
|
mit
| 13,124
|
import time
import io
import serial
import sys
import sensors
import logging
def write_to_serial(sercon, data):
    """Write *data* to the serial port *sercon*, framed as ':<data>#'.

    Silently does nothing when the port is closed or *data* is None.
    """
    # `is not None` per PEP 8 (was `!= None`); the two nested ifs are
    # flattened into one guard — behavior is unchanged.
    if sercon.isOpen() and data is not None:
        sercon.write(":{}#".format(data))
# Python 2 script (note the print statement below).
# Open the serial console on the first UART at 9600 baud.
serport = serial.Serial(port='/dev/ttyS0', baudrate=9600)
# Command sequence sent to the device; presumably mount/telescope protocol
# commands framed by write_to_serial as ':<cmd>#' — confirm against the
# device's command reference.
write_data = ["SH", "SN0000", "SD04", "FG"]
try:
    for cmd in write_data:
        print cmd
        write_to_serial(serport, cmd)
        # Short pause so the device can process each command.
        time.sleep(0.1)
except KeyboardInterrupt:
    pass
serport.close()
#ser_port.serial_port.close()
|
Zimcoding/Raspy-Telescope
|
Raspy-Telescope/test_serial.py
|
Python
|
mit
| 540
|
#!/usr/bin/env python3
"""
responsible for calling other modules and interacting with user
To solve the challenge problem, run:
./main.py --count 7 7 -k2 -q2 -b2 -n1
"""
import sys
from time import time as now
import argparse
from pieces import ChessPiece
from solution import (
find_solutions_s,
find_solutions_r,
find_solutions_q,
)
from chess_util import format_board
from cmd_util import input_yesno
from cmd_chess_util import input_problem
def count_or_show_by_generator(gen, count_enable, row_count, col_count):
    """
    gen: a generator returned by find_solutions_*
    count_enable: bool, only count solutions/configurations, don't show them
    """
    if not count_enable:
        # Interactive mode: page through the boards one at a time.
        print('Found Configurations:\n')
        for board in gen:
            print(format_board(board, row_count, col_count))
            try:
                input('Press Enter to see the next, Control+C to exit')
            except KeyboardInterrupt:
                print('\nGoodbye')
                break
        return
    # Counting mode: drain the generator, timing the whole run.
    print('Calculating, please wait... (Control+C to cancel)')
    start = now()
    try:
        solution_count = sum(1 for _ in gen)
    except KeyboardInterrupt:
        print('\nGoodbye')
        return
    elapsed = now() - start
    print('Number of Unique Configurations: %s' % solution_count)
    print('Running Time: %.4f seconds' % elapsed)
def interactive_main():
    """
    ask the board size and pieces count
    calculate and show all possible unique configurations
    or just count unique configurations depending on user input
    """
    row_count, col_count, count_by_symbol = input_problem()
    count_only = input_yesno(
        'Count configurations? [Yes/No] ',
        default=False,
    )
    solutions = find_solutions_s(row_count, col_count, count_by_symbol)
    count_or_show_by_generator(solutions, count_only, row_count, col_count)
def argparse_main():
    """
    parses the command line arguments and options, and performs operations
    """
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument(
        action='store',
        dest='row_count',
        type=int,
        help='number of rows in the board',
    )
    parser.add_argument(
        action='store',
        dest='col_count',
        type=int,
        help='number of columns in the board',
    )
    parser.add_argument(
        '-c',
        '--count',
        dest='count_enable',
        action='store_true',
        default=False,
        help='only count the number of unique configurations, '
             'don\'t show them',
    )
    parser.add_argument(
        '--recursive',
        dest='recursive',
        action='store_true',
        default=False,
        help='use recursive implementation instead of stack implementation'
    )
    # One piece-count option per registered piece type, e.g. -k/--kings.
    for cls in ChessPiece.class_list:
        plural_name = cls.name + 's'
        parser.add_argument(
            '-' + cls.symbol.lower(),
            '--' + plural_name,
            dest=cls.name,
            type=int,
            default=0,
            help='number of %s' % plural_name
        )
    args = parser.parse_args()
    # Map piece symbol -> requested count, defaulting to 0.
    count_by_symbol = {
        cls.symbol: getattr(args, cls.name, 0)
        for cls in ChessPiece.class_list
    }
    find_solutions = find_solutions_r if args.recursive else find_solutions_s
    gen = find_solutions(
        args.row_count,
        args.col_count,
        count_by_symbol,
    )
    count_or_show_by_generator(
        gen,
        args.count_enable,
        args.row_count,
        args.col_count,
    )
# ______________________ Test Functions ______________________ #
def compare_find_solutions_result():
    """
    run and compare the result of 3 implementations of find_solutions
    make sure they all return the same set of configurations
    with no duplicates
    """
    row_count, col_count, count_by_symbol = input_problem()
    solution_set_list = []
    # solution_set_list is a list of sets, one set for each implementation
    func_list = (
        find_solutions_r,
        find_solutions_q,
        find_solutions_s,
    )
    for func in func_list:  # pylint!
        solution_set = set()
        for board in func(row_count, col_count, count_by_symbol):
            # Boards are dicts; a sorted item tuple makes them hashable
            # and canonical, so duplicates within one implementation are
            # caught by the assert below.
            board_tuple = tuple(sorted(board.items()))
            assert board_tuple not in solution_set
            solution_set.add(board_tuple)
            solution_set_list.append(solution_set)
            print('Number of solutions: %s (%s)' % (len(solution_set), func))
    # Every implementation must produce exactly the same solution set.
    assert solution_set_list[1:] == solution_set_list[:-1]  # all items equal
def compare_find_solutions_time():
    """
    run and compare the running time of 3 implementations of find_solutions
    """
    row_count, col_count, count_by_symbol = input_problem()
    time_list = []
    # Each implementation appears twice; presumably the second pass avoids
    # warm-up skew — confirm intent.
    func_list = (
        find_solutions_s,
        find_solutions_r,
        find_solutions_q,
        find_solutions_s,
        find_solutions_r,
        find_solutions_q,
    )
    for func in func_list:  # pylint!
        tm0 = now()
        # Drain the generator; we only care about the elapsed time.
        for _ in func(row_count, col_count, count_by_symbol):
            pass
        delta = now() - tm0
        time_list.append(delta)
        print('%.4f seconds (%s)' % (delta, func))
# Entry point: with CLI arguments run the argparse interface,
# otherwise fall back to the interactive prompt.
if __name__ == '__main__':
    if len(sys.argv) > 1:
        argparse_main()
    else:
        interactive_main()
|
ilius/chess-challenge
|
main.py
|
Python
|
mit
| 5,435
|
from django.db import models
from django.contrib.admin.filters import SimpleListFilter
from django.utils.translation import gettext_lazy as _
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.contrib import admin
from django.contrib.admin.utils import get_model_from_relation
from django.core.exceptions import ValidationError
from django.contrib.admin.options import IncorrectLookupParameters
from django.utils.translation import get_language_bidi
from django.utils.encoding import smart_str
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from taggit.models import TaggedItem
from .forms import (
TagObjectForm, get_fake_fk_form_class
)
def make_choose_object_action(model_or_queryset, callback, label):
    """Build an admin action that asks the user to pick one object of
    *model*, then runs callback(modeladmin, request, queryset, chosen_obj).

    @model_or_queryset: a model class, or a queryset limiting the choices
    @label: action label and headline of the confirmation page
    """
    # Bug fix: issubclass() raises TypeError when given a queryset
    # *instance*, so the queryset branch was unreachable; guard with
    # isinstance(..., type) first.
    if isinstance(model_or_queryset, type) and issubclass(model_or_queryset, models.Model):
        model = model_or_queryset
        filter_qs = None
    else:
        # A queryset was passed: restrict the selectable objects to it.
        filter_qs = model_or_queryset
        model = model_or_queryset.model

    def action(self, request, queryset):
        # Check that the user has change permission for the actual model
        if not self.has_change_permission(request):
            raise PermissionDenied
        Form = get_fake_fk_form_class(
            model, self.admin_site, queryset=filter_qs
        )
        # User has already chosen the other req
        if request.POST.get('obj'):
            form = Form(request.POST)
            if form.is_valid():
                action_obj = form.cleaned_data['obj']
                callback(self, request, queryset, action_obj)
                self.message_user(request, _("Successfully executed."))
                # Returning None redisplays the change list.
                return None
        else:
            form = Form()
        opts = self.model._meta
        context = {
            'opts': opts,
            'queryset': queryset,
            'media': self.media,
            'action_checkbox_name': admin.helpers.ACTION_CHECKBOX_NAME,
            'form': form,
            'headline': label,
            'actionname': request.POST.get('action'),
            'applabel': opts.app_label
        }
        # Display the confirmation page
        return TemplateResponse(request, 'helper/admin/apply_action.html',
                                context)
    action.short_description = label
    return action
def make_batch_tag_action(
        action_name='tag_all', field='tags', autocomplete_url=None,
        short_description=None, template='admin_utils/admin_tag_all.html'):
    """Build an admin action that applies a set of tags to all selected
    objects.

    @field: name of a tag manager attribute, or a callable(obj, tags)
            that applies the tags itself
    """
    def tag_func(self, request, queryset):
        """
        Add tag to all selected objects
        """
        opts = self.model._meta
        # Check that the user has change permission for the actual model
        if not self.has_change_permission(request):
            raise PermissionDenied
        # User has already chosen the other req
        if request.POST.get('tags'):
            form = TagObjectForm(request.POST, tags=[],
                                 autocomplete_url=autocomplete_url)
            if form.is_valid():
                tags = form.cleaned_data['tags']
                for obj in queryset:
                    if callable(field):
                        # Custom applicator decides how to attach the tags.
                        field(obj, tags)
                    else:
                        getattr(obj, field).add(*tags)
                    obj.save()
                self.message_user(request, _("Successfully added tags"))
                # Return None to display the change list page again.
                return None
            # Invalid form: fall through and re-render the tag form below.
            self.message_user(request, _("Form invalid"))
        tags = set()
        form = TagObjectForm(tags=tags,
                             autocomplete_url=autocomplete_url)
        context = {
            'opts': opts,
            'queryset': queryset,
            'media': self.media,
            'form': form,
            'headline': _('Set these tags for all selected items:'),
            'action_checkbox_name': admin.helpers.ACTION_CHECKBOX_NAME,
            'action_name': action_name,
            'applabel': opts.app_label
        }
        # Display the confirmation page
        return TemplateResponse(request, template, context)

    if short_description is None:
        short_description = _("Add tag to all selected")
    tag_func.short_description = short_description
    return tag_func
class NullFilter(SimpleListFilter):
    """
    Taken from
    http://stackoverflow.com/questions/7691890/filtering-django-admin-by-null-is-not-null
    under CC-By 3.0
    """
    title = ''
    parameter_name = ''

    def lookups(self, request, model_admin):
        return (
            ('1', _('Has value')),
            ('0', _('None')),
        )

    def queryset(self, request, queryset):
        # '0' selects rows where the field IS NULL; '1' the complement.
        null_lookup = {self.parameter_name: None}
        choice = self.value()
        if choice == '0':
            return queryset.filter(**null_lookup)
        if choice == '1':
            return queryset.exclude(**null_lookup)
        return queryset
def make_nullfilter(field, title):
    """Build a NullFilter subclass bound to *field* with the given title."""
    attrs = {
        'title': title,
        'parameter_name': field,
    }
    return type(str('%sNullFilter' % field.title()), (NullFilter,), attrs)
class GreaterZeroFilter(SimpleListFilter):
    """Admin sidebar filter splitting rows on field == 0 vs. field > 0."""
    title = ''
    parameter_name = ''

    def lookups(self, request, model_admin):
        return (
            ('1', _('Greater zero')),
            ('0', _('Zero')),
        )

    def queryset(self, request, queryset):
        choice = self.value()
        if choice == '0':
            return queryset.filter(**{self.parameter_name: 0})
        if choice == '1':
            return queryset.filter(**{'%s__gt' % self.parameter_name: 0})
        return queryset
def make_greaterzerofilter(field, title):
    """Build a GreaterZeroFilter subclass bound to *field*."""
    attrs = {
        'title': title,
        'parameter_name': field,
    }
    return type(str('%sGreaterZeroFilter' % field.title()),
                (GreaterZeroFilter,), attrs)
class EmptyFilter(SimpleListFilter):
    """Admin sidebar filter: empty vs. non-empty, where 'empty' is a
    configurable sentinel value (optionally computed by a callable)."""
    title = ''
    parameter_name = ''
    empty_value = ''

    def lookups(self, request, model_admin):
        return (
            ('0', _('Is empty')),
            ('1', _('Is not empty')),
        )

    def queryset(self, request, queryset):
        sentinel = self.empty_value
        if callable(sentinel):
            sentinel = sentinel()
        lookup = {self.parameter_name: sentinel}
        choice = self.value()
        if choice == '0':
            return queryset.filter(**lookup)
        if choice == '1':
            return queryset.exclude(**lookup)
        return queryset
def make_emptyfilter(field, title, empty_value=''):
    """Build an EmptyFilter subclass for *field* with a custom sentinel."""
    attrs = {
        'title': title,
        'parameter_name': field,
        'empty_value': empty_value,
    }
    return type(str('%sEmptyFilter' % field.title()), (EmptyFilter,), attrs)
class TaggitListFilter(SimpleListFilter):
    """
    A custom filter class that can be used to filter by taggit tags in the admin.
    """
    # Human-readable title which will be displayed in the
    # right admin sidebar just above the filter options.
    title = _('tags')
    # Parameter for the filter that will be used in the URL query.
    parameter_name = 'tag'
    # Override to filter against a custom through model.
    tag_class = TaggedItem

    def lookups(self, request, model_admin):
        """
        Returns a list of tuples. The first element in each tuple is the coded value
        for the option that will appear in the URL query. The second element is the
        human-readable name for the option that will appear in the right sidebar.
        """
        filters = []
        # Only tags actually used on this model are offered.
        tags = self.tag_class.tags_for(model_admin.model)
        for tag in tags:
            filters.append((tag.slug, _(tag.name)))
        return filters

    def queryset(self, request, queryset):
        """
        Returns the filtered queryset based on the value provided in the query
        string and retrievable via `self.value()`.

        Implicitly returns None when no tag is selected, which leaves the
        queryset unfiltered.
        """
        if self.value():
            return queryset.filter(tags__slug__in=[self.value()])
class ForeignKeyFilter(admin.FieldListFilter):
    """Free-form admin filter on a foreign-key column via a text input."""
    template = "helper/admin/fk_filter.html"

    def __init__(self, field, request, params, model, model_admin, field_path):
        super().__init__(
            field, request, params, model, model_admin, field_path)
        self.lookup_val = request.GET.get(self.field_path, None)
        self.create_used_parameters()

    def create_used_parameters(self):
        """Translate the raw query value into a concrete ORM lookup."""
        param = self.field_path
        raw = self.used_parameters.pop(param, None)
        if raw is None:
            return
        if raw == 'isnull':
            self.used_parameters['{}__isnull'.format(param)] = True
        elif raw == 'notnull':
            self.used_parameters['{}__isnull'.format(param)] = False
        else:
            # Comma-separated ids become an __in lookup.
            self.used_parameters['{}__in'.format(param)] = raw.split(',')

    def expected_parameters(self):
        return [self.field_path]

    def choices(self, cl):
        """Single pseudo-choice carrying the template's current state."""
        return [{
            'value': self.lookup_val,
            'field_path': self.field_path,
            'query_string': cl.get_query_string({},
                [self.field_path]),
        }]
class SearchFilter(ForeignKeyFilter):
    """Like ForeignKeyFilter, but matches by string prefix instead of ids."""

    def create_used_parameters(self):
        """Turn the raw query value into a __startswith lookup."""
        raw = self.used_parameters.pop(self.field_path, None)
        if raw is not None:
            self.used_parameters['{}__startswith'.format(self.field_path)] = raw
class MultiFilterMixin:
    """Admin list-filter mixin allowing multiple include/exclude selections.

    The filter value is a comma-separated list; entries prefixed with '~'
    are exclusions, all others inclusions.
    """
    template = 'helper/admin/multi_filter.html'
    # Suffix appended to parameter_name to form the ORM lookup (e.g. '__in').
    lookup_name = ''
    def queryset(self, request, queryset):
        # Each include is applied as its own filter() call (AND semantics);
        # all excludes go into a single exclude().
        if request.GET.get(self.parameter_name):
            lookup = self.parameter_name + self.lookup_name
            values = self.value_as_list()
            includes = [v for v in values if not v.startswith('~')]
            excludes = [v[1:] for v in values if v.startswith('~')]
            if includes:
                for inc in includes:
                    queryset = queryset.filter(**{lookup: [inc]})
            if excludes:
                queryset = queryset.exclude(**{lookup: excludes})
        return queryset
    def value_as_list(self):
        # The raw value is a CSV string; empty/missing value -> [].
        return self.value().split(',') if self.value() else []
    def choices(self, changelist):
        def amend_query_string(include=None, clear=None, exclude=None):
            # Build the query string resulting from toggling one choice:
            # including removes a matching exclusion (and vice versa);
            # clearing removes both forms of the value.
            selections = self.value_as_list()
            if include and include not in selections:
                selections.append(include)
                exclude_val = '~{}'.format(include)
                if exclude_val in selections:
                    selections.remove(exclude_val)
            if exclude:
                exclude_val = '~{}'.format(exclude)
                if exclude in selections:
                    selections.remove(exclude)
                if exclude_val not in selections:
                    selections.append(exclude_val)
            if clear:
                if clear in selections:
                    selections.remove(clear)
                exclude_val = '~{}'.format(clear)
                if exclude_val in selections:
                    selections.remove(exclude_val)
            if selections:
                csv = ','.join(selections)
                return changelist.get_query_string({self.parameter_name: csv})
            else:
                # Nothing selected any more: drop the parameter entirely.
                return changelist.get_query_string(remove=[self.parameter_name])
        # First choice resets the filter.
        yield {
            'selected': self.value() is None,
            'query_string': changelist.get_query_string(remove=[self.parameter_name]),
            'display': _('All'),
            'reset': True,
        }
        values = self.value_as_list()
        for lookup, title in self.lookup_choices:
            # Each choice carries three pre-built query strings so the
            # template can render include/clear/exclude links directly.
            yield {
                'included': str(lookup) in values,
                'excluded': '~{}'.format(lookup) in values,
                'query_string': changelist.get_query_string({self.parameter_name: lookup}),
                'include_query_string': amend_query_string(include=str(lookup)),
                'clear_query_string': amend_query_string(clear=str(lookup)),
                'exclude_query_string': amend_query_string(exclude=str(lookup)),
                'display': title,
            }
class TreeRelatedFieldListFilter(admin.RelatedFieldListFilter):
    """
    From Django MPTT under MIT
    https://github.com/django-mptt/django-mptt/blob/master/mptt/admin.py
    Admin filter class which filters models related to parent model with all it's descendants.
    """
    template = 'helper/admin/tree_filter.html'
    # Pixels of indentation per tree level in the rendered choice list.
    tree_level_indent = 10
    def __init__(self, field, request, params, model, model_admin, field_path):
        self.other_model = get_model_from_relation(field)
        if field.remote_field is not None and hasattr(field.remote_field, 'get_related_field'):
            self.rel_name = field.remote_field.get_related_field().name
        else:
            self.rel_name = self.other_model._meta.pk.name
        # Synthetic lookup name; expanded into a real __in lookup in queryset().
        self.changed_lookup_kwarg = '%s__%s__inhierarchy' % (field_path, self.rel_name)
        super().__init__(field, request, params, model, model_admin,
                         field_path)
        self.lookup_val = request.GET.get(self.changed_lookup_kwarg)
    def expected_parameters(self):
        return [self.changed_lookup_kwarg, self.lookup_kwarg_isnull]
    # Ripped from contrib.admin.filters,FieldListFilter Django 1.8 to deal with
    # lookup name 'inhierarchy'
    def queryset(self, request, queryset):
        try:
            # #### MPTT ADDITION START
            if self.lookup_val:
                # Replace the synthetic 'inhierarchy' parameter with an
                # __in filter over the selected node's descendants.
                other_model = self.other_model.objects.get(pk=self.lookup_val)
                other_models = other_model.get_descendants()
                del self.used_parameters[self.changed_lookup_kwarg]
                self.used_parameters.update(
                    {'%s__%s__in' % (self.field_path, self.rel_name): other_models}
                )
            # #### MPTT ADDITION END
            return queryset.filter(**self.used_parameters)
        except ValidationError as e:
            raise IncorrectLookupParameters(e)
    # Adding padding_style to each choice tuple
    def field_choices(self, field, request, model_admin):
        # The model admin may override the per-level indent.
        tree_level_indent = getattr(model_admin, 'tree_level_indent', self.tree_level_indent)
        language_bidi = get_language_bidi()
        initial_choices = field.get_choices(include_blank=False)
        pks = [pk for pk, val in initial_choices]
        models = field.related_model._default_manager.filter(pk__in=pks)
        levels_dict = {model.pk: model.get_depth() for model in models}
        choices = []
        for pk, val in initial_choices:
            # Tuple: (pk, label, padding side, padding size in pixels).
            choices.append((
                pk, val,
                'right' if language_bidi else 'left',
                tree_level_indent * levels_dict[pk]
            ))
        return choices
    # Ripped from contrib.admin.filters,RelatedFieldListFilter Django 1.8 to
    # yield padding_dir, padding_size
    def choices(self, cl):
        # #### TREE ADDITION START
        EMPTY_CHANGELIST_VALUE = self.empty_value_display
        # #### TREE ADDITION END
        yield {
            'selected': self.lookup_val is None and not self.lookup_val_isnull,
            'query_string': cl.get_query_string({}, [self.changed_lookup_kwarg, self.lookup_kwarg_isnull]),
            'display': _('All'),
        }
        for pk_val, val, padding_dir, padding_size in self.lookup_choices:
            yield {
                'selected': self.lookup_val == smart_str(pk_val),
                'query_string': cl.get_query_string({
                    self.changed_lookup_kwarg: pk_val,
                }, [self.lookup_kwarg_isnull]),
                'display': val,
                # #### TREE ADDITION START
                'padding_dir': padding_dir,
                'padding_size': padding_size,
                # #### TREE ADDITION END
            }
        # Offer an "empty" choice when the relation is nullable or M2M.
        if (isinstance(self.field, ForeignObjectRel) and
                (self.field.field.null or isinstance(self.field.field, ManyToManyField)) or
                self.field.remote_field is not None and
                (self.field.null or isinstance(self.field, ManyToManyField))):
            yield {
                'selected': bool(self.lookup_val_isnull),
                'query_string': cl.get_query_string({
                    self.lookup_kwarg_isnull: 'True',
                }, [self.changed_lookup_kwarg]),
                'display': EMPTY_CHANGELIST_VALUE,
            }
|
stefanw/froide
|
froide/helper/admin_utils.py
|
Python
|
mit
| 16,318
|
# -*- coding: UTF-8 -*-
# 1.2. use Threading module create thread
# Python 2 demo: create threads by subclassing threading.Thread and
# overriding __init__ and run.
# !/usr/bin/python
import threading
import time
exitFlag = 0
class myThread(threading.Thread): # subclass of threading.Thread
    def __init__(self, threadID, name, counter):
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.counter = counter
    def run(self): # the code in run() executes once start() is called
        print "Starting " + self.name
        print_time(self.name, self.counter, 5)
        print "Exiting " + self.name
def print_time(threadName, delay, counter):
    # Print the current time `counter` times, sleeping `delay` seconds
    # between prints; bail out early when exitFlag is set.
    while counter:
        if exitFlag:
            # NOTE(review): the `thread` module is never imported in this
            # file, so this line would raise NameError if exitFlag were set.
            thread.exit()
        time.sleep(delay)
        print "%s: %s" % (threadName, time.ctime(time.time()))
        counter -= 1
# create new thread
thread1 = myThread(1, "Thread-1", 1)
thread2 = myThread(2, "Thread-2", 2)
# start thread
thread1.start()
thread2.start()
print "Exiting Main Thread"
|
xiaoyong0312/Python-dev
|
Python2.x/Python2.x-1-high/101_thread_1.2.py
|
Python
|
mit
| 1,117
|
#!/usr/bin/env python
# Build script for the `sidewinder` package: compiles the Cython wrapper
# together with the plain C implementation into one extension module.
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# One extension mixing a .pyx and a .c source; Cython's build_ext
# (registered in cmdclass below) handles the .pyx translation step.
ext_modules = [
    Extension('sidewinder._sidewinder', [
        'sidewinder/_sidewinder.pyx', 'sidewinder/sidewinder.c'
    ]),
]
setup(
    name='sidewinder',
    version='0.1.0',
    author='Alex Lee',
    author_email='alex@thirdbeat.com',
    url='http://sidewinder.thirdbeat.com',
    description='Utility to transfer file descriptors using (send|recv)msg.',
    long_description='''
Utility package to transfer file descriptors using sendmsg and recvmsg.
''',
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Cython',
        'Programming Language :: C',
        'Programming Language :: Python',
    ],
    license='MIT License',
    cmdclass={'build_ext': build_ext},
    packages=['sidewinder'],
    ext_modules = ext_modules
)
|
easies/sidewinder
|
setup.py
|
Python
|
mit
| 929
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## This is a sample controller
## - index is the default action of any application
## - user is required for authentication and authorization
## - download is for downloading files uploaded in the db (does streaming)
## - call exposes all registered services (none by default)
#########################################################################
import os
import socket
import urllib2
from gluon import *
import sys
biopython_lib = request.folder + "biopython/"
sys.path.append(biopython_lib)
from Bio import Phylo
from Bio.Phylo import * #import biopython module
def index():
    """Landing page: list the demo sample-data folders for the view's <select>."""
    sample_root = current.request.folder + 'static/sample_data/'
    entries = os.listdir(sample_root)
    # Keep only entries named 'demo_<something>' and strip the prefix.
    folders = [e.split('_', 1)[1] for e in entries if e.split('_')[0] == 'demo']
    # Display labels use spaces instead of underscores.
    folderLabels = [f.replace('_', ' ') for f in folders]
    # Pairs like ('Mammal_Hemoglobin', 'Mammal Hemoglobin') so the view
    # can show nice names while keeping the folder name as the value.
    foldersAndLabels = zip(folders, folderLabels)
    return dict(foldersAndLabels=foldersAndLabels)
def getFileName():
    """Absolute path of the "input" gene-tree file.

    Built from the `treeFileName` request parameter — the main state value
    passed between view and controller at each step.
    """
    return current.request.folder + current.request.vars.treeFileName
def getRelativeWebPath(suffix):
    """Map a generated-file suffix to its web path. Currently the identity."""
    return suffix
def visualize():
    """Preprocess request params and build the full tree URL for the view.

    The <applet> tag handed to Archaeopteryx needs the *entire* URL of the
    tree file, which is why it is assembled here rather than in the view.
    With a non-applet visualization this step would be unnecessary.
    """
    demo_name = current.request.vars.treeName
    file_suffix = current.request.vars.file
    # Default to the request's host header.
    hostname = current.request.env.http_host
    # Switch to the public phylotastic URL when running on a NESCENT host.
    if 'phylotastic' in socket.gethostname().lower():
        hostname = 'phylotastic.nescent.org'
    treeUrl = URL('static', 'sample_data/demo_'+demo_name+'/input_genetree.nwk'+file_suffix,
                  scheme='http', host=hostname)
    return dict(treeUrl=treeUrl,
                header=current.request.vars.header)
def getSpeciesList():
    """Preprocess the input gene tree and report kept/removed taxa.

    Runs forester's gene_tree_preprocess on the uploaded tree, then reads
    back the two text files it produces. Returns JSON with the path of the
    preprocessed tree plus the removed/kept node lists.
    """
    import subprocess  # local import so the shared header stays untouched

    # Absolute file name of the input tree file.
    absoluteFileName = getFileName()
    # Strip the trailing '.txt'; forester uses this prefix for all outputs.
    filePrefix = absoluteFileName[:-4]
    # Argument list instead of a shell string: the file name derives from a
    # request parameter, so going through a shell would allow command
    # injection. subprocess.call() without shell=True avoids that.
    subprocess.call([
        'java', '-Xmx1024m',
        '-cp', current.request.folder + 'static/lib/forester.jar',
        'org.forester.application.gene_tree_preprocess',
        absoluteFileName,
    ])
    removedNodes = [l.strip() for l in open(filePrefix+'_removed_nodes.txt').readlines()]
    keptNodes = [l.strip() for l in open(filePrefix+'_species_present.txt').readlines()]
    geneTreeFile = getRelativeWebPath('_preprocessed_gene_tree.phylo.xml')
    # Note: we always return the 'vizFile' and 'vizLabel' data values
    # from each Ajax call, as they're used consistently by the
    # front-end to generate visualization links.
    return response.json( dict(vizFile = geneTreeFile,
                               vizLabel = "Input Gene Tree",
                               removedNodes = removedNodes,
                               keptNodes = keptNodes))
def getPhylotasticTree():
    # Build a comma-separated species list from the preprocessing step,
    # fetch a pruned species tree from the Phylotastic web service, save it
    # next to the input file, and report its terminal-node count.
    absoluteFileName = getFileName()
    filePrefix = absoluteFileName[:-4]
    # Load the kept nodes and create the comma-delimited species
    # string for sending to PTastic
    speciesList = [l.strip() for l in open(filePrefix+'_species_present.txt').readlines()]
    # Need underscores instead of spaces
    speciesList = [x.replace(' ', '_') for x in speciesList]
    speciesString = ','.join(speciesList)
    phylotasticUrlBase = 'http://phylotastic-wg.nescent.org/script/phylotastic.cgi?species='
    speciesTreeUrl = phylotasticUrlBase+speciesString+'&tree=mammals&format=newick'
    # Blocking HTTP fetch of the Newick species tree (Python 2 urllib2).
    conn = urllib2.urlopen(speciesTreeUrl)
    speciesTreeString = conn.read()
    speciesTreeString = speciesTreeString.strip()
    speciesTreeFilename = filePrefix+'_species_tree.txt'
    open(speciesTreeFilename,'w').write(speciesTreeString)
    #setting a counter for counting nodes i.e.number of species in the species newick tree.
    #I have referenced the link http://biopython.org/DIST/docs/tutorial/Tutorial.html#htoc182 to understand the count_terminals() function
    got_nodes=0 #counter to keep the count of the nodes
    tree = Phylo.read(speciesTreeFilename, 'newick') #tree reads the species labels in newick format
    got_nodes=BaseTree.TreeMixin.count_terminals(tree);
    # got_nodes now holds the number of terminal (leaf) nodes; the client
    # can compare it with the expected species count.
    speciesTreeWebFile = getRelativeWebPath('_species_tree.txt')
    return response.json( dict(vizFile = speciesTreeWebFile,
                               vizLabel = "Phylotastic Species Tree",
                               got_nodes=got_nodes
                               ) )
def reconcileTrees():
    """Reconcile the preprocessed gene tree against the species tree.

    Invokes forester's gsdi tool; duplication/speciation events are marked
    in the produced phyloXML file. Returns JSON with the output file path.
    """
    import subprocess  # local import so the shared header stays untouched

    absoluteFileName = getFileName()
    filePrefix = absoluteFileName[:-4]
    geneTreeFile = filePrefix+'_preprocessed_gene_tree.phylo.xml'
    speciesTreeFile = filePrefix+'_species_tree.txt'
    # Argument list instead of a shell string: both paths derive from a
    # request parameter, so a shell invocation would be injectable.
    subprocess.call([
        'java', '-Xmx1024m',
        '-cp', current.request.folder + 'static/lib/forester.jar',
        'org.forester.application.gsdi',
        '-m', '-q',
        geneTreeFile,
        speciesTreeFile,
    ])
    reconciledTreeWebFile = getRelativeWebPath('_preprocessed_gene_tree.phylo_gsdi_out.phylo.xml')
    return response.json( dict(vizFile = reconciledTreeWebFile,
                               vizLabel = "Reconciled Tree (red=duplication, green=speciation)"
                               ) )
|
prateekgupta3991/localrecon
|
controllers/reconciliotastic.py
|
Python
|
mit
| 6,655
|
"""
Python program that writes sentence-based concordance
of an input file to another output file.
Usage:
python concordance.py [path/to/input/file] [path/to/output/file]
"""
import sys
import string
import locale
import functools
def concordanceLexicon(string_list):
    """
    Returns dictionary of sentence-based concordance
    for words in the list of strings.

    Maps each lower-cased, punctuation-stripped word to the list of
    sentence indices where it occurs (one entry per occurrence).
    """
    lexicon = {}
    sentence_index = 0
    terminators = '.!?'
    for token in string_list:
        word = token.strip(string.punctuation).lower()
        if word:
            lexicon.setdefault(word, []).append(sentence_index)
        # A token ending in a terminator closes the current sentence.
        if token[-1] in terminators:
            sentence_index += 1
    return lexicon
def concordanceFormat(lexicon):
    """
    Return string format for a concordance lexicon.

    Words are sorted with en_US collation when that locale is available;
    otherwise the process default locale is used instead of crashing
    (locale.setlocale raises locale.Error for missing locales).
    """
    output = ""
    if len(lexicon.keys()):
        # Targeting English US for sorting; not every system has this
        # locale generated, so fall back gracefully instead of raising.
        try:
            locale.setlocale(locale.LC_ALL, 'en_US')
        except locale.Error:
            pass
        sorted_words = sorted(lexicon.keys(),
                              key=functools.cmp_to_key(locale.strcoll))
        header = 'Word count: ' + str(len(sorted_words)) + '\n'
        output += header
        for word in sorted_words:
            entry = lexicon[word]
            # One line per word: occurrence count plus sentence indices.
            line = [word, ' -> {', str(len(entry)), ' : ', str(entry), '}\n']
            output += ''.join(line)
    else:
        output = "No word found.\n"
    return output
def splitTextBySpace(input_file):
    """
    Split the file text by space.

    Flattens all lines into one word list in a single pass; the previous
    sum(lines, []) idiom was quadratic in the number of lines.
    """
    return [word for line in input_file for word in line.split()]
if __name__ == '__main__':
    # Usage: python concordance.py <input-file> <output-file>
    # Reads the input, builds the concordance, writes the formatted report.
    with open(sys.argv[2], 'w') as output_file:
        with open(sys.argv[1], 'r') as input_file:
            string_list = splitTextBySpace(input_file)
            findings = concordanceLexicon(string_list)
            results = concordanceFormat(findings)
            output_file.write(results)
|
julio73/scratchbook
|
code/concordance/concordance.py
|
Python
|
mit
| 2,003
|
from flask import request, jsonify
from flask_login import login_required, current_user, \
login_user, logout_user
from sixquiprend.models.user import User
from sixquiprend.sixquiprend import app
@app.route('/login', methods=['POST'])
def login():
    """Authenticate the posted credentials and open a remembered session."""
    credentials = request.get_json()
    user = User.login(credentials['username'], credentials['password'])
    login_user(user, remember=True)
    return jsonify(user=user), 201
@app.route('/logout', methods=['POST'])
@login_required
def logout():
    """Close the current user's session (model-side, then Flask-Login)."""
    current_user.logout()
    logout_user()
    response = jsonify()
    return response, 201
@app.route('/users/register', methods=['POST'])
def register():
    """Create a new account, unless registration is disabled in config."""
    if app.config['ALLOW_REGISTER_USERS'] != True:
        return jsonify(error='Registering is deactivated'), 403
    payload = request.get_json()
    user = User.register(payload['username'], payload['password'])
    return jsonify(user=user), 201
|
nyddogghr/SixQuiPrend
|
sixquiprend/routes/login_logout.py
|
Python
|
mit
| 856
|
# Exercise definition for an input/output question: the student predicts
# the printf output of a small variable-swap C program.
question_type = 'input_output'
source_language = 'C'
# Hotspots: $x/$y are the program inputs; $out0..$out8 are the nine
# printed fields (three per printf line).
hotspot_declarations = [
    ['$x','int'],['$y','int'],
    ['$out0','string'],['$out1','string'],['$out2','string'],
    ['$out3','string'],['$out4','string'],['$out5','string'],
    ['$out6','string'],['$out7','string'],['$out8','string'],
    ]
# Each group row gives concrete $x/$y values; the None slots are the
# outputs the student must fill in.
group_list = [
    ['swap_io_',
     [1,2, None,None,None, None,None,None, None,None,None],
     [2,1, None,None,None, None,None,None, None,None,None],
     [None,None, None,None,None, None,None,None, None,None,None],
     ]
    ]
# Template lines are tagged 'd' (description view) / 'x' (executed view);
# 'dx' lines appear in both.
global_code_template = '''\
d #include <stdio.h>
x #include <stdio.h>
dx
'''
main_code_template = '''\
dx int x,y,z;
dx
dx x = $x;
dx y = $y;
dx
dx z = x;
dx printf("x:%d\\ty:%d\\tz:%d\\n",x,y,z);
dx x = y;
dx printf("x:%d\\ty:%d\\tz:%d\\n",x,y,z);
dx y = z;
dx printf("x:%d\\ty:%d\\tz:%d\\n",x,y,z);
dx
'''
# No command-line arguments or stdin for this program.
argv_template = ''
stdin_template = ''
stdout_template = '''\
x:$out0 y:$out1 z:$out2
x:$out3 y:$out4 z:$out5
x:$out6 y:$out7 z:$out8
'''
|
stryder199/RyarkAssignments
|
Assignment2/ttt/eop/chapter3/swap_io.py
|
Python
|
mit
| 966
|
from .parse_links_for_text import parse_links_for_text
def flip_instructor(name):
    """Turn 'Last, First' into 'First Last' (e.g. 'Bridges IV, William H.'
    becomes 'William H. Bridges IV'). Names without a comma pass through."""
    parts = [part.strip() for part in name.split(',')]
    flipped = ' '.join(reversed(parts))
    return flipped.strip()
def split_and_flip_instructors(instructors):
    """Extract each instructor name from the markup and flip it to
    'First Last' order."""
    names = parse_links_for_text(instructors)
    return list(map(flip_instructor, names))
|
StoDevX/course-data-tools
|
lib/split_and_flip_instructors.py
|
Python
|
mit
| 511
|
from .config import Config
from .domain import Domain
from .calendar import Calendar, SyncedCalendar
|
ternus/gcal-bridge
|
gcalbridge/__init__.py
|
Python
|
mit
| 101
|
from django.contrib import admin
from charms.models import Charm
# Register your models here.
# Expose the Charm model in the Django admin with the default ModelAdmin.
admin.site.register(Charm)
|
velxundussa/Storyteller
|
charms/admin.py
|
Python
|
mit
| 123
|
#!/usr/bin/env python3
'''
You have a function with one side of the DNA string; you need to get the other complementary side.
DNA_strand ("ATTGC") # return "TAACG"
DNA_strand ("GTAT") # return "CATA"
'''
def DNA_strand(dna):
    """Return the complementary DNA strand (A<->T, G<->C).

    Characters outside ATGC pass through unchanged, matching the original
    dict.get(char, char) behaviour.
    """
    # str.translate performs the per-character mapping in one C-level pass.
    complement = str.maketrans("ATGC", "TACG")
    return dna.translate(complement)
#Other methods
|
JLJTECH/TutorialTesting
|
CodeWars/2019/CompDNA7k.py
|
Python
|
mit
| 364
|
#!/usr/bin/env python
''' readNwriteTextFiles.py -- create and read text file
options:
'i' followed with line number - insert a line
'e' followed with line number - edit a line
'p' - print whole file
any character than abowe - exit
'''
import os
ls = os.linesep
def insertLineNum(option):
    # For 'i'/'e' prompt for and return a 1-based line number; return 0 to
    # signal "exit" (unknown option) and 1 for the print option 'p'.
    if option == 'i' or option == 'e':
        linenum = int(raw_input('Enter the line number: '))
        # NOTE(review): int() above already raised on non-numeric input,
        # so this isinstance check can never be False -- dead branch.
        if not isinstance(linenum, int):
            print "error: must be number "
            exit()
        else:
            return linenum
    elif option != 'p':
        return 0
    return 1
def editFile():
    # Interactive editor: load the file's lines, loop on user commands
    # (insert / edit / print), then write everything back on exit.
    filename = raw_input('Enter file name: ')
    # 'a+' creates the file if missing and allows reading existing lines.
    handler = open(filename, 'a+')
    text = []
    for line in handler:
        text.append(line)
    # loop until user terminates
    # import pdb; pdb.set_trace()
    while True:
        option = raw_input("Enter the option: 'i' insert line, 'e' edit line, 'p' print file, any other char will exit")
        linenum = insertLineNum(option)
        if option == 'p':
            for l in text:
                print l,
        elif linenum == 0:
            # Unknown option: leave the edit loop.
            break
        else:
            entry = raw_input('> ')
            entry = '{0}{1}'.format(entry, ls)
            if option == 'i':
                text.insert(linenum-1, entry)
            elif option == 'e':
                text[linenum-1] = entry
    # write all lines to file with proper line-ending
    handler = open(filename, 'w')
    handler.writelines(['{0}'.format(x) for x in text])
    handler.close()
    print 'write to file DONE!'
def readFile():
    # Print the whole file to stdout; report a friendly error when the
    # file does not exist (Python 2 except syntax).
    filename = raw_input("Enter file name: ")
    try:
        handler = open(filename, 'r')
    except IOError, e:
        print '*** error: file {0} does not exist'.format(filename)
    else:
        for line in handler:
            print line,
        handler.close()
# Top-level dispatch: ask for 'read' or 'edit' and run the matching mode.
option = raw_input("Enter the read/edit option: ")
if option == 'read':
    readFile()
elif option == 'edit':
    editFile()
else:
    print "Correct options are 'read' or 'edit'"
|
dragancvetic/py_training
|
book1/ch03/readNeditTextFile.py
|
Python
|
mit
| 2,056
|
"""General option parser."""
# Copyright (c) 2001-2009 ElevenCraft Inc.
# See LICENSE for details.
from optparse import OptionParser
from schevo import trace
def set_trace(option, opt, value, parser):
    # optparse callback for -T/--trace: replay the buffered trace history
    # at the new level, raise the global monitor level, log the change.
    trace.print_history(value)
    trace.monitor_level = value
    trace.log(1, 'Tracing level set to', value)
def parser(usage):
    """Build the shared OptionParser carrying the Schevo -T/--trace option."""
    option_parser = OptionParser(usage)
    option_parser.add_option(
        '-T', '--trace',
        help='Set Schevo tracing level.',
        action='callback',
        callback=set_trace,
        type=int,
    )
    return option_parser
|
Schevo/schevo
|
schevo/script/opt.py
|
Python
|
mit
| 583
|
'''
Follow these steps to configure the slash command in Slack:
1. Navigate to https://<your-team-domain>.slack.com/services/new
2. Search for and select "Slash Commands".
3. Enter a name for your command and click "Add Slash Command Integration".
4. Copy the token string from the integration settings and use it in the next section.
5. After you complete this blueprint, enter the provided API endpoint URL in the URL field.
To encrypt your secrets use the following steps:
1. Create or use an existing KMS Key - http://docs.aws.amazon.com/kms/latest/developerguide/create-keys.html
2. Click the "Enable Encryption Helpers" checkbox
3. Paste <COMMAND_TOKEN> into the kmsEncryptedToken environment variable and click encrypt
'''
import boto3
import json
import logging
import os
from base64 import b64decode
from urlparse import parse_qs
import handler
from bobo import settings
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def respond(err, res=None):
    """Build an API Gateway proxy response.

    On error: status 400 with the exception's message; otherwise status
    200 with *res* serialized as JSON.
    """
    if err:
        status, body = '400', err.message
    else:
        status, body = '200', json.dumps(res)
    return {
        'statusCode': status,
        'body': body,
        'headers': {
            'Content-Type': 'application/json',
        },
    }
def lambda_handler(event, context):
    """Slack slash-command entry point.

    Verifies the shared token from the form-encoded request body, then
    hands the user and command text to the handler module.
    """
    form = parse_qs(event['body'])
    token = form['token'][0]
    if token != settings.SLACK['expected_token']:
        logger.error("Request token (%s) does not match expected", token)
        return respond(Exception('Invalid request token'))
    slack_user = form['user_name'][0]
    command = form['text'][0]
    return respond(None, handler.trigger(slack_user, command))
|
greginvm/groceries-bot
|
slack/lambda.py
|
Python
|
mit
| 1,621
|
# -*- coding: utf-8 -*-
import os
from openre.agent.server.state import is_running
import uuid
import signal
import logging
def stop_process(event, name=None, id=None):
    """
    If name is not None, than check that state have the same name

    Locate a tracked child process (by state id, OS pid or name), send it
    SIGTERM, and re-schedule this event (event.timeout) until the process
    is confirmed gone or its state is cleaned up.
    """
    # on the second run pid is already in event.context['id']
    pid = event.context.get('id', id)
    state = None
    process_state = event.pool.context['server'].process_state
    def is_proper_name(name, state_name):
        # Match either exactly or as the prefix part of a dotted state
        # name (e.g. 'proc' matches 'proc.1').
        if not name:
            return False
        if '.' not in name and '.' in state_name \
                and state_name.startswith('%s.' % name):
            return True
        if name == state_name:
            return True
        return False
    # Resolve `state` and `pid`: by state UUID, by OS pid, or by name.
    if isinstance(pid, uuid.UUID) or str(pid) in process_state:
        state = process_state[str(pid)]
        pid = state['pid']
    elif isinstance(pid, int):
        for stt in process_state.values():
            # was_pid keeps the previous pid after the state was zeroed.
            if stt['pid'] == pid or (stt['pid'] == 0 and stt['was_pid'] == pid):
                state = stt
                break
    elif name:
        rows = []
        for stt in process_state.values():
            if is_proper_name(name, stt['name']) and stt['pid']:
                state = stt
                rows.append(stt)
        # NOTE(review): if no row matched, `state` is still None here and
        # this subscript raises TypeError before the `if not state` guard.
        pid = state['pid']
        if len(rows) > 1:
            # Ambiguous name: list all matching pids so the caller can
            # pick one explicitly.
            command = ''
            for row in rows:
                command += '%s (status: %s)\n' % (row['pid'], row['status'])
            return event.failed(
                'There are %s processes with name "%s".' \
                ' Specify which one to stop:\n%s'
                % (len(rows), name, command)
            )
    if not state:
        return event.failed(
            'Process state not found for name="%s", id="%s", cant kill'
            % (name, pid))
    if state['pid'] == 0 and state['was_pid'] != 0:
        pid = state['was_pid']
    # first run of the task
    if 'id' not in event.context:
        event.context['id'] = state['id']
    if not is_running(state):
        return event.failed('%s already stopped.' % name.capitalize())
    if not isinstance(pid, int) or not pid:
        return event.failed('Wrong pid format %s' % repr(pid))
    if not is_proper_name(name, state['name']):
        return event.failed(
            'Process state name "%s" not equal "%s", cant kill' % (
                state['name'], name))
    if state['status'] not in ['exit', 'error', 'kill', 'clean']:
        # Process still running: send SIGTERM, mark it 'kill', re-check
        # in one second (event.timeout), give up after 600s (event.expire).
        try:
            os.kill(pid, signal.SIGTERM)
            logging.debug('Kill %s', pid)
            process_state[str(state['id'])] = {
                'status': 'kill',
            }
            event.expire(600)
            event.timeout(1)
        except OSError:
            # Process vanished before the signal landed.
            process_state[str(state['id'])] = {
                'status': 'error',
                'message': 'Exit but not properly cleaned',
                'pid': 0,
            }
        return
    if state['status'] == 'exit' and state['pid'] == 0 \
            and state['was_pid'] != 0 and pid:
        # Marked exited, but the old pid may still be hanging around:
        # signal 0 probes for existence, then SIGTERM if still alive.
        try:
            os.kill(pid, 0)
            try:
                os.kill(pid, signal.SIGTERM)
                logging.debug('Kill hang process %s', pid)
                event.expire(600)
            except OSError:
                process_state[str(state['id'])] = {
                    'status': 'error',
                    'message': 'Exit but not properly cleaned',
                    'pid': 0,
                }
        except OSError: #No process with locked PID
            pass
        process_state[str(state['id'])] = {
            'was_pid': 0,
        }
        return
    if state['status'] in ['kill', 'clean']:
        # SIGTERM already sent: poll with signal 0 until the pid is gone.
        try:
            os.kill(pid, 0)
        except OSError: #No process with locked PID
            # exception while cleaning
            if state['status'] == 'clean':
                process_state[str(state['id'])] = {
                    'status': 'error',
                    'message': 'Exit but not properly cleaned',
                    'pid': 0,
                }
                logging.warn(
                    'Process with pid %s stopped with errors', pid)
            else:
                logging.debug(
                    'Successfully stopped process with pid %s', pid)
            return
        event.timeout(1)
|
openre/openre
|
openre/agent/server/helpers.py
|
Python
|
mit
| 4,316
|
def main(power=1000):
    """Project Euler 16: sum of the decimal digits of 2**power.

    The exponent is now a parameter (default 1000, so plain ``main()`` is
    unchanged), and the digit sum is returned as well as printed so that
    callers and tests can use the value directly.
    """
    num = 2**power
    # Sum the digits of the decimal representation.
    res = sum(int(digit) for digit in str(num))
    print("If you can trust me, the number you are looking for is " + str(res))
    return res
main()
|
PysKa-Ratzinger/personal_project_euler_solutions
|
solutions/001-025/16/main.py
|
Python
|
mit
| 194
|
from flask import request, Flask
import ldap
app = Flask(__name__)
@app.route("/normal")
def normal():
"""
A RemoteFlowSource is used directly as DN and search filter
"""
unsafe_dc = request.args['dc']
unsafe_filter = request.args['username']
dn = "dc={}".format(unsafe_dc)
search_filter = "(user={})".format(unsafe_filter)
ldap_connection = ldap.initialize("ldap://127.0.0.1")
user = ldap_connection.search_s(
dn, ldap.SCOPE_SUBTREE, search_filter)
@app.route("/direct")
def direct():
"""
A RemoteFlowSource is used directly as DN and search filter using a oneline call to .search_s
"""
unsafe_dc = request.args['dc']
unsafe_filter = request.args['username']
dn = "dc={}".format(unsafe_dc)
search_filter = "(user={})".format(unsafe_filter)
user = ldap.initialize("ldap://127.0.0.1").search_s(
dn, ldap.SCOPE_SUBTREE, search_filter)
@app.route("/normal_argbyname")
def normal_argbyname():
"""
A RemoteFlowSource is used directly as DN and search filter, while the search filter is specified as
an argument by name
"""
unsafe_dc = request.args['dc']
unsafe_filter = request.args['username']
dn = "dc={}".format(unsafe_dc)
search_filter = "(user={})".format(unsafe_filter)
ldap_connection = ldap.initialize("ldap://127.0.0.1")
user = ldap_connection.search_s(
dn, ldap.SCOPE_SUBTREE, filterstr=search_filter)
# if __name__ == "__main__":
# app.run(debug=True)
|
github/codeql
|
python/ql/test/query-tests/Security/CWE-090-LdapInjection/ldap_bad.py
|
Python
|
mit
| 1,512
|
# coding=utf-8
"""Item generator base class
.. moduleauthor:: Stéphane Vialette <vialette@gmail.com>
"""
class TimedItemGenerator(object):
    """Timed item generator.

    Wraps an item generator and a timeout generator: inside a simulation
    environment it repeatedly waits a random delay, produces the next
    item and hands it to the simulator, until stop() is called.
    """
    # Lifecycle states.
    NOT_STARTED = 0
    RUNNING = 1
    STOPPED = 2
    # Number of produced items -- a class-level counter shared across
    # *all* instances, as in the original design.
    NUMBER_OF_ITEMS = 0

    def __init__(self, item_generator, timeout_generator):
        """Initialize this TimedItemGenerator object."""
        self._item_generator = item_generator
        self._timeout_generator = timeout_generator
        self._state = TimedItemGenerator.NOT_STARTED

    @property
    def state(self):
        """Return the state of this TimedItemGenerator."""
        return self._state

    def number_of_items(self):
        """Return the number of generated items so far (across instances)."""
        return TimedItemGenerator.NUMBER_OF_ITEMS

    def stop(self):
        """Stop this TimedItemGenerator object."""
        self._state = TimedItemGenerator.STOPPED

    def run(self, environment, simulator):
        """Generate random items with random timeout delays.

        Coroutine-style generator for the simulation environment: yields
        a timeout between items and pushes each new item to *simulator*.

        The original wrapped the loop in ``try/except Exception: raise``,
        a no-op that immediately re-raised (with an unused ``e``); that
        dead wrapper has been removed -- behaviour is unchanged.
        """
        if self._state != TimedItemGenerator.RUNNING:
            self._state = TimedItemGenerator.RUNNING
        while self.state == TimedItemGenerator.RUNNING:
            # random delay between two items
            yield environment.timeout(self._timeout_generator.timeout())
            # get a new random item, named <GeneratorClass>-<sequence#>
            TimedItemGenerator.NUMBER_OF_ITEMS += 1
            item_generator_class_name = self._item_generator.__class__.__name__
            number_of_generated_items_so_far = TimedItemGenerator.NUMBER_OF_ITEMS
            item_name = "{}-{}".format(item_generator_class_name, number_of_generated_items_so_far)
            item = self._item_generator.item(item_name)
            # send this new item to the simulator
            simulator.add_item(environment, item)
|
vialette/ultrastorage
|
ultrastorage/timeditemgenerator/timeditemgenerator.py
|
Python
|
mit
| 2,009
|
# -*- coding: utf-8 -*-
from double_linked import DoubleLinkedList
class Queue(object):
    '''FIFO queue implemented as a composition of DoubleLinkedList.

    (The previous docstring said "Stack", but this type enqueues at one
    end via insert() and dequeues from the other via shift().)
    '''
    def __init__(self, input=None):
        # `input` shadows the builtin, but renaming it would break
        # callers using the keyword, so it is kept as-is.
        self.queue = DoubleLinkedList(input)
    def enqueue(self, val):
        # Append `val` at the insert end of the underlying list.
        self.queue.insert(val)
    def dequeue(self):
        # Remove and return the oldest element (the shift end).
        shift_val = self.queue.shift()
        return shift_val
    def size(self):
        # Delegates to DoubleLinkedList._size().
        return self.queue._size()
    def peek(self):
        # NOTE(review): returns tail.data -- presumably the next element
        # to be dequeued; confirm against DoubleLinkedList's orientation.
        return self.queue.tail.data
|
paulsheridan/data-structures
|
src/queue.py
|
Python
|
mit
| 480
|
__author__ = 'tom.bailey'
import pymel.core as pm
import pymel.core.datatypes as dt
import re
import rig.parent_tools as pt
reload(pt)
def orient_snap(source, target):
    """Copy target's absolute world-space rotation onto source."""
    target_rotation = pm.xform(target,
                               query=True,
                               absolute=True,
                               worldSpace=True,
                               rotation=True)
    pm.xform(source,
             absolute=True,
             worldSpace=True,
             rotation=target_rotation)
def point_snap(source, target):
    """
    Move source so that its rotate pivot lands on target's rotate pivot.

    Works with odd frozen-translate objects by measuring the pivot offset
    and applying it to the current world translation, rather than setting
    a position directly.
    """
    src_pivot = dt.Vector(pm.xform(source, query=True, worldSpace=True, rotatePivot=True))
    tgt_pivot = dt.Vector(pm.xform(target, query=True, worldSpace=True, rotatePivot=True))
    src_translation = dt.Vector(pm.xform(source, query=True, worldSpace=True, translation=True))
    pivot_offset = tgt_pivot - src_pivot
    pm.xform(source, absolute=True, worldSpace=True, translation=src_translation + pivot_offset)
def variable_point_snap(source, snapObject, target):
    """Move ``source`` so that ``snapObject`` lands on ``target``.

    Works with odd frozen-translate objects by measuring positional deltas
    instead of assigning the target position directly.

    NOTE(review): ``source_pivot`` is computed but never used, and
    ``snap_pivot`` queries ``source`` rather than ``snapObject`` — possibly
    intentional, but worth confirming against callers.
    """
    print "variable snap", source, snapObject, target
    source_pivot = dt.Vector(pm.xform(source, query=True, worldSpace=True, rotatePivot=True))
    snap_pivot = dt.Vector(pm.xform(source, query=True, worldSpace=True, translation=True))
    target_pivot = dt.Vector(pm.xform(target, query=True, worldSpace=True, translation=True))
    source_ws = dt.Vector(pm.xform(source, query=True, absolute=True, worldSpace=True, translation=True))
    # Offset the source by the distance the snap point must travel.
    _position_difference = target_pivot - snap_pivot
    print snap_pivot, "\n", target_pivot, "\n", _position_difference
    _position_result = source_ws + _position_difference
    pm.xform(source, absolute=True, worldSpace=True, translation=_position_result)
class SnapTools():
    """Interactive snap helpers operating on the current Maya selection."""

    def __init__(self):
        pass

    def snap_selection(self):
        """Snap the first selected object onto the second (position and rotation).

        NOTE(review): several of the ``original_*``/``target_*`` queries are
        unused, and ``set_world_rotation`` is invoked twice in a row; kept
        verbatim to preserve behavior.
        """
        sel = pm.ls(selection=True)
        if len(sel) >= 2:
            original = sel[0]
            target = sel[1]
            original_rotation = xforms().get_world_rotation(original)
            original_pivot_position = xforms().get_world_pivot(original)
            original_world_position = xforms().get_world_space(original)
            target_rotation = xforms().get_world_rotation(target)
            target_pivot_position = xforms().get_world_pivot(target)
            target_world_position = xforms().get_world_space(target)
            # Translate by the pivot delta so the pivots coincide.
            _pivot_difference = vector_tools().minus(target_pivot_position, original_pivot_position)
            _out_position = vector_tools().plus(original_world_position, _pivot_difference)
            xforms.set_world_space(original, _out_position)
            xforms.set_world_rotation(original, target_rotation)
            xforms.set_world_rotation(original, target_rotation)
            #orient_snap(original, target)
            # Re-apply the rotation using the target's rotate order, then
            # restore the node's own rotate order (preserving orientation).
            rot = pm.xform(target, query=True, absolute=True, worldSpace=True, rotation=True)
            node_ro = pm.xform(original, query=True, rotateOrder=True)
            ro = pm.xform(target, query=True, rotateOrder=True)
            pm.xform(original, absolute=True, worldSpace=True, rotation=rot, rotateOrder=ro, preserve=True)
            pm.xform(original, rotateOrder=node_ro, preserve=True)

    @staticmethod
    def store_transform():
        """Cache the first selected object's world position/rotation in optionVars."""
        sel = pm.ls(selection=True)
        if sel:
            _position = xforms().get_world_space(sel[0])
            _rotation = xforms().get_world_rotation(sel[0])
            # floatValue overwrites the optionVar; floatValueAppend adds components.
            pm.optionVar(floatValue=('NT_xform_Pos_1', _position[0]))
            pm.optionVar(floatValueAppend=('NT_xform_Pos_1', _position[1]))
            pm.optionVar(floatValueAppend=('NT_xform_Pos_1', _position[2]))
            pm.optionVar(floatValue=('NT_xform_Rot_1', _rotation[0]))
            pm.optionVar(floatValueAppend=('NT_xform_Rot_1', _rotation[1]))
            pm.optionVar(floatValueAppend=('NT_xform_Rot_1', _rotation[2]))
            print pm.optionVar['NT_xform_Pos_1']
            print pm.optionVar['NT_xform_Rot_1']
        pass

    def restore_transform(self):
        """Apply the optionVar-cached transform back to the first selected object."""
        sel = pm.ls(selection=True)
        if sel:
            _position = pm.optionVar['NT_xform_Pos_1']
            _rotation = pm.optionVar['NT_xform_Rot_1']
            xforms().set_world_space(sel[0], _position)
            xforms().set_world_rotation(sel[0], _rotation)
        pass
class xforms():
    """Thin static wrappers around ``pm.xform`` queries and setters."""

    def __init__(self):
        pass

    @staticmethod
    def get_world_pivot(node):
        # get the world pivot
        return pm.xform(node, query=True, worldSpace=True, rotatePivot=True)

    @staticmethod
    def get_world_space(node):
        # gets the world space, not really world space tho, just what maya thinks is world space
        return pm.xform(node, query=True, relative=True, worldSpace=True, translation=True)

    @staticmethod
    def set_world_space(node, position):
        # set the world space position on the object
        pm.xform(node, absolute=True, worldSpace=True, translation=position)

    @staticmethod
    def get_world_rotation(node):
        # get the absolute world rotation of the object
        return pm.xform(node, query=True, absolute=True, worldSpace=True, rotation=True)

    @staticmethod
    def set_world_rotation(node, rotation):
        # set the absolute world rotation
        pm.xform(node, absolute=True, worldSpace=True, rotation=rotation)
class vector_tools():
    """Component-wise arithmetic helpers for 3-component vectors."""

    @staticmethod
    def minus(vector1, vector2):
        """Return ``vector1 - vector2`` as a new 3-element list."""
        return [vector1[i] - vector2[i] for i in range(3)]

    @staticmethod
    def plus(vector1, vector2):
        """Return ``vector1 + vector2`` as a new 3-element list."""
        return [vector1[i] + vector2[i] for i in range(3)]
def snap_pivots(meta_node, snapNode, extraKey=False, reverse=False):
    """Move a control's animated pivot onto ``snapNode`` without the control
    visibly moving, then key the result.

    :param meta_node: node whose ``control``/``attach_pivot``/``animate_pivot``
        connections identify the rig parts to operate on.
    :param snapNode: transform whose local translation becomes the new pivot.
    :param extraKey: when True, also key the previous frame to pin the pose.
    :param reverse: compensate using the pivot node's world delta instead of
        the attach node's delta.
    """
    ctrlNode = pm.listConnections(meta_node.control)[0]
    attachNode = pm.listConnections(meta_node.attach_pivot)[0]
    pivotNode = pm.listConnections(meta_node.animate_pivot)[0]
    # cache auto key state and turn it off
    keyState = pm.autoKeyframe(query=True, state=True)
    pm.autoKeyframe(state=False)
    # query the tangents
    #_in = pm.keyTangent(g=True, query=True, inTangentType=True)[0]
    #_out = pm.keyTangent(g=True, query=True, outTangentType=True)[0]
    '''
    pm.setKeyframe(ctrlNode,
                   time=pm.getCurrentTime() -1,
                   insert=True,
                   attribute=['translate', 'rotate'])
    '''
    if extraKey:
        # Pin the pose one frame earlier so the snap does not bleed backwards.
        pm.setKeyframe(ctrlNode,
                       time=[pm.getCurrentTime()-1],
                       inTangentType='linear',
                       outTangentType='linear',
                       attribute=['translate', 'rotate', 'AnimPivot'])
    pm.setKeyframe(ctrlNode,
                   time=[pm.getCurrentTime()],
                   inTangentType='linear',
                   outTangentType='linear',
                   attribute=['translate', 'rotate', 'AnimPivot'])
    # get our current control location (so we know how much to adjust it by after moving the pivot
    ctrlPos = dt.Vector(pm.xform(ctrlNode, query=True, worldSpace=False,translation=True))
    pivotPos = dt.Vector(pm.xform(pivotNode, query=True, worldSpace=True,translation=True))
    # get the new pivot location
    snapXform = pm.xform(snapNode, query=True, worldSpace=False,translation=True)
    #
    pre_attachXform = dt.Vector(pm.xform(attachNode, query=True, worldSpace=True,translation=True))
    # move the pivot attribute
    # pm.xform(pivotNode, worldSpace=False, translation=snapXform)
    ctrlNode.AnimPivot.set(snapXform)
    post_attachXform = dt.Vector(pm.xform(attachNode, query=True, worldSpace=True,translation=True))
    post_pivotXform = dt.Vector(pm.xform(pivotNode, query=True, worldSpace=True,translation=True))
    if not reverse:
        # Counter-shift the control so the attach point stays put.
        pm.xform(ctrlNode, worldSpace=False, translation=pre_attachXform - post_attachXform + ctrlPos)
    else:
        # Counter-shift the control so the pivot itself stays put.
        pm.xform(ctrlNode, worldSpace=False, translation=pivotPos - post_pivotXform + ctrlPos)
        pass
    # pm.setKeyframe(pivotNode, inTangentType='linear', outTangentType='step')
    pm.setKeyframe(ctrlNode, inTangentType='linear', outTangentType='linear', attribute=['translate', 'rotate', 'AnimPivot'])
    # restore autokey state
    pm.autoKeyframe(state=keyState)
    pm.refresh()
# Seed the optionVars with zeroed 3-component vectors on first load so that
# later reads of NT_xform_Pos_1 / NT_xform_Rot_1 never fail.
if not pm.optionVar(exists='NT_xform_Pos_1'):
    pm.optionVar(floatValue=('NT_xform_Pos_1', 0.0))
    pm.optionVar(floatValueAppend=('NT_xform_Pos_1', 0.0))
    pm.optionVar(floatValueAppend=('NT_xform_Pos_1', 0.0))
if not pm.optionVar(exists='NT_xform_Rot_1'):
    pm.optionVar(floatValue=('NT_xform_Rot_1', 0.0))
    pm.optionVar(floatValueAppend=('NT_xform_Rot_1', 0.0))
    pm.optionVar(floatValueAppend=('NT_xform_Rot_1', 0.0))
|
tb-animator/tbtools
|
apps/tb_snaps.py
|
Python
|
mit
| 8,800
|
import os.path
import warnings
__version__ = (0, 0, 1)
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read()
finally:
fh.close()
def get_revision():
    """
    :returns: Revision number of this branch/checkout, if available. None if
        no revision number can be determined.
    """
    package_dir = os.path.dirname(__file__)
    checkout_dir = os.path.normpath(os.path.join(package_dir, '..'))
    git_dir = os.path.join(checkout_dir, '.git')
    # Guard clause: no .git directory means no revision to report.
    if not os.path.exists(git_dir):
        return None
    return _get_git_revision(git_dir)
__build__ = get_revision()
|
anscii/django-model-synonyms
|
msyn/__init__.py
|
Python
|
mit
| 744
|
# -*- coding: utf-8 -*-
"""Utils module."""
import os
from appdirs import user_data_dir
# NOTE(review): the call result rebinds (shadows) the imported
# ``user_data_dir`` function; a distinct constant name would be clearer.
user_data_dir = user_data_dir("iqdb_tagger", "softashell")
# Default SQLite database location inside the per-user data directory.
default_db_path = os.path.join(user_data_dir, "iqdb.db")
# Folder where generated thumbnails are stored.
thumb_folder = os.path.join(user_data_dir, "thumbs")
|
rachmadaniHaryono/iqdb_tagger
|
iqdb_tagger/utils.py
|
Python
|
mit
| 259
|
import math
import numpy as np
def calcTf(nAllTermsInDoc, nTermInDoc):
    """Term frequency: the share of the document's terms that are this term.

    A log-scaled variant was tried but judged too unstable (values for all
    documents ended up close to each other), so the plain ratio is used.
    """
    term_count = float(nTermInDoc)
    total_terms = float(nAllTermsInDoc)
    return term_count / total_terms
def calcIdf(nAllDocuments, nDocumentsContainingTerm):
    """Inverse document frequency as a plain ratio.

    (The log-scaled variant was deliberately left disabled — see calcTf.)
    """
    corpus_size = float(nAllDocuments)
    containing_docs = float(nDocumentsContainingTerm)
    return corpus_size / containing_docs
def calcWeight(tf, idf):
    """tf-idf weight: the product of term frequency and inverse document frequency."""
    return idf * tf
def missingTermWeight():
    """Weight assigned to a term that is absent from the document: zero."""
    return 0
def cosineSimilarity(docWeights, queryWeights):
    """Cosine similarity between a document and a query weight vector.

    Query norms are memoised in the singleton ``NormCache`` keyed by the
    weight tuple, since the same query is compared against many documents.
    Returns a plain ``float``.
    """
    cache = NormCache()
    dj = np.array(docWeights)
    q = np.array(queryWeights)
    # use cache to store norms
    dj_norm = np.linalg.norm(dj)
    try:
        q_norm = cache[tuple(queryWeights)]
    except KeyError:
        # First time we see this query: compute and remember its norm.
        q_norm = np.linalg.norm(q)
        cache[tuple(queryWeights)] = q_norm
    #print(q, dj)
    score = np.dot(dj, q) / (dj_norm * q_norm)
    #print(score)
    return float(score)
def euclDistance(docWeights, queryWeights):
    """Euclidean (L2) distance between two weight vectors, as a float."""
    difference = np.array(docWeights) - np.array(queryWeights)
    return float(np.linalg.norm(difference))
class NormCache(object):
    """
    Singleton Cache from http://python-3-patterns-idioms-test.readthedocs.io/en/latest/Singleton.html.
    Instance Holder
    """
    class __NormCache:
        """
        Singleton Cache from http://python-3-patterns-idioms-test.readthedocs.io/en/latest/Singleton.html.
        """
        def __init__(self):
            # Mapping of key -> cached norm value.
            self.cache = {}
        def __str__(self):
            return str(self.cache)
        def __getitem__(self, key):
            return self.cache[key]
        def __setitem__(self, key, value):
            self.cache[key] = value
        def clear(self):
            # Drop every cached entry.
            self.cache = {}
    # The single shared inner instance (class attribute, lazily created).
    instance = None
    def __new__(cls):  # __new__ is always a classmethod
        # NOTE(review): __new__ returns the inner __NormCache, so all callers
        # share one holder; the delegating __getattr__/__setattr__ below are
        # therefore only reached via attribute access on the class itself.
        if not NormCache.instance:
            NormCache.instance = NormCache.__NormCache()
        return NormCache.instance
    def __getattr__(self, name):
        # Delegate attribute reads to the shared holder.
        return getattr(self.instance, name)
    def __setattr__(self, name, value):
        # Delegate attribute writes to the shared holder.
        return setattr(self.instance, name, value)
|
CodeLionX/CommentSearchEngine
|
cse/WeightCalculation.py
|
Python
|
mit
| 2,312
|
try:
import re
import sys
import socket
import struct
from functools import reduce
from lib.core.exceptions import CrowbarExceptions
except Exception as err:
from lib.core.exceptions import CrowbarExceptions
raise CrowbarExceptions(str(err))
class InvalidIPAddress(ValueError):
    """Raised when an IP-address specification cannot be parsed."""
class IpRange:
    """Expand textual IP specifications into individual addresses.

    :meth:`iprange` supports three syntaxes:
      * spans:      ``"10.0.0.1 - 10.0.0.5"``
      * CIDR:       ``"10.0.0.0/24"``
      * wildcards:  ``"10.0.*.*"`` (delegated to module-level wildcard_iprange)

    Derived from http://www.randomwalking.com/snippets/iprange.text
    """

    def ipaddr_to_binary(self, ipaddr):
        """Convert a dotted-quad string to its 32-bit integer value."""
        q = ipaddr.split('.')
        return reduce(lambda a, b: int(a) * 256 + int(b), q)

    def binary_to_ipaddr(self, ipbinary):
        """Convert a 32-bit integer back to a dotted-quad string."""
        return socket.inet_ntoa(struct.pack('!I', ipbinary))

    def iprange(self, ipaddr):
        """Return a generator over every address described by ``ipaddr``.

        Tries span, then CIDR, then wildcard syntax; raises
        :class:`InvalidIPAddress` when none match.
        """
        span_re = re.compile(r'''(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})   # The beginning IP address
                             \s*-\s*
                             (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})   # The end IP address
                             ''', re.VERBOSE)
        res = span_re.match(ipaddr)
        if res:
            beginning = res.group(1)
            end = res.group(2)
            # BUG FIX: span_iprange is a method; the original called it
            # unqualified, raising NameError for every span input.
            return self.span_iprange(beginning, end)
        cidr_re = re.compile(r'''(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})   # The IP address
                             /(\d{1,2})   # The mask
                             ''', re.VERBOSE)
        res = cidr_re.match(ipaddr)
        if res:
            addr = res.group(1)
            cidrmask = res.group(2)
            return self.cidr_iprange(addr, cidrmask)
        wild_re = re.compile(r'''(\d{1,3}|\*)\.
                             (\d{1,3}|\*)\.
                             (\d{1,3}|\*)\.
                             (\d{1,3}|\*)   # The IP address
                             ''', re.VERBOSE)
        res = wild_re.match(ipaddr)
        if res:
            # wildcard_iprange is a module-level helper, not a method.
            return wildcard_iprange(ipaddr)
        raise InvalidIPAddress

    def span_iprange(self, beginning, end):
        """Yield each address from ``beginning`` to ``end`` inclusive."""
        b = self.ipaddr_to_binary(beginning)
        # BUG FIX: both conversions must go through self (was unqualified).
        e = self.ipaddr_to_binary(end)
        while b <= e:
            yield self.binary_to_ipaddr(b)
            b = b + 1

    def cidr_iprange(self, ipaddr, cidrmask):
        """Yield every address inside the ``ipaddr/cidrmask`` network."""
        # Host-bit mask: all ones in the low (32 - prefix) bits.
        mask = (int(2) ** int(32 - int(cidrmask))) - 1
        b = self.ipaddr_to_binary(ipaddr)
        e = self.ipaddr_to_binary(ipaddr)
        b = int(b & ~mask)
        e = int(e | mask)
        while b <= e:
            yield self.binary_to_ipaddr(b)
            b = b + 1
b = b + 1
def wildcard_iprange(ipaddr):
    """Yield every dotted-quad address matched by a wildcard specification.

    Each octet is either a literal (emitted verbatim) or ``*``, which
    expands to the full 0-255 range. Addresses are produced in ascending
    order, the last octet varying fastest.
    """
    per_octet = []
    for octet in ipaddr.split('.'):
        if octet == '*':
            per_octet.append([str(value) for value in range(256)])
        else:
            per_octet.append([octet])
    for o1 in per_octet[0]:
        for o2 in per_octet[1]:
            for o3 in per_octet[2]:
                for o4 in per_octet[3]:
                    yield o1 + '.' + o2 + '.' + o3 + '.' + o4
|
galkan/crowbar
|
lib/core/iprange.py
|
Python
|
mit
| 3,398
|
"""
Cria uma classe trem que possui carros. O trem é iterável e cada elemento
retornado pela iteração retorna um sring com o número do carro.
"""
class Train:
    """Iterable train: iterating yields one label string per car."""

    def __init__(self, cars):
        # Number of cars in this train.
        self.cars = cars

    def __len__(self):  # len(train)
        """The car count."""
        return self.cars

    def __iter__(self):
        """Yield 'car #1' .. 'car #N' lazily."""
        for index in range(self.cars):
            yield 'car #{}'.format(index + 1)
|
opensanca/trilha-python
|
02-python-oo/aula-05/exemplos/train.py
|
Python
|
mit
| 377
|
#!/usr/bin/python
# TODO: issues with new oauth2 stuff. Keep using older version of Python for now.
# #!/usr/bin/env python
from participantCollection import ParticipantCollection
import re
import datetime
import pyperclip
# Edit Me!
# This script gets run on the first day of the following month, and that month's URL is
# what goes here. E.g. If this directory is the directory for February, this script gets
# run on March 1, and this URL is the URL for the March challenge page.
nextMonthURL = "https://www.reddit.com/r/pornfree/comments/fbuz2o/stay_clean_march_this_thread_updated_daily_check/"
# If this directory is the directory for November, this script gets run on December 1,
# and currentMonthIndex gets the index of November, i.e. 11.
currentMonthIndex = datetime.date.today().month - 1
if currentMonthIndex == 0:
currentMonthIndex = 12
currentMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[currentMonthIndex]
nextMonthIndex = currentMonthIndex % 12 + 1
nextMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[nextMonthIndex]
participants = ParticipantCollection()
numberStillIn = participants.sizeOfParticipantsWhoAreStillIn()
initialNumber = participants.size()
percentStillIn = int(round(100 * numberStillIn / initialNumber, 0))
def templateForParticipants():
    """Render each surviving, checked-in participant as a /u/ mention,
    one per paragraph."""
    mentions = ["/u/" + participant.name + "\n\n"
                for participant in participants.participantsWhoAreStillInAndHaveCheckedIn()]
    return "".join(mentions)
def templateToUse():
    """Build the end-of-month announcement body (placeholders unfilled)."""
    preamble = [
        "The Stay Clean CURRENT_MONTH_NAME challenge is now over. Join us for **[the NEXT_MONTH_NAME challenge](NEXT_MONTH_URL)**.\n",
        "\n",
        "**NUMBER_STILL_IN** out of INITIAL_NUMBER participants made it all the way through the challenge. That's **PERCENT_STILL_IN%**.\n",
        "\n",
        "Congratulations to these participants, all of whom were victorious:\n\n",
    ]
    return "".join(preamble) + templateForParticipants()
def stringToPrint():
    """Render the final monthly post, filling in every template placeholder."""
    substitutions = [
        ('NUMBER_STILL_IN', str(numberStillIn)),
        ('INITIAL_NUMBER', str(initialNumber)),
        ('PERCENT_STILL_IN', str(percentStillIn)),
        ('CURRENT_MONTH_INDEX', str(currentMonthIndex)),
        ('CURRENT_MONTH_NAME', currentMonthName),
        ('NEXT_MONTH_INDEX', str(nextMonthIndex)),
        ('NEXT_MONTH_NAME', nextMonthName),
        ('NEXT_MONTH_URL', nextMonthURL),
    ]
    rendered = templateToUse()
    for placeholder, value in substitutions:
        rendered = re.sub(placeholder, value, rendered)
    return rendered
# Render the final post, echo it between markers (Python 2 print statements),
# and copy it to the clipboard for pasting into Reddit.
outputString = stringToPrint()
print "============================================================="
print outputString
print "============================================================="
pyperclip.copy(outputString)
|
foobarbazblarg/stayclean
|
stayclean-2020-february/display-final-after-month-is-over.py
|
Python
|
mit
| 3,059
|
from praw import Reddit
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from textblob import TextBlob
import matplotlib.pyplot as plt
plt.style.use('ggplot')
import numpy as np
from datetime import datetime
import matplotlib.dates as mdates
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
"""
Hutto, C.J. & Gilbert, E.E. (2014). VADER: A Parsimonious Rule-based Model for Sentiment Analysis of Social Media Text. Eighth International Conference on Weblogs and Social Media (ICWSM-14). Ann Arbor, MI, June 2014.
"""
class RedditData():
    """Pull a subreddit's top submissions, analyse comment sentiment with
    NLTK VADER (Hutto & Gilbert 2014) and TextBlob, and plot sentiment and
    traffic figures as matplotlib Agg canvases.
    """

    def __init__(self, subreddit_name, limit=30):
        # SECURITY NOTE(review): credentials are hard-coded in source; they
        # should be moved to environment variables or a config file.
        self.reddit = Reddit(client_id='N5kDJcWEEK7BDw',
                             client_secret='teB5Ykrjyh_ygkeGKk8mOrAyBRU',
                             password='fuckfuck77',
                             user_agent='SentiSub',
                             username='gregstarr')
        self.subreddit = self.reddit.subreddit(subreddit_name)
        self.getSubmissions(limit)
        # Traffic series: (day/hour/month data arrays, matching datetimes).
        self.dd, self.hd, self.md, self.dt, self.ht, self.mt = self.getTraffic()

    def getSubmissions(self, limit=1):
        """Collect up to ``limit`` non-stickied top submissions of the month
        into ``self.submissions``."""
        self.submissions = []
        for submission in self.subreddit.top('month', limit=limit):
            if not submission.stickied:
                self.submissions.append(submission)

    def grabComments(self):
        """Flatten every submission's full comment tree (breadth-first)
        into ``self.comments`` as raw body strings."""
        self.comments = []
        for s in self.submissions:
            print(s.title)
            s.comments.replace_more(limit=0)
            comment_queue = s.comments[:]  # Seed with top-level
            while comment_queue:
                comment = comment_queue.pop(0)
                self.comments.append(comment.body)
                comment_queue.extend(comment.replies)

    def analyzeComments(self):
        """Score every collected comment with VADER and TextBlob.

        Populates ``self.tb_data`` (TextBlob polarity), ``self.vader_diff``
        and ``self.vader_comp``, all clamped/filtered to [-1, 1].
        """
        sid = SentimentIntensityAnalyzer()
        vader_data = np.empty((len(self.comments), 4), float)
        self.tb_data = np.empty((len(self.comments), 2), float)
        for comment in self.comments:
            i = self.comments.index(comment)
            tb = TextBlob(comment)
            self.tb_data[i, :] = tb.sentiment
            # NOTE(review): relies on the ordering of polarity_scores()'s
            # dict values; confirm the neg/neu/pos/compound column mapping.
            vader_data[i, :] = list(sid.polarity_scores(comment).values())
        self.tb_data = self.tb_data[:, 0]
        self.vader_diff = vader_data[:, 3] - vader_data[:, 2]
        self.vader_comp = vader_data[:, 1][np.isfinite(vader_data[:, 1])]
        # Discard out-of-range scores.
        self.vader_diff = self.vader_diff[np.logical_and(self.vader_diff >= -1, self.vader_diff <= 1)]
        self.tb_data = self.tb_data[np.logical_and(self.tb_data >= -1, self.tb_data <= 1)]
        # BUG FIX: the original referenced the undefined name ``comments``
        # (NameError); it must be the instance attribute.
        print('{} comments analyzed'.format(len(self.comments)))

    def plotSentiment(self):
        """Histogram the three sentiment series; returns an Agg canvas."""
        fig = plt.figure()
        fig.suptitle('Sentiment')
        ax1 = fig.add_subplot(311)
        bins = np.arange(-1.05, 1.05, .1)
        n, bins, patches = ax1.hist(self.tb_data, bins)
        ax1.set_xlim([-1, 1])
        # NOTE(review): 'TextBlob'.format(...) has no placeholder, so the
        # mean is never shown; kept as-is pending the intended label format.
        ax1.set_ylabel('TextBlob'.format(np.mean(self.tb_data)))
        ax2 = fig.add_subplot(312)
        ax2.hist(self.vader_diff, bins)
        ax2.set_ylabel('NLTK Difference'.format(np.mean(self.vader_diff)))
        # BUG FIX: pyplot has no set_xlim; the original raised AttributeError.
        ax2.set_xlim([-1, 1])
        ax3 = fig.add_subplot(313)
        ax3.hist(self.vader_comp, bins)
        ax3.set_ylabel('NLTK Compound'.format(np.mean(self.vader_comp)))
        ax3.set_xlim([-1, 1])
        return FigureCanvas(fig)

    def getTraffic(self):
        """Fetch subreddit traffic statistics.

        day rows:            [epoch time, uniques, pageviews, subscriptions]
        hour and month rows: [epoch time, uniques, pageviews]

        Returns (day, hour, month) data arrays with the epoch column
        stripped, followed by (day, hour, month) datetime arrays.
        """
        traffic_data = self.subreddit.traffic()
        day_data = np.array(traffic_data['day'])
        hour_data = np.array(traffic_data['hour'])
        month_data = np.array(traffic_data['month'])
        day_times = np.array([datetime.fromtimestamp(timestamp) for timestamp in day_data[:, 0]])
        month_times = np.array([datetime.fromtimestamp(timestamp) for timestamp in month_data[:, 0]])
        hour_times = np.array([datetime.fromtimestamp(timestamp) for timestamp in hour_data[:, 0]])
        return day_data[:, 1:], hour_data[:, 1:], month_data[:, 1:], day_times, hour_times, month_times

    def plotTraffic(self):
        """Plot daily, hourly and monthly traffic; returns three Agg canvases."""
        day_fig = plt.figure()
        day_fig.suptitle('Daily')
        ax1 = day_fig.add_subplot(311)
        ax1.plot(self.dt, self.dd[:, 0])
        ax1.set_ylabel('Unique Visits')
        ax2 = day_fig.add_subplot(312, sharex=ax1)
        ax2.plot(self.dt, self.dd[:, 1])
        ax2.set_ylabel('Pageviews')
        ax3 = day_fig.add_subplot(313, sharex=ax1)
        ax3.plot(self.dt, self.dd[:, 2])
        ax3.set_ylabel('Subscribers')
        day_fig.autofmt_xdate()
        hour_fig = plt.figure()
        hour_fig.suptitle('Hourly')
        hour_fmt = mdates.DateFormatter('%b %d %I:%M %p')
        ax1 = hour_fig.add_subplot(211)
        ax1.plot(self.ht, self.hd[:, 0])
        ax1.set_ylabel('Unique Visits')
        ax1.xaxis.set_major_formatter(hour_fmt)
        ax2 = hour_fig.add_subplot(212, sharex=ax1)
        ax2.plot(self.ht, self.hd[:, 1])
        ax2.set_ylabel('Pageviews')
        hour_fig.autofmt_xdate()
        month_fig = plt.figure()
        month_fig.suptitle('Monthly')
        ax1 = month_fig.add_subplot(211)
        ax1.plot(self.mt, self.md[:, 0])
        ax1.set_ylabel('Unique Visits')
        ax2 = month_fig.add_subplot(212, sharex=ax1)
        ax2.plot(self.mt, self.md[:, 1])
        ax2.set_ylabel('Pageviews')
        month_fig.autofmt_xdate()
        return FigureCanvas(day_fig), FigureCanvas(hour_fig), FigureCanvas(month_fig)
|
gregstarr/SentiSub
|
reddiTest.py
|
Python
|
mit
| 5,832
|
#!/usr/bin/python
# -- coding: utf-8 --
import sys,thread,string,requests,time,random
from socket import *
from binascii import hexlify,unhexlify
### DEBUG SEND REQUESTS ###
#import httplib
#
#def patch_send():
# old_send= httplib.HTTPConnection.send
# def new_send( self, data ):
# print data
# return old_send(self, data)
# httplib.HTTPConnection.send= new_send
#
#patch_send()
###########################
charset = string.ascii_letters + string.digits + '=+/%'
cookie_size = 20
SESSIONID = ''.join(random.choice(charset) for x in range(cookie_size))
ua_list = [
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/32.0.1700.102 Chrome/32.0.1700.102 Safari/537.36',
'Lynx/2.8.5dev.16 libwww-FM/2.14 SSL-MM/1.4.1 OpenSSL/0.9.7a',
'Opera/9.80 (Linux armv6l ; U; CE-HTML/1.0 NETTV/3.0.1;; en) Presto/2.6.33 Version/10.60',
'NCSA_Mosaic/2.6 (X11; SunOS 4.1.3 sun4m)',
'Midori/0.2 (X11; Linux; U; cs-cz) WebKit/531.2+',
'python-requests/1.2.3 CPython/2.7.3 Linux/3.5.0-45-generic',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; Avant Browser [avantbrowser.com]; Hotbar 4.4.5.0)',
'ELinks/0.10.4-7.1-debian (textmode; Linux 2.4.27-amiga m68k; 80x32-3)',
'Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.1; AOLBuild 4334.5000; Windows NT 5.1; Media Center PC 3.0; .NET CLR 1.0.3705; .NET CLR 1.1.4322; InfoPath.1)',
'Emacs-W3/4.0pre.46 URL/p4.0pre.46 (i686-pc-linux; X11)',
'Mozilla/5.0 (X11; U; Darwin Power Macintosh; en-US; rv:1.8.0.12) Gecko/20070803 Firefox/1.5.0.12 Fink Community Edition',
'Jakarta Commons-HttpClient/2.0.1',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/536.26.17 (KHTML, like Gecko) Version/6.0.2 Safari/536.26.17',
'mothra/Jul-10-17:33:30-EDT-2006',
'Sqworm/2.9.85-BETA (beta_release; 20011115-775; i686-pc-linux',
'curl/7.19.5 (i586-pc-mingw32msvc) libcurl/7.19.5 OpenSSL/0.9.8l zlib/1.2.3',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)',
'AmigaVoyager/3.4.4 (MorphOS/PPC native)',
'xChaos_Arachne/5.1.89;GPL,386+',
'Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727)',
]
headers = {
'Content-Length': '789',
'Accept': '*/*',
'Accept-Encoding': 'gzip, deflate, compress',
'Content-Type': 'application/x-www-form-urlencoded',
}
cookies = {'SESSIONID': SESSIONID}

# Local TCP listener the controlled browser connects to.
ip='127.0.0.1'
port=6666
s = socket(AF_INET,SOCK_STREAM)
# Allow quick restarts without waiting for TIME_WAIT to clear.
s.setsockopt(SOL_SOCKET,SO_REUSEADDR,1)
s.bind((ip,port))
s.listen(0)
# Bytes read per client message ("hexcookie,ua_index").
buffer_size=256
def handler(clientsock, addr):
    """Serve one proxy client: read a "hexcookie,ua_index" message, then
    replay the crafted POST to the target ``host`` over HTTPS.

    BUG FIX: the original read from the module-global ``conn`` instead of
    the ``clientsock`` it was handed, so concurrent client threads raced
    on whichever socket was accepted most recently.
    """
    data = clientsock.recv(buffer_size)
    hexcookie, ua = data.split(',')
    cookie = unhexlify(hexcookie)
    ua_string = ua_list[int(ua)]
    scheme = 'https://'
    url = scheme + host + '/'
    headers['User-Agent'] = ua_string
    # Raw request text mirroring the headers, sent as the POST payload.
    data = '''POST / HTTP/1.1\r
Host: www.example.com\r
Content-Length: 789\r
Accept-Encoding: gzip, deflate, compress\r
Accept: */*\r
User-Agent: %s\r
Cookie: SESSIONID=%s\r
''' % (ua_string, cookie)
    # NOTE(review): the ``config=`` keyword only exists in old requests
    # (pre-1.x); this call targets the legacy API the script was written for.
    r = requests.post(url, headers=headers, cookies=cookies, data=data, verify=False, config={'encode_uri': False})
if __name__ == '__main__':
    # Fixed target host for the replayed requests.
    host = 'www.example.com'
    print "\n[info] Browser listening for requests ..."
    print "[info] Random cookie = %s" % SESSIONID
    # Accept clients forever; one thread per connection.
    while True:
        conn, addr = s.accept()
        thread.start_new_thread(handler,(conn,addr))
|
jgrmnprz/mycrime
|
myrequests_srv.py
|
Python
|
mit
| 3,474
|
#!/usr/bin/env python
# Helpful little script that spits out a comma-separated list of
# language codes for Qt icons that should be included
# in binary Agoldcoin distributions
import glob
import os
import re
import sys
# Abort with usage help unless both translation directories were given.
if len(sys.argv) != 3:
    sys.exit("Usage: %s $QTDIR/translations $AgoldcoinDIR/src/qt/locale"%sys.argv[0])
d1 = sys.argv[1]  # Qt's own translations directory
d2 = sys.argv[2]  # the application's locale directory
# Language codes for which Qt ships a qt_<code>.qm catalogue.
l1 = set([ re.search(r'qt_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d1, 'qt_*.qm')) ])
# Language codes for which the app ships an Agoldcoin_<code>.qm catalogue.
l2 = set([ re.search(r'Agoldcoin_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d2, 'Agoldcoin_*.qm')) ])
# Emit the comma-separated intersection (Python 2 print statement).
print ",".join(sorted(l1.intersection(l2)))
|
icardgod/A-Gold-Coin
|
contrib/qt_translations.py
|
Python
|
mit
| 624
|
#!/usr/bin/env python
# encoding=utf-8
from __future__ import print_function
import os
import sys
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
except ImportError:
print("This package requires 'setuptools' to be installed.")
sys.exit(1)
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to py.test."""

    # Extra command-line option: ``--pytest-args`` / ``-a``.
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        # Default: verbose run with pep8 + coverage over tests and the package.
        self.pytest_args = '-v --pep8 tests mwclient --cov mwclient'

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Import here, because outside the eggs aren't loaded
        import pytest
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
# Absolute path of the directory containing this setup.py.
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
requirements = ['requests', 'six']
# Python < 2.7 lacks collections.OrderedDict; pull in the backport.
if sys.version_info < (2, 7):
    requirements.append('ordereddict')
setup(name='mwclient',
      version='0.7.2',  # Remember to also update __ver__ in client.py
      description='MediaWiki API client',
      long_description=README,
      classifiers=[
          'Programming Language :: Python',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7'
      ],
      keywords='mediawiki wikipedia',
      author='Bryan Tong Minh',
      author_email='bryan.tongminh@gmail.com',
      url='https://github.com/btongminh/mwclient',
      license='MIT',
      packages=['mwclient'],
      cmdclass={'test': PyTest},
      tests_require=['pytest-pep8', 'pytest-cache', 'pytest', 'pytest-cov', 'funcsigs', 'responses>=0.3.0'],
      install_requires=requirements,
      zip_safe=True
      )
|
Jobava/mirror-mwclient
|
setup.py
|
Python
|
mit
| 1,841
|
import os

# Locate the SBML model file that ships alongside this module and read it in.
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1006230098.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()
def module_exists(module_name):
    """Return True when ``module_name`` can be imported, False otherwise."""
    try:
        __import__(module_name)
        return True
    except ImportError:
        return False
# Parse the model with libsbml when it is installed; otherwise skip quietly.
if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
|
biomodels/MODEL1006230098
|
MODEL1006230098/model.py
|
Python
|
cc0-1.0
| 427
|
from setuptools import setup, find_packages
import os
def read(fname):
    """Return the contents of ``fname``, resolved relative to this file's
    directory.

    Uses a context manager so the file handle is closed promptly; the
    original left the handle open until garbage collection.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
setup(
name='mexbtcapi',
version='0.2',
description="The Multi-Exchange Bitcoin API",
long_description=read('README.md'),
author="Goncalo Pinheira",
author_email="goncalopp+pypi@quorumverita.com",
url='https://github.com/goncalopp/mexbtcapi',
license='CC0',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
zip_safe=False,
install_requires=[
'six',
'requests', #for python-poloniex
'autobahn-sync', #for poloniex streaming api
],
setup_requires = ['nose', 'mock'],
test_suite = 'nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries',
],
)
|
goncalopp/mexbtcapi
|
setup.py
|
Python
|
cc0-1.0
| 1,144
|
from itertools import dropwhile
def least_divisor(k, n):
    """Return the smallest divisor of ``n`` that is >= ``k``, or ``n`` itself."""
    candidate = k
    while candidate < n:
        if n % candidate == 0:
            return candidate
        candidate += 1
    return n
def prime_factors(n):
    """Return the prime factorization of ``n`` as an ascending list.

    Repeatedly divides out the smallest remaining divisor. Uses floor
    division: the original's ``N = N/k`` yields floats under Python 3
    (the file already uses ``print()``), polluting the result with floats
    and losing precision for large inputs such as the Euler target.
    """
    factors = []
    k = 2
    remaining = n
    while remaining > 1:
        # Advance k to the least divisor of the remaining cofactor;
        # it can only grow, so the scan never restarts.
        while k < remaining and remaining % k != 0:
            k += 1
        factors.append(k)
        remaining //= k
    return factors
def solution():
    """Project Euler 3: the largest prime factor of 600851475143."""
    target = 600851475143
    return max(prime_factors(target))
if __name__ == "__main__":
print(solution())
|
drcabana/euler-fp
|
source/py/p03.py
|
Python
|
epl-1.0
| 444
|
#!/usr/bin/env python
# -*- coding: utf-8; -*-
#
# Copyright (C) 2007-2013 Guake authors
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gconf
import gobject
import gtk
import logging
import os
import re
import warnings
from pango import FontDescription
from guake.common import ShowableError
from guake.common import _
from guake.common import get_binaries_from_path
from guake.common import gladefile
from guake.common import hexify_color
from guake.common import pixmapfile
from guake.globals import ALIGN_CENTER
from guake.globals import ALIGN_LEFT
from guake.globals import ALIGN_RIGHT
from guake.globals import ALWAYS_ON_PRIMARY
from guake.globals import GKEY
from guake.globals import KEY
from guake.globals import LKEY
from guake.globals import LOCALE_DIR
from guake.globals import NAME
from guake.palettes import PALETTES
from guake.simplegladeapp import SimpleGladeApp
from guake.simplegladeapp import bindtextdomain
from guake.terminal import GuakeTerminal
from guake.terminal import QUICK_OPEN_MATCHERS
log = logging.getLogger(__name__)
# A regular expression to match possible python interpreters when
# filling interpreters combo in preferences (including bpython and ipython)
PYTHONS = re.compile(r'^[a-z]python$|^python\d\.\d$')
# Path to the shells file, it will be used to start to populate
# interpreters combo, see the next variable, its important to fill the
# rest of the combo too.
SHELLS_FILE = '/etc/shells'
# string to show in prefereces dialog for user shell option
USER_SHELL_VALUE = _('<user shell>')
# translating our types to vte types
ERASE_BINDINGS = {'ASCII DEL': 'ascii-delete',
'Escape sequence': 'delete-sequence',
'Control-H': 'ascii-backspace'}
HOTKEYS = [
{'label': 'General',
'keys': [{'key': GKEY('show_hide'),
'label': 'Toggle Guake visibility'},
{'key': LKEY('toggle_fullscreen'),
'label': 'Toggle Fullscreen'},
{'key': LKEY('toggle_hide_on_lose_focus'),
'label': 'Toggle Hide on Lose Focus'},
{'key': LKEY('quit'),
'label': 'Quit'},
{'key': LKEY('reset_terminal'),
'label': 'Reset terminal'},
]},
{'label': 'Tab management',
'keys': [{'key': LKEY('new_tab'),
'label': 'New tab'},
{'key': LKEY('close_tab'),
'label': 'Close tab'},
{'key': LKEY('rename_current_tab'),
'label': 'Rename current tab'},
]},
{'label': 'Navigation',
'keys': [{'key': LKEY('previous_tab'),
'label': 'Go to previous tab'},
{'key': LKEY('next_tab'),
'label': 'Go to next tab'},
{'key': LKEY('move_tab_left'),
'label': 'Move current tab left'},
{'key': LKEY('move_tab_right'),
'label': 'Move current tab right'},
{'key': LKEY('switch_tab1'),
'label': 'Go to first tab'},
{'key': LKEY('switch_tab2'),
'label': 'Go to second tab'},
{'key': LKEY('switch_tab3'),
'label': 'Go to third tab'},
{'key': LKEY('switch_tab4'),
'label': 'Go to fourth tab'},
{'key': LKEY('switch_tab5'),
'label': 'Go to fifth tab'},
{'key': LKEY('switch_tab6'),
'label': 'Go to sixth tab'},
{'key': LKEY('switch_tab7'),
'label': 'Go to seventh tab'},
{'key': LKEY('switch_tab8'),
'label': 'Go to eighth tab'},
{'key': LKEY('switch_tab9'),
'label': 'Go to ninth tab'},
{'key': LKEY('switch_tab10'),
'label': 'Go to tenth tab'},
]},
{'label': 'Appearance',
'keys': [{'key': LKEY('zoom_out'),
'label': 'Zoom out'},
{'key': LKEY('zoom_in'),
'label': 'Zoom in'},
{'key': LKEY('zoom_in_alt'),
'label': 'Zoom in (alternative)'},
{'key': LKEY('increase_height'),
'label': 'Increase height'},
{'key': LKEY('decrease_height'),
'label': 'Decrease height'},
{'key': LKEY('increase_transparency'),
'label': 'Increase transparency'},
{'key': LKEY('decrease_transparency'),
'label': 'Decrease transparency'},
{'key': LKEY('toggle_transparency'),
'label': 'Toggle transparency'}
]},
{'label': 'Clipboard',
'keys': [{'key': LKEY('clipboard_copy'),
'label': 'Copy text to clipboard'},
{'key': LKEY('clipboard_paste'),
'label': 'Paste text from clipboard'},
]},
{'label': 'Extra features',
'keys': [
{'key': LKEY('search_on_web'),
'label': 'Search select text on web'},
]},
]
class PrefsCallbacks(object):
    """Holds callbacks that will be used in the PrefsDialog class.

    Each `on_*` method is wired to a widget signal from the glade file
    and persists the corresponding preference into gconf.
    """
    def __init__(self):
        # Single gconf client shared by every callback below.
        self.client = gconf.client_get_default()
    # general tab
    def on_default_shell_changed(self, combo):
        """Changes the activity of default_shell in gconf
        """
        citer = combo.get_active_iter()
        if not citer:
            return
        shell = combo.get_model().get_value(citer, 0)
        # we unset the value (restore to default) when user chooses to use
        # user shell as guake shell interpreter.
        if shell == USER_SHELL_VALUE:
            self.client.unset(KEY('/general/default_shell'))
        else:
            self.client.set_string(KEY('/general/default_shell'), shell)
    def on_use_login_shell_toggled(self, chk):
        """Changes the activity of use_login_shell in gconf
        """
        self.client.set_bool(KEY('/general/use_login_shell'), chk.get_active())
    def on_open_tab_cwd_toggled(self, chk):
        """Changes the activity of open_tab_cwd in gconf
        """
        self.client.set_bool(KEY('/general/open_tab_cwd'), chk.get_active())
    def on_use_trayicon_toggled(self, chk):
        """Changes the activity of use_trayicon in gconf
        """
        self.client.set_bool(KEY('/general/use_trayicon'), chk.get_active())
    def on_use_popup_toggled(self, chk):
        """Changes the activity of use_popup in gconf
        """
        self.client.set_bool(KEY('/general/use_popup'), chk.get_active())
    def on_prompt_on_quit_toggled(self, chk):
        """Set the `prompt on quit' property in gconf
        """
        self.client.set_bool(KEY('/general/prompt_on_quit'), chk.get_active())
    def on_prompt_on_close_tab_changed(self, combo):
        """Set the `prompt_on_close_tab' property in gconf
        """
        self.client.set_int(KEY('/general/prompt_on_close_tab'), combo.get_active())
    def on_window_ontop_toggled(self, chk):
        """Changes the activity of window_ontop in gconf
        """
        self.client.set_bool(KEY('/general/window_ontop'), chk.get_active())
    def on_tab_ontop_toggled(self, chk):
        """Changes the activity of tab_ontop in gconf
        """
        self.client.set_bool(KEY('/general/tab_ontop'), chk.get_active())
    def on_quick_open_enable_toggled(self, chk):
        """Changes the activity of quick_open_enable in gconf
        """
        self.client.set_bool(KEY('/general/quick_open_enable'), chk.get_active())
    def on_quick_open_in_current_terminal_toggled(self, chk):
        """Persist whether quick-open should reuse the current terminal.
        """
        self.client.set_bool(KEY('/general/quick_open_in_current_terminal'), chk.get_active())
    def on_startup_script_changed(self, edt):
        """Persist the path of the script to run at startup.
        """
        self.client.set_string(KEY('/general/startup_script'), edt.get_text())
    def on_window_losefocus_toggled(self, chk):
        """Changes the activity of window_losefocus in gconf
        """
        self.client.set_bool(KEY('/general/window_losefocus'), chk.get_active())
    def on_quick_open_command_line_changed(self, edt):
        """Persist the command-line template used by quick-open.
        """
        self.client.set_string(KEY('/general/quick_open_command_line'), edt.get_text())
    def on_window_tabbar_toggled(self, chk):
        """Changes the activity of window_tabbar in gconf
        """
        self.client.set_bool(KEY('/general/window_tabbar'), chk.get_active())
    def on_start_fullscreen_toggled(self, chk):
        """Changes the activity of start_fullscreen in gconf
        """
        self.client.set_bool(KEY('/general/start_fullscreen'), chk.get_active())
    def on_use_vte_titles_toggled(self, chk):
        """Save `use_vte_titles` property value in gconf
        """
        self.client.set_bool(KEY('/general/use_vte_titles'), chk.get_active())
    def on_max_tab_name_length_changed(self, spin):
        """Changes the value of max_tab_name_length in gconf
        """
        val = int(spin.get_value())
        self.client.set_int(KEY('/general/max_tab_name_length'), val)
    def on_mouse_display_toggled(self, chk):
        """Set the 'appear on mouse display' preference in gconf. This
        property supercedes any value stored in display_n.
        """
        self.client.set_bool(KEY('/general/mouse_display'), chk.get_active())
    def on_right_align_toggled(self, chk):
        """set the horizontal alignment setting.
        """
        v = chk.get_active()
        self.client.set_int(KEY('/general/window_halignment'), 1 if v else 0)
    def on_bottom_align_toggled(self, chk):
        """set the vertical alignment setting.
        """
        v = chk.get_active()
        self.client.set_int(KEY('/general/window_valignment'), 1 if v else 0)
    def on_display_n_changed(self, combo):
        """Set the destination display in gconf.
        """
        i = combo.get_active_iter()
        if not i:
            return
        model = combo.get_model()
        first_item_path = model.get_path(model.get_iter_first())
        # The first combo entry is the special "always on primary" option.
        if model.get_path(i) == first_item_path:
            val_int = ALWAYS_ON_PRIMARY
        else:
            val = model.get_value(i, 0)
            val_int = int(val.split()[0]) # extracts 1 from '1' or from '1 (primary)'
        self.client.set_int(KEY('/general/display_n'), val_int)
    def on_window_height_value_changed(self, hscale):
        """Changes the value of window_height in gconf
        """
        val = hscale.get_value()
        # Both the legacy int key and the float key are kept in sync.
        self.client.set_int(KEY('/general/window_height'), int(val))
        self.client.set_float(KEY('/general/window_height_f'), float(val))
    def on_window_width_value_changed(self, wscale):
        """Changes the value of window_width in gconf
        """
        val = wscale.get_value()
        self.client.set_int(KEY('/general/window_width'), int(val))
        self.client.set_float(KEY('/general/window_width_f'), float(val))
    def on_window_halign_value_changed(self, halign_button):
        """Changes the value of window_halignment in gconf
        """
        if halign_button.get_active():
            which_align = {
                'radiobutton_align_left': ALIGN_LEFT,
                'radiobutton_align_right': ALIGN_RIGHT,
                'radiobutton_align_center': ALIGN_CENTER
            }
            self.client.set_int(KEY('/general/window_halignment'),
                                which_align[halign_button.get_name()])
    def on_use_visible_bell_toggled(self, chk):
        """Changes the value of use_visible_bell in gconf
        """
        self.client.set_bool(KEY('/general/use_visible_bell'), chk.get_active())
    def on_use_audible_bell_toggled(self, chk):
        """Changes the value of use_audible_bell in gconf
        """
        self.client.set_bool(KEY('/general/use_audible_bell'), chk.get_active())
    # scrolling tab
    def on_use_scrollbar_toggled(self, chk):
        """Changes the activity of use_scrollbar in gconf
        """
        self.client.set_bool(KEY('/general/use_scrollbar'), chk.get_active())
    def on_history_size_value_changed(self, spin):
        """Changes the value of history_size in gconf
        """
        val = int(spin.get_value())
        self.client.set_int(KEY('/general/history_size'), val)
    def on_scroll_output_toggled(self, chk):
        """Changes the activity of scroll_output in gconf
        """
        self.client.set_bool(KEY('/general/scroll_output'), chk.get_active())
    def on_scroll_keystroke_toggled(self, chk):
        """Changes the activity of scroll_keystroke in gconf
        """
        self.client.set_bool(KEY('/general/scroll_keystroke'), chk.get_active())
    # appearance tab
    def on_use_default_font_toggled(self, chk):
        """Changes the activity of use_default_font in gconf
        """
        self.client.set_bool(KEY('/general/use_default_font'), chk.get_active())
    def on_allow_bold_toggled(self, chk):
        """Changes the value of allow_bold in gconf
        """
        self.client.set_bool(KEY('/style/font/allow_bold'), chk.get_active())
    def on_use_palette_font_and_background_color_toggled(self, chk):
        """Changes the activity of use_palette_font_and_background_color in gconf
        """
        self.client.set_bool(
            KEY('/general/use_palette_font_and_background_color'), chk.get_active())
    def on_font_style_font_set(self, fbtn):
        """Changes the value of font_style in gconf
        """
        self.client.set_string(KEY('/style/font/style'), fbtn.get_font_name())
    def on_font_color_color_set(self, btn):
        """Changes the value of font_color in gconf
        """
        color = hexify_color(btn.get_color())
        self.client.set_string(KEY('/style/font/color'), color)
    def on_background_color_color_set(self, btn):
        """Changes the value of background_color in gconf
        """
        color = hexify_color(btn.get_color())
        self.client.set_string(KEY('/style/background/color'), color)
    def on_background_image_changed(self, btn):
        """Changes the value of background_image in gconf
        """
        filename = btn.get_filename()
        # Only store paths that point at an existing regular file.
        if os.path.isfile(filename or ''):
            self.client.set_string(KEY('/style/background/image'), filename)
    def on_transparency_value_changed(self, hscale):
        """Changes the value of background_transparency in gconf
        """
        value = hscale.get_value()
        self.client.set_int(KEY('/style/background/transparency'), int(value))
    # compatibility tab
    def on_backspace_binding_changed(self, combo):
        """Changes the value of compat_backspace in gconf
        """
        val = combo.get_active_text()
        self.client.set_string(KEY('/general/compat_backspace'),
                               ERASE_BINDINGS[val])
    def on_delete_binding_changed(self, combo):
        """Changes the value of compat_delete in gconf
        """
        val = combo.get_active_text()
        self.client.set_string(KEY('/general/compat_delete'),
                               ERASE_BINDINGS[val])
    def on_custom_command_file_chooser_file_changed(self, filechooser):
        """Persist the path of the custom-command context-menu file.
        """
        self.client.set_string(KEY('/general/custom_command_file'), filechooser.get_filename())
class PrefsDialog(SimpleGladeApp):
    """The Guake Preferences dialog.
    """
    def __init__(self):
        """Setup the preferences dialog interface, loading images,
        adding filters to file choosers and connecting some signals.
        """
        super(PrefsDialog, self).__init__(gladefile('prefs.glade'),
                                          root='config-window')
        self.add_callbacks(PrefsCallbacks())
        self.client = gconf.client_get_default()
        # setting evtbox title bg
        eventbox = self.get_widget('eventbox-title')
        eventbox.modify_bg(gtk.STATE_NORMAL,
                           eventbox.get_colormap().alloc_color("#ffffff"))
        # images
        ipath = pixmapfile('guake-notification.png')
        self.get_widget('image_logo').set_from_file(ipath)
        ipath = pixmapfile('quick-open.png')
        self.get_widget('image_quick_open').set_from_file(ipath)
        # the first position in tree will store the keybinding path in gconf,
        # and the user doesn't worry with this, let's hide that =D
        model = gtk.TreeStore(str, str, object, bool)
        treeview = self.get_widget('treeview-keys')
        treeview.set_model(model)
        treeview.set_rules_hint(True)
        treeview.connect('button-press-event', self.start_editing)
        renderer = gtk.CellRendererText()
        column = gtk.TreeViewColumn('keypath', renderer, text=0)
        column.set_visible(False)
        treeview.append_column(column)
        renderer = gtk.CellRendererText()
        column = gtk.TreeViewColumn(_('Action'), renderer, text=1)
        column.set_property('expand', True)
        treeview.append_column(column)
        renderer = gtk.CellRendererAccel()
        renderer.set_property('editable', True)
        renderer.connect('accel-edited', self.on_key_edited, model)
        renderer.connect('accel-cleared', self.on_key_cleared, model)
        column = gtk.TreeViewColumn(_('Shortcut'), renderer)
        column.set_cell_data_func(renderer, self.cell_data_func)
        column.set_property('expand', False)
        treeview.append_column(column)
        # live terminal used to preview appearance settings
        self.demo_terminal = GuakeTerminal()
        demo_terminal_box = self.get_widget('demo_terminal_box')
        demo_terminal_box.add(self.demo_terminal)
        default_params = {}
        pid = self.demo_terminal.fork_command(**default_params)
        self.demo_terminal.pid = pid
        self.populate_shell_combo()
        self.populate_keys_tree()
        self.populate_display_n()
        self.load_configs()
        self.get_widget('config-window').hide()
        # Preview when selecting a bgimage
        self.selection_preview = gtk.Image()
        self.file_filter = gtk.FileFilter()
        self.file_filter.add_pattern("*.jpg")
        self.file_filter.add_pattern("*.png")
        self.file_filter.add_pattern("*.svg")
        self.file_filter.add_pattern("*.jpeg")
        self.bgfilechooser = self.get_widget('background_image')
        self.bgfilechooser.set_preview_widget(self.selection_preview)
        self.bgfilechooser.set_filter(self.file_filter)
        self.bgfilechooser.connect('update-preview', self.update_preview,
                                   self.selection_preview)
    def show(self):
        """Calls the main window show_all method and presents the
        window in the desktop.
        """
        self.get_widget('config-window').show_all()
        self.get_widget('config-window').present()
    def hide(self):
        """Calls the main window hide function.
        """
        self.get_widget('config-window').hide()
    def update_preview(self, file_chooser, preview):
        """Used by filechooser to preview image files
        """
        filename = file_chooser.get_preview_filename()
        if filename and os.path.isfile(filename or ''):
            try:
                mkpb = gtk.gdk.pixbuf_new_from_file_at_size
                pixbuf = mkpb(filename, 256, 256)
                preview.set_from_pixbuf(pixbuf)
                file_chooser.set_preview_widget_active(True)
            except gobject.GError:
                # this exception is raised when user chooses a
                # non-image file or a directory
                warnings.warn('File %s is not an image' % filename)
        else:
            file_chooser.set_preview_widget_active(False)
    def toggle_prompt_on_quit_sensitivity(self, combo):
        """If toggle_on_close_tabs is set to 2 (Always), prompt_on_quit has no
        effect.
        """
        self.get_widget('prompt_on_quit').set_sensitive(combo.get_active() != 2)
    def toggle_style_sensitivity(self, chk):
        """If the user chooses to use the gnome default font
        configuration it means that he will not be able to use the
        font selector.
        """
        self.get_widget('font_style').set_sensitive(not chk.get_active())
    def toggle_use_font_background_sensitivity(self, chk):
        """If the user chooses to use the gnome default font
        configuration it means that he will not be able to use the
        font selector.
        """
        self.get_widget('palette_16').set_sensitive(chk.get_active())
        self.get_widget('palette_17').set_sensitive(chk.get_active())
    def toggle_display_n_sensitivity(self, chk):
        """When the user unchecks 'on mouse display', the option to select an
        alternate display should be enabled.
        """
        self.get_widget('display_n').set_sensitive(not chk.get_active())
    def toggle_quick_open_command_line_sensitivity(self, chk):
        """When the user unchecks 'enable quick open', the command line should be disabled
        """
        self.get_widget('quick_open_command_line').set_sensitive(chk.get_active())
        self.get_widget('quick_open_in_current_terminal').set_sensitive(chk.get_active())
    def clear_background_image(self, btn):
        """Unset the gconf variable that holds the name of the
        background image of all terminals.
        """
        self.client.unset(KEY('/style/background/image'))
        self.bgfilechooser.unselect_all()
    def on_reset_compat_defaults_clicked(self, bnt):
        """Reset default values to compat_{backspace,delete} gconf
        keys. The default values are retrieved from the guake.schemas
        file.
        """
        self.client.unset(KEY('/general/compat_backspace'))
        self.client.unset(KEY('/general/compat_delete'))
        self.reload_erase_combos()
    def on_palette_name_changed(self, combo):
        """Changes the value of palette in gconf
        """
        palette_name = combo.get_active_text()
        if palette_name not in PALETTES:
            return
        self.client.set_string(KEY('/style/font/palette'),
                               PALETTES[palette_name])
        self.client.set_string(KEY('/style/font/palette_name'), palette_name)
        self.set_palette_colors(PALETTES[palette_name])
        self.update_demo_palette(PALETTES[palette_name])
    def on_cursor_shape_changed(self, combo):
        """Changes the value of cursor_shape in gconf
        """
        index = combo.get_active()
        self.client.set_int(KEY('/style/cursor_shape'), index)
    def on_blink_cursor_toggled(self, chk):
        """Changes the value of blink_cursor in gconf
        """
        self.client.set_int(KEY('/style/cursor_blink_mode'), chk.get_active())
    def on_palette_color_set(self, btn):
        """Changes the value of palette in gconf
        """
        # 18 entries: 16 palette colors plus fg (16) and bg (17).
        palette = []
        for i in range(18):
            palette.append(hexify_color(
                self.get_widget('palette_%d' % i).get_color()))
        palette = ':'.join(palette)
        self.client.set_string(KEY('/style/font/palette'), palette)
        self.client.set_string(KEY('/style/font/palette_name'), _('Custom'))
        self.set_palette_name('Custom')
        self.update_demo_palette(palette)
    def set_palette_name(self, palette_name):
        """If the given palette matches an existing one, shows it in the
        combobox
        """
        combo = self.get_widget('palette_name')
        found = False
        log.debug("wanting palette: %r", palette_name)
        for i in combo.get_model():
            if i[0] == palette_name:
                combo.set_active_iter(i.iter)
                found = True
                break
        if not found:
            combo.set_active(self.custom_palette_index)
    def update_demo_palette(self, palette):
        """Apply the colon-separated `palette` string to the demo
        terminal, together with the configured font and fg/bg colors.
        """
        fgcolor = gtk.gdk.color_parse(
            self.client.get_string(KEY('/style/font/color')))
        bgcolor = gtk.gdk.color_parse(
            self.client.get_string(KEY('/style/background/color')))
        palette = [gtk.gdk.color_parse(color) for color in palette.split(':')]
        font_name = self.client.get_string(KEY('/style/font/style'))
        font = FontDescription(font_name)
        use_palette_font_and_background_color = self.client.get_bool(
            KEY('/general/use_palette_font_and_background_color'))
        # entries 16/17 of an extended palette override fg/bg colors
        if use_palette_font_and_background_color and len(palette) > 16:
            fgcolor = palette[16]
            bgcolor = palette[17]
        self.demo_terminal.set_color_dim(fgcolor)
        self.demo_terminal.set_color_foreground(fgcolor)
        self.demo_terminal.set_color_bold(fgcolor)
        self.demo_terminal.set_color_background(bgcolor)
        self.demo_terminal.set_background_tint_color(bgcolor)
        self.demo_terminal.set_colors(fgcolor, bgcolor, palette[:16])
        self.demo_terminal.set_font(font)
    def fill_palette_names(self):
        """Fill the palette combobox with the known palettes plus a
        trailing 'Custom' entry, remembering the index of the latter.
        """
        combo = self.get_widget('palette_name')
        for palette_name in sorted(PALETTES.keys()):
            combo.append_text(palette_name)
        self.custom_palette_index = len(PALETTES)
        combo.append_text(_('Custom'))
    def set_cursor_shape(self, shape_index):
        """Select the given cursor shape index in the combobox."""
        self.get_widget('cursor_shape').set_active(shape_index)
    def set_cursor_blink_mode(self, mode_index):
        """Select the given cursor blink mode index in the combobox."""
        self.get_widget('cursor_blink_mode').set_active(mode_index)
    def set_palette_colors(self, palette):
        """Updates the color buttons with the given palette
        """
        palette = palette.split(':')
        for i in range(len(palette)):
            color = gtk.gdk.color_parse(palette[i])
            self.get_widget('palette_%d' % i).set_color(color)
    def reload_erase_combos(self, btn=None):
        """Read from gconf the value of compat_{backspace,delete} vars
        and select the right option in combos.
        """
        # backspace erase binding
        combo = self.get_widget('backspace-binding-combobox')
        binding = self.client.get_string(KEY('/general/compat_backspace'))
        for i in combo.get_model():
            if ERASE_BINDINGS.get(i[0]) == binding:
                combo.set_active_iter(i.iter)
        # delete erase binding
        combo = self.get_widget('delete-binding-combobox')
        binding = self.client.get_string(KEY('/general/compat_delete'))
        for i in combo.get_model():
            if ERASE_BINDINGS.get(i[0]) == binding:
                combo.set_active_iter(i.iter)
    def load_configs(self):
        """Load configurations for all widgets in General, Scrolling
        and Appearance tabs from gconf.
        """
        # default_shell
        combo = self.get_widget('default_shell')
        # get the value for default shell. If unset, set to USER_SHELL_VALUE.
        value = self.client.get_string(KEY('/general/default_shell')) or USER_SHELL_VALUE
        for i in combo.get_model():
            if i[0] == value:
                combo.set_active_iter(i.iter)
        # login shell
        value = self.client.get_bool(KEY('/general/use_login_shell'))
        self.get_widget('use_login_shell').set_active(value)
        # tray icon
        value = self.client.get_bool(KEY('/general/use_trayicon'))
        self.get_widget('use_trayicon').set_active(value)
        # popup
        value = self.client.get_bool(KEY('/general/use_popup'))
        self.get_widget('use_popup').set_active(value)
        # prompt on quit
        value = self.client.get_bool(KEY('/general/prompt_on_quit'))
        self.get_widget('prompt_on_quit').set_active(value)
        # prompt on close_tab
        value = self.client.get_int(KEY('/general/prompt_on_close_tab'))
        self.get_widget('prompt_on_close_tab').set_active(value)
        # value 2 means "always prompt", which makes prompt_on_quit moot
        self.get_widget('prompt_on_quit').set_sensitive(value != 2)
        # ontop
        value = self.client.get_bool(KEY('/general/window_ontop'))
        self.get_widget('window_ontop').set_active(value)
        # tab ontop
        value = self.client.get_bool(KEY('/general/tab_ontop'))
        self.get_widget('tab_ontop').set_active(value)
        # losefocus
        value = self.client.get_bool(KEY('/general/window_losefocus'))
        self.get_widget('window_losefocus').set_active(value)
        # use VTE titles
        value = self.client.get_bool(KEY('/general/use_vte_titles'))
        self.get_widget('use_vte_titles').set_active(value)
        # max tab name length
        value = self.client.get_int(KEY('/general/max_tab_name_length'))
        self.get_widget('max_tab_name_length').set_value(value)
        # window size: prefer the float keys, fall back to the legacy ints
        value = self.client.get_float(KEY('/general/window_height_f'))
        if not value:
            value = self.client.get_int(KEY('/general/window_height'))
        self.get_widget('window_height').set_value(value)
        value = self.client.get_float(KEY('/general/window_width_f'))
        if not value:
            value = self.client.get_int(KEY('/general/window_width'))
        self.get_widget('window_width').set_value(value)
        value = self.client.get_int(KEY('/general/window_halignment'))
        which_button = {
            ALIGN_RIGHT: 'radiobutton_align_right',
            ALIGN_LEFT: 'radiobutton_align_left',
            ALIGN_CENTER: 'radiobutton_align_center'
        }
        self.get_widget(which_button[value]).set_active(True)
        value = self.client.get_bool(KEY('/general/open_tab_cwd'))
        self.get_widget('open_tab_cwd').set_active(value)
        # tab bar
        value = self.client.get_bool(KEY('/general/window_tabbar'))
        self.get_widget('window_tabbar').set_active(value)
        # start fullscreen
        value = self.client.get_bool(KEY('/general/start_fullscreen'))
        self.get_widget('start_fullscreen').set_active(value)
        # use visible bell
        value = self.client.get_bool(KEY('/general/use_visible_bell'))
        self.get_widget('use_visible_bell').set_active(value)
        # use audible bell
        value = self.client.get_bool(KEY('/general/use_audible_bell'))
        self.get_widget('use_audible_bell').set_active(value)
        # display number / use primary display
        combo = self.get_widget('display_n')
        dest_screen = self.client.get_int(KEY('/general/display_n'))
        value = self.client.get_bool(KEY('/general/quick_open_enable'))
        self.get_widget('quick_open_enable').set_active(value)
        self.get_widget('quick_open_command_line').set_sensitive(value)
        self.get_widget('quick_open_in_current_terminal').set_sensitive(value)
        # (dead store of a fresh gtk.TextBuffer removed; we reuse the
        # widget's own buffer)
        text = self.get_widget('quick_open_supported_patterns').get_buffer()
        for title, matcher, _useless in QUICK_OPEN_MATCHERS:
            text.insert_at_cursor("%s: %s\n" % (title, matcher))
        self.get_widget('quick_open_supported_patterns').set_buffer(text)
        value = self.client.get_string(KEY('/general/quick_open_command_line'))
        if value is None:
            value = "subl %(file_path)s:%(line_number)s"
        self.get_widget('quick_open_command_line').set_text(value)
        value = self.client.get_bool(KEY('/general/quick_open_in_current_terminal'))
        self.get_widget('quick_open_in_current_terminal').set_active(value)
        value = self.client.get_string(KEY('/general/startup_script'))
        self.get_widget('startup_script').set_text(value)
        # If Guake is configured to use a screen that is not currently attached,
        # default to 'primary display' option.
        screen = self.get_widget('config-window').get_screen()
        n_screens = screen.get_n_monitors()
        if dest_screen > n_screens - 1:
            self.client.set_bool(KEY('/general/mouse_display'), False)
            dest_screen = screen.get_primary_monitor()
            self.client.set_int(KEY('/general/display_n'), dest_screen)
        if dest_screen == ALWAYS_ON_PRIMARY:
            first_item = combo.get_model().get_iter_first()
            combo.set_active_iter(first_item)
        else:
            seen_first = False  # first item "always on primary" is special
            for i in combo.get_model():
                if seen_first:
                    i_int = int(i[0].split()[0])  # extracts 1 from '1' or from '1 (primary)'
                    if i_int == dest_screen:
                        combo.set_active_iter(i.iter)
                else:
                    seen_first = True
        # use display where the mouse is currently
        value = self.client.get_bool(KEY('/general/mouse_display'))
        self.get_widget('mouse_display').set_active(value)
        # scrollbar
        value = self.client.get_bool(KEY('/general/use_scrollbar'))
        self.get_widget('use_scrollbar').set_active(value)
        # history size
        value = self.client.get_int(KEY('/general/history_size'))
        self.get_widget('history_size').set_value(value)
        # scroll output
        value = self.client.get_bool(KEY('/general/scroll_output'))
        self.get_widget('scroll_output').set_active(value)
        # scroll keystroke
        value = self.client.get_bool(KEY('/general/scroll_keystroke'))
        self.get_widget('scroll_keystroke').set_active(value)
        # default font
        value = self.client.get_bool(KEY('/general/use_default_font'))
        self.get_widget('use_default_font').set_active(value)
        self.get_widget('font_style').set_sensitive(not value)
        # use font and background color
        value = self.client.get_bool(KEY('/general/use_palette_font_and_background_color'))
        self.get_widget('use_palette_font_and_background_color').set_active(value)
        self.get_widget('palette_16').set_sensitive(value)
        self.get_widget('palette_17').set_sensitive(value)
        # font
        value = self.client.get_string(KEY('/style/font/style'))
        if value:
            self.get_widget('font_style').set_font_name(value)
        # font color
        val = self.client.get_string(KEY('/style/font/color'))
        try:
            color = gtk.gdk.color_parse(val)
            self.get_widget('font_color').set_color(color)
        except (ValueError, TypeError):
            warnings.warn('Unable to parse color %s' % val, Warning)
        # background color
        value = self.client.get_string(KEY('/style/background/color'))
        try:
            color = gtk.gdk.color_parse(value)
            self.get_widget('background_color').set_color(color)
        except (ValueError, TypeError):
            # fixed: previously reported the unrelated font-color `val`
            warnings.warn('Unable to parse color %s' % value, Warning)
        # allow bold font
        value = self.client.get_bool(KEY('/style/font/allow_bold'))
        self.get_widget('allow_bold').set_active(value)
        # palette
        self.fill_palette_names()
        value = self.client.get_string(KEY('/style/font/palette_name'))
        self.set_palette_name(value)
        value = self.client.get_string(KEY('/style/font/palette'))
        self.set_palette_colors(value)
        self.update_demo_palette(value)
        # cursor shape
        value = self.client.get_int(KEY('/style/cursor_shape'))
        self.set_cursor_shape(value)
        # cursor blink
        value = self.client.get_int(KEY('/style/cursor_blink_mode'))
        self.set_cursor_blink_mode(value)
        # background image
        value = self.client.get_string(KEY('/style/background/image'))
        if os.path.isfile(value or ''):
            self.get_widget('background_image').set_filename(value)
        value = self.client.get_int(KEY('/style/background/transparency'))
        self.get_widget('background_transparency').set_value(value)
        value = self.client.get_int(KEY('/general/window_valignment'))
        self.get_widget('top_align').set_active(value)
        # it's a separated method, to be reused.
        self.reload_erase_combos()
        # custom command context-menu configuration file
        # the key may be unset; expanduser(None) would raise TypeError,
        # so fall back to an empty path first.
        value = os.path.expanduser(
            self.client.get_string(KEY('/general/custom_command_file')) or '')
        custom_cmd_filter = gtk.FileFilter()
        custom_cmd_filter.set_name(_("JSON files"))
        custom_cmd_filter.add_pattern("*.json")
        self.get_widget('custom_command_file_chooser').add_filter(custom_cmd_filter)
        all_files_filter = gtk.FileFilter()
        all_files_filter.set_name(_("All files"))
        all_files_filter.add_pattern("*")
        self.get_widget('custom_command_file_chooser').add_filter(all_files_filter)
        self.get_widget('custom_command_file_chooser').set_filename(value)
    # -- populate functions --
    def populate_shell_combo(self):
        """Read the /etc/shells and looks for installed shells to
        fill the default_shell combobox.
        """
        cb = self.get_widget('default_shell')
        # append user shell as first option
        cb.append_text(USER_SHELL_VALUE)
        if os.path.exists(SHELLS_FILE):
            lines = open(SHELLS_FILE).readlines()
            for i in lines:
                possible = i.strip()
                if possible and not possible.startswith('#') and os.path.exists(possible):
                    cb.append_text(possible)
        for i in get_binaries_from_path(PYTHONS):
            cb.append_text(i)
    def populate_keys_tree(self):
        """Reads the HOTKEYS global variable and insert all data in
        the TreeStore used by the preferences window treeview.
        """
        model = self.get_widget('treeview-keys').get_model()
        for group in HOTKEYS:
            giter = model.append(None)
            model.set(giter, 0, '', 1, _(group['label']))
            for item in group['keys']:
                child = model.append(giter)
                accel = self.client.get_string(item['key'])
                if accel:
                    params = gtk.accelerator_parse(accel)
                    hotkey = KeyEntry(*params)
                else:
                    hotkey = KeyEntry(0, 0)
                model.set(child,
                          0, item['key'],
                          1, _(item['label']),
                          2, hotkey,
                          3, True)
        self.get_widget('treeview-keys').expand_all()
    def populate_display_n(self):
        """Get the number of displays and populate this drop-down box
        with them all. Prepend the "always on primary" option.
        """
        cb = self.get_widget('display_n')
        screen = self.get_widget('config-window').get_screen()
        cb.append_text("always on primary")
        for m in range(0, int(screen.get_n_monitors())):
            if m == int(screen.get_primary_monitor()):
                # TODO l10n
                cb.append_text(str(m) + ' ' + '(primary)')
            else:
                cb.append_text(str(m))
    # -- key handling --
    def on_key_edited(self, renderer, path, keycode, mask, keyval, model):
        """Callback that handles key edition in cellrenderer. It makes
        some tests to validate the key, like looking for already in
        use keys and look for [A-Z][a-z][0-9] to avoid problems with
        these common keys. If all tests are ok, the value will be
        stored in gconf.
        """
        giter = model.get_iter(path)
        gconf_path = model.get_value(giter, 0)
        oldkey = model.get_value(giter, 2)
        hotkey = KeyEntry(keycode, mask)
        key = gtk.accelerator_name(keycode, mask)
        keylabel = gtk.accelerator_get_label(keycode, mask)
        # we needn't to change anything, the user is trying to set the
        # same key that is already set.
        if oldkey == hotkey:
            return False
        # looking for already used keybindings
        def each_key(model, path, subiter):
            keyentry = model.get_value(subiter, 2)
            if keyentry and keyentry == hotkey:
                msg = _("The shortcut \"%s\" is already in use.") % keylabel
                raise ShowableError(_('Error setting keybinding.'), msg, -1)
        model.foreach(each_key)
        # avoiding problems with common keys
        if ((mask == 0 and keycode != 0) and (
                (keycode >= ord('a') and keycode <= ord('z')) or
                (keycode >= ord('A') and keycode <= ord('Z')) or
                (keycode >= ord('0') and keycode <= ord('9')))):
            dialog = gtk.MessageDialog(
                self.get_widget('config-window'),
                gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                gtk.MESSAGE_WARNING, gtk.BUTTONS_OK,
                _("The shortcut \"%s\" cannot be used "
                  "because it will become impossible to "
                  "type using this key.\n\n"
                  "Please try with a key such as "
                  "Control, Alt or Shift at the same "
                  "time.\n") % key)
            dialog.run()
            dialog.destroy()
            return False
        # setting new value in ui
        giter = model.get_iter(path)
        model.set_value(giter, 2, hotkey)
        # setting the new value in gconf
        self.client.set_string(gconf_path, key)
    def on_key_cleared(self, renderer, path, model):
        """If the user tries to clear a keybinding with the backspace
        key this callback will be called and it just fill the model
        with an empty key and set the 'disabled' string in gconf path.
        """
        giter = model.get_iter(path)
        gconf_path = model.get_value(giter, 0)
        # (a dead `get_string` read whose result was discarded was
        # removed here)
        model.set_value(giter, 2, KeyEntry(0, 0))
        self.client.set_string(gconf_path, 'disabled')
    def cell_data_func(self, column, renderer, model, giter):
        """Defines the way that each renderer will handle the key
        object and the mask it sets the properties for a cellrenderer
        key.
        """
        obj = model.get_value(giter, 2)
        if obj:
            renderer.set_property('visible', True)
            renderer.set_property('accel-key', obj.keycode)
            renderer.set_property('accel-mods', obj.mask)
        else:
            renderer.set_property('visible', False)
            renderer.set_property('accel-key', 0)
            renderer.set_property('accel-mods', 0)
    def start_editing(self, treeview, event):
        """Make the treeview grab the focus and start editing the cell
        that the user has clicked to avoid confusion with two or three
        clicks before editing a keybinding.
        Thanks to gnome-keybinding-properties.c =)
        """
        if event.window != treeview.get_bin_window():
            return False
        x, y = int(event.x), int(event.y)
        ret = treeview.get_path_at_pos(x, y)
        if not ret:
            return False
        path, column, cellx, celly = ret
        if path and len(path) > 1:
            def real_cb():
                treeview.grab_focus()
                treeview.set_cursor(path, column, True)
            treeview.stop_emission('button-press-event')
            gobject.idle_add(real_cb)
        return True
class KeyEntry(object):
    """Value object pairing an accelerator keycode with its modifier mask.

    Instances are stored in the keybinding TreeStore and compared when
    the user edits a shortcut.
    """
    def __init__(self, keycode, mask):
        self.keycode = keycode
        self.mask = mask
    def __repr__(self):
        return u'KeyEntry(%d, %d)' % (
            self.keycode, self.mask)
    def __eq__(self, rval):
        return self.keycode == rval.keycode and self.mask == rval.mask
    def __ne__(self, rval):
        # Python 2 does not derive __ne__ from __eq__, so without this
        # `!=` would fall back to identity and disagree with `==`.
        return not self.__eq__(rval)
def setup_standalone_signals(instance):
    """Wire the prefs dialog for standalone execution.

    Makes the window's delete-event and a click on the close button
    terminate the GTK main loop instead of merely hiding the dialog,
    then returns the instance so calls can be chained.
    """
    dialog = instance.get_widget('config-window')
    dialog.connect('delete-event', gtk.main_quit)
    # The close button already carries a destroy callback; block it
    # before attaching the quit handler so only the latter fires.
    close_btn = instance.get_widget('button1')
    close_btn.handler_block_by_func(instance.gtk_widget_destroy)
    close_btn.connect('clicked', gtk.main_quit)
    return instance
if __name__ == '__main__':
    # Standalone mode: set up i18n, rewire the dialog so closing it
    # quits the GTK main loop, show it, and run the loop.
    bindtextdomain(NAME, LOCALE_DIR)
    setup_standalone_signals(PrefsDialog()).show()
    gtk.main()
|
thardev/guake
|
src/guake/prefs.py
|
Python
|
gpl-2.0
| 44,668
|
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# 2008 myfingershurt #
# 2008 Glorandwarf #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import os
import glob
import random
from fretwork import log
from fretwork.audio import Sound
from fofix.core.Font import Font
from fofix.core.Image import ImgDrawing
from fofix.core import Config
from fofix.core import Version
from fofix.core import Player
# these constants define a few customized letters in the default font
#MFH - with the new simplified Font.py, no more custom glyphs... let's do a simple replacement here for now...
# Plain-ASCII stand-ins for what used to be custom glyph code points
# (see the commented-out unicode escapes below for the original values).
STAR1 = ' '   # unlit star placeholder
STAR2 = '*'   # lit star
LEFT = '<'    # left arrow marker
RIGHT = '>'   # right arrow marker
STAR3 = STAR1  # falls back to the unlit star glyph
STAR4 = STAR2  # falls back to the lit star glyph
#-STAR1 = unicode('\x10')
#-STAR2 = unicode('\x11')
#-LEFT = unicode('\x12')
#-RIGHT = unicode('\x13')
#-STAR3 = unicode('\x14') #Worldrave - Added new Star3
#-STAR4 = unicode('\x15') #Worldrave - Added new Star4
class Data(object):
"""A collection of globally used data resources such as fonts and sound effects."""
def __init__(self, resource, svg):
    """Load all global game resources (theme, fonts, images, sounds).

    :param resource: asynchronous resource loader used for all file access
    :param svg: image rendering context handed to every ImgDrawing
    """
    self.logClassInits = Config.get("game", "log_class_inits")
    if self.logClassInits == 1:
        log.debug("Data class init (Data.py)...")
    self.logLoadings = Config.get("game", "log_loadings")
    self.logImageNotFound = Config.get("log", "log_image_not_found")
    self.resource = resource
    self.svg = svg
    self.sfxVolume = Config.get("audio", "SFX_volume")
    self.crowdVolume = Config.get("audio", "crowd_volume")
    #Get theme
    themename = Config.get("coffee", "themename")
    self.themeLabel = themename
    self.themeCoOp = False
    self.players = None
    self.players = Player.loadPlayers()
    #myfingershurt: check for existence of theme path
    themepath = os.path.join(Version.dataPath(), "themes")
    self.themepath = themepath
    self.path = Version.dataPath()
    # A theme is only considered valid when its notes.png exists; if the
    # configured theme fails that check, pick a valid one and persist it.
    if not self.checkImgDrawing(os.path.join("themes", themename, "notes", "notes.png")):
        #myfingershurt: here need to ensure an existing theme is selected
        themes = []
        defaultTheme = None  #myfingershurt
        allthemes = os.listdir(themepath)
        for name in allthemes:
            if self.checkImgDrawing(os.path.join("themes", name, "notes", "notes.png")):
                themes.append(name)
                if name == "MegaLight V4":  #myfingershurt
                    defaultTheme = name  #myfingershurt
        if defaultTheme != "MegaLight V4":  #myfingershurt
            # MegaLight V4 not available: fall back to the first valid theme.
            defaultTheme = themes[0]  #myfingershurt
        #not a valid theme if notes.png isn't there! Force default theme:
        Config.set("coffee", "themename", defaultTheme)
        #re-init Data with new default
        themename = defaultTheme
        self.themeLabel = themename
    # Vocal assets may be overridden per-theme; otherwise use the stock set.
    if not os.path.exists(os.path.join(Version.dataPath(), "themes", themename, "vocals")):
        self.vocalPath = "vocals"
    else:
        self.vocalPath = os.path.join("themes", themename, "vocals")
    # NOTE(review): theme index is hard-coded to 2 and co-op forced on here
    # — presumably a legacy of the removed theme-selection logic; confirm.
    self.theme = 2
    self.themeCoOp = True
    self.fontScreenBottom = 0.75  #from our current viewport's constant 3:4 aspect ratio (which is always stretched to fill the video resolution)
    self.loadPartImages()
    #myfingershurt: multi-OS compatibility file access fixes using os.path.join()
    # load font customization images
    #Worldrave - Use new defined Star3 and star4. Using star1 and star2 as a fallback.
    #MFH - no more custom glyphs, these are wasting memory.
    #MFH - but we do need these star1-4 images anyway. Leaving them loaded here in the Data object.
    self.loadImgDrawing(self, "star1", os.path.join("themes", themename, "star1.png"), textureSize = (128, 128))
    self.loadImgDrawing(self, "star2", os.path.join("themes", themename, "star2.png"), textureSize = (128, 128))
    #MFH - let's not rely on errors here if we don't have to...
    if not self.loadImgDrawing(self, "star3", os.path.join("themes", themename, "star3.png"), textureSize = (128, 128)):
        self.star3 = self.star1
    if not self.loadImgDrawing(self, "star4", os.path.join("themes", themename, "star4.png"), textureSize = (128, 128)):
        self.star4 = self.star2
    # Optional "perfect" / "full combo" star art, with graceful fallbacks.
    if self.loadImgDrawing(self, "starPerfect", os.path.join("themes", themename, "starperfect.png"), textureSize = (128, 128)):
        self.perfectStars = True
        self.maskStars = False
    else:
        self.starPerfect = self.star2
        self.fcStars = False
        self.starFC = self.star2
        self.maskStars = True
        self.perfectStars = False
    if self.perfectStars:
        if self.loadImgDrawing(self, "starFC", os.path.join("themes", themename, "starfc.png"), textureSize = (128, 128)):
            self.fcStars = True
        else:
            self.starFC = self.starPerfect
            self.fcStars = False
    # load misc images
    self.loadImgDrawing(self, "loadingImage", os.path.join("themes", themename, "loadingbg.png"), textureSize = (256, 256))
    self.loadImgDrawing(self, "optionsBG", os.path.join("themes", themename, "menu", "optionsbg.png"))
    if self.loadImgDrawing(self, "submenuSelect", os.path.join("themes", themename, "submenuselect.png")):
        subSelectImgW = self.submenuSelect.width1()
        self.submenuSelectFound = True
        # Scale factor that stretches the selector to the 640-unit layout width.
        self.subSelectWFactor = 640.000 / subSelectImgW
        self.subSelectImgH = self.submenuSelect.height1()
    else:
        self.submenuSelectFound = False
        self.loadImgDrawing(self, "submenuSelect", os.path.join("themes", themename, "menu", "selected.png"))
        self.subSelectWFactor = 0
    # load all the data in parallel
    # asciiOnly = not bool(Language.language) or Language.language == "Custom"
    # reversed = _("__lefttoright__") == "__righttoleft__" and True or False
    asciiOnly = True
    reversed = False  # NOTE(review): shadows the builtin; kept for fidelity
    scale = 1
    # evilynux - Load bigger fonts so they're nicer when scaled, scaling readjusted
    fontSize = [44, 132, 34, 32, 30]
    w, h = [int(s) for s in Config.get("video", "resolution").split("x")]
    aspectRatio = float(w) / float(h)
    # [config-key, attribute-name, theme font file, point size]
    self.fontList = [
        ["font1", "font", "default.ttf", fontSize[4]],
        ["font2", "bigFont", "title.ttf", fontSize[1]],
        ["font3", "pauseFont", "pause.ttf", fontSize[2]],
        ["font4", "scoreFont", "score.ttf", fontSize[3]],
        ["font5", "streakFont", "streak.ttf", fontSize[3]],
        ["font6", "loadingFont", "loading.ttf", fontSize[3]],
        ["font7", "songFont", "song.ttf", fontSize[4]],
        ["font8", "songListFont", "songlist.ttf", fontSize[3]],
        ["font9", "shadowFont", "songlist.ttf", fontSize[3]],
        ["font10", "streakFont2", "streakphrase.ttf", fontSize[2]]
    ]
    # Resolution order per font: theme's own file, theme's default.ttf,
    # then the built-in data/fonts/default.ttf.  The factory lambda is
    # invoked immediately (synch = True), so the late-bound fn/f are safe.
    for f in self.fontList:
        if self.fileExists(os.path.join("themes", themename, "fonts", f[2])):
            fn = resource.fileName(os.path.join("themes", themename, "fonts", f[2]))
            f[0] = lambda: Font(fn, f[3], scale = scale*.5, reversed = reversed, systemFont = not asciiOnly, outline = False, aspectRatio = aspectRatio)
            resource.load(self, f[1], f[0], synch = True)
        elif self.fileExists(os.path.join("themes", themename, "fonts", "default.ttf")):
            log.debug("Theme font not found: " + f[2])
            fn = resource.fileName(os.path.join("themes", themename, "fonts", "default.ttf"))
            f[0] = lambda: Font(fn, f[3], scale = scale*.5, reversed = reversed, systemFont = not asciiOnly, outline = False, aspectRatio = aspectRatio)
            resource.load(self, f[1], f[0], synch = True)
        else:
            log.debug("Default theme font not found: %s - using built-in default" % str(f[2]))
            fn = resource.fileName(os.path.join("fonts", "default.ttf"))
            f[0] = lambda: Font(fn, f[3], scale = scale*.5, reversed = reversed, systemFont = not asciiOnly, outline = False, aspectRatio = aspectRatio)
            resource.load(self, f[1], f[0], synch = True)
    # Name -> Font lookup for code that addresses fonts by string.
    self.fontDict = {"font": self.font, "bigFont": self.bigFont, "pauseFont": self.pauseFont, "scoreFont": self.scoreFont,
                     "streakFont": self.streakFont, "songFont": self.songFont, "streakFont2": self.streakFont2,
                     "songListFont": self.songListFont, "shadowFont": self.shadowFont, "loadingFont": self.loadingFont}
    assert self.fontDict['font'] == self.font
    # load sounds asynchronously
    resource.load(self, "screwUpsounds", self.loadScrewUpsounds)
    resource.load(self, "screwUpsoundsBass", self.loadScrewUpsoundsBass)
    resource.load(self, "screwUpsoundsDrums", self.loadScrewUpsoundsDrums)
    resource.load(self, "acceptSounds", self.loadAcceptSounds)
    resource.load(self, "cancelSounds", self.loadBackSounds)
    # loadSoundEffect asynchronously
    # [attribute-name, file name]; theme override first, then stock file.
    self.syncSounds = [
        ["bassDrumSound", "bassdrum.ogg"],
        ["battleUsedSound", "battleused.ogg"],
        ["CDrumSound", "crash.ogg"],
        ["clapSound", "clapsound.ogg"],
        ["coOpFailSound", "coopfail.ogg"],
        #["crowdSound","crowdcheers.ogg"],
        ["failSound", "failsound.ogg"],
        ["rescueSound", "rescue.ogg"],
        ["rockSound", "rocksound.ogg"],
        ["selectSound1", "select1.ogg"],
        ["selectSound2", "select2.ogg"],
        ["selectSound3", "select3.ogg"],
        ["starActivateSound", "staractivate.ogg"],
        ["starDeActivateSound", "stardeactivate.ogg"],
        ["starDingSound", "starding.ogg"],
        ["starLostSound", "starlost.ogg"],
        ["starReadySound", "starpowerready.ogg"],
        ["starSound", "starpower.ogg"],
        ["startSound", "start.ogg"],
        ["T1DrumSound", "tom01.ogg"],
        ["T2DrumSound", "tom02.ogg"],
        ["T3DrumSound", "tom03.ogg"]
    ]
    # NOTE(review): iterating into self.sounds leaves the last entry stored
    # on the instance; kept byte-for-byte for fidelity.
    for self.sounds in self.syncSounds:
        if self.fileExists(os.path.join("themes", themename, "sounds", self.sounds[1])):
            self.loadSoundEffect(self, self.sounds[0], os.path.join("themes", themename, "sounds", self.sounds[1]))
        elif self.fileExists(os.path.join("sounds", self.sounds[1])):
            log.debug("Theme sound not found: " + self.sounds[1])
            self.loadSoundEffect(self, self.sounds[0], os.path.join("sounds", self.sounds[1]))
        else:
            log.warn("File " + self.sounds[1] + " not found using default instead.")
            self.loadSoundEffect(self, self.sounds[0], os.path.join("sounds", "default.ogg"))
    #TODO: Simplify crowdSound stuff so it can join the rest of us.
    #MFH - fallback on sounds/crowdcheers.ogg, and then starpower.ogg. Note if the fallback crowdcheers was used or not.
    # cheerSoundFound: 2 = theme-provided, 1 = stock fallback, 0 = none.
    if self.fileExists(os.path.join("themes", themename, "sounds", "crowdcheers.ogg")):
        self.loadSoundEffect(self, "crowdSound", os.path.join("themes", themename, "sounds", "crowdcheers.ogg"), crowd = True)
        self.cheerSoundFound = 2
    elif self.fileExists(os.path.join("sounds", "crowdcheers.ogg")):
        self.loadSoundEffect(self, "crowdSound", os.path.join("sounds", "crowdcheers.ogg"), crowd = True)
        self.cheerSoundFound = 1
        log.warn(themename + "/sounds/crowdcheers.ogg not found -- using data/sounds/crowdcheers.ogg instead.")
    else:
        self.cheerSoundFound = 0
        log.warn("crowdcheers.ogg not found -- no crowd cheering.")
def loadPartImages(self):
    """Load the instrument-part icons from the theme's common folder.

    Fills self.partImages in the fixed order guitar, rhythm, bass, lead,
    drum, vocal; a missing image yields None in that slot.
    """
    commonDir = os.path.join("themes", self.themeLabel, "common")
    self.partImages = [
        self.loadImgDrawing(None, part, os.path.join(commonDir, "%s.png" % part))
        for part in ("guitar", "rhythm", "bass", "lead", "drum", "vocal")
    ]
def SetAllScrewUpSoundFxObjectVolumes(self, volume):  #MFH
    """Apply *volume* to every guitar, bass and drum screw-up sound."""
    for pool in (self.screwUpsounds, self.screwUpsoundsBass, self.screwUpsoundsDrums):
        for sound in pool:
            sound.setVolume(volume)
def SetAllSoundFxObjectVolumes(self, volume = None):  #MFH - single function to go through all sound objects (and iterate through all sound lists) and set object volume to the given volume
    """Set the volume of every loaded sound-effect object.

    When *volume* is None the cached config values are refreshed and the
    SFX volume is used; the crowd sound always follows crowd_volume.
    """
    #MFH TODO - set every sound object's volume here...
    if volume is None:
        self.sfxVolume = Config.get("audio", "SFX_volume")
        self.crowdVolume = Config.get("audio", "crowd_volume")
        volume = self.sfxVolume
    self.starDingSound.setVolume(volume)
    self.bassDrumSound.setVolume(volume)
    self.T1DrumSound.setVolume(volume)
    self.T2DrumSound.setVolume(volume)
    self.T3DrumSound.setVolume(volume)
    self.CDrumSound.setVolume(volume)
    for s in self.acceptSounds:
        s.setVolume(volume)
    for s in self.cancelSounds:
        s.setVolume(volume)
    self.rockSound.setVolume(volume)
    self.starDeActivateSound.setVolume(volume)
    self.starActivateSound.setVolume(volume)
    self.battleUsedSound.setVolume(volume)
    self.rescueSound.setVolume(volume)
    self.coOpFailSound.setVolume(volume)
    # The crowd sound is the one exception: it has its own volume setting.
    self.crowdSound.setVolume(self.crowdVolume)
    self.starReadySound.setVolume(volume)
    self.clapSound.setVolume(volume)
    self.failSound.setVolume(volume)
    self.starSound.setVolume(volume)
    self.startSound.setVolume(volume)
    self.selectSound1.setVolume(volume)
    self.selectSound2.setVolume(volume)
    self.selectSound3.setVolume(volume)
def loadSoundEffect(self, target, name, fileName, crowd = False):
    """Asynchronously load *fileName* as a Sound bound to target.name.

    The sound's volume is initialised from the SFX volume setting, or
    from the crowd volume when *crowd* is set.
    """
    level = self.crowdVolume if crowd else self.sfxVolume
    path = self.resource.fileName(fileName)
    self.resource.load(target, name,
                       lambda: Sound(path),
                       onLoad = lambda snd: snd.setVolume(level))
def determineNumSounds(self, soundPath, soundPrefix, soundExtension = ".ogg"):  #MFH - auto random sound enumeration
    """Count the consecutively numbered sound files prefix1..prefixN.

    Probes soundPath/<prefix><n><ext> for n = 1, 2, ... and returns the
    last n that exists (0 when even the first file is missing).
    """
    count = 0
    while True:
        candidate = os.path.join(soundPath, "%s%d%s" % (soundPrefix, count + 1, soundExtension))
        if not self.fileExists(candidate):
            return count
        count += 1
def getSoundObjectList(self, soundPath, soundPrefix, numSounds, soundExtension = ".ogg"):  #MFH
    """Load an enumerated series of sound files as Sound objects.

    Loads soundPath/<prefix>1<ext> through <prefix><numSounds><ext> and
    returns them as a list (empty when numSounds is 0).
    """
    log.debug("{0}1{2} - {0}{1}{2} found in {3}".format(soundPrefix, numSounds, soundExtension, soundPath))
    sounds = []
    # range() instead of the Python-2-only xrange(): behaviour is identical
    # for iteration and keeps the code portable to Python 3.
    for i in range(1, numSounds + 1):
        filePath = os.path.join(soundPath, "%s%d%s" % (soundPrefix, i, soundExtension))
        sounds.append(Sound(self.resource.fileName(filePath)))
    return sounds
def loadBackSounds(self):  #MFH - adding optional support for random choice between two back sounds
    """Return the menu 'back' sounds.

    Uses every back*.ogg in the theme's sound folder; falls back to the
    theme's out.ogg as a single-element list.
    """
    themeSoundDir = os.path.join("themes", self.themeLabel, "sounds")
    found = self.determineNumSounds(themeSoundDir, "back")
    if found > 0:
        return self.getSoundObjectList(themeSoundDir, "back", found)
    return [Sound(self.resource.fileName(os.path.join("themes", self.themeLabel, "sounds", "out.ogg")))]
def loadAcceptSounds(self):
    """Return the menu 'accept' sounds.

    Uses every accept*.ogg in the theme's sound folder; otherwise falls
    back to a single theme-dependent sound (in.ogg for GH2/GH3 themes,
    action.ogg for RB-style themes).
    """
    themeSoundDir = os.path.join("themes", self.themeLabel, "sounds")
    found = self.determineNumSounds(themeSoundDir, "accept")
    if found > 0:
        return self.getSoundObjectList(themeSoundDir, "accept", found)
    if self.theme in (0, 1):  #GH2 or GH3
        return [Sound(self.resource.fileName(os.path.join("themes", self.themeLabel, "sounds", "in.ogg")))]
    elif self.theme == 2:
        return [Sound(self.resource.fileName(os.path.join("themes", self.themeLabel, "sounds", "action.ogg")))]
def loadScrewUpsounds(self):
    """Return the guitar screw-up sounds.

    Prefers the theme's own guitscw*.ogg files; falls back to the stock
    ones in data/sounds when the theme supplies none.
    """
    prefix = "guitscw"
    location = os.path.join("themes", self.themeLabel, "sounds")
    count = self.determineNumSounds(location, prefix)
    if not count:
        location = "sounds"
        count = self.determineNumSounds(location, prefix)
    return self.getSoundObjectList(location, prefix, count)
def loadScrewUpsoundsBass(self):
    """Return the bass screw-up sounds (theme's bassscw*.ogg, else the
    stock set in data/sounds)."""
    prefix = "bassscw"
    location = os.path.join("themes", self.themeLabel, "sounds")
    count = self.determineNumSounds(location, prefix)
    if not count:
        location = "sounds"
        count = self.determineNumSounds(location, prefix)
    return self.getSoundObjectList(location, prefix, count)
def loadScrewUpsoundsDrums(self):
    """Return the drum screw-up sounds (theme's drumscw*.ogg, else the
    stock set in data/sounds)."""
    prefix = "drumscw"
    location = os.path.join("themes", self.themeLabel, "sounds")
    count = self.determineNumSounds(location, prefix)
    if not count:
        location = "sounds"
        count = self.determineNumSounds(location, prefix)
    return self.getSoundObjectList(location, prefix, count)
def loadSyncsounds(self):
    """Return the metronome/sync sounds (currently just sync1.ogg)."""
    sounds = []
    for index in range(1, 2):
        sounds.append(Sound(self.resource.fileName("sync%d.ogg" % index)))
    return sounds
def checkImgDrawing(self, fileName):
    """Return a truthy value if *fileName* resolves to an image on disk,
    without actually loading it."""
    return self.getImgDrawing(fileName, openImage = False)
def getImgDrawing(self, fileName, openImage=True):
    """Locate (and optionally load) an image across all data paths.

    :param fileName: path relative to a resource data directory; the
        extension may be omitted, in which case any matching extension
        is accepted via a glob.
    :param openImage: when True return an ImgDrawing (or False on
        failure); when False only report existence (True/False).
    """
    imgDrawing = None
    for dataPath in self.resource.dataPaths:
        fileName1 = os.path.join(dataPath, fileName)
        if self.logLoadings == 1:
            if openImage:
                log.notice("Trying to load image: %s" % fileName1)
            else:
                log.notice("Checking image: %s" % fileName1)
        #check if fileName1 exists (has extension)
        if os.path.exists(fileName1):
            if openImage:
                try:
                    imgDrawing = ImgDrawing(self.svg, fileName1)
                    return imgDrawing
                except IOError:
                    log.warn("Unable to load image file: %s" % fileName1)
                except OverflowError:
                    log.warn("Unable to read image file: %s" % fileName1)
            else:
                return True
        else:
            #find extension
            # No exact match: strip the extension and glob for any file
            # sharing the base name (e.g. .png vs .jpg variants).
            fileName1 = os.path.splitext(fileName1)[0]
            files = glob.glob('%s.*' % fileName1)
            if openImage:
                # Try each candidate until one loads cleanly.
                for i in range(len(files)):
                    try:
                        imgDrawing = ImgDrawing(self.svg, files[i])
                        return imgDrawing
                    except IOError:
                        log.warn("Unable to load image file: %s" % files[i])
            elif len(files) > 0:
                return True
    #image not found
    if self.logImageNotFound:
        log.debug("Image not found: %s" % fileName)
    return False
def loadImgDrawing(self, target, name, fileName, textureSize = None):
    """Load an image drawing synchronously.

    @param target: An object that will own the drawing (may be None)
    @param name: The name of the attribute the drawing will be assigned to
    @param fileName: The name of the file in the data directory
    @param textureSize: Either None or (x, y), in which case the file will
                        be rendered to an x by y texture
    @return: L{ImgDrawing} instance, or None when the image is missing
    """
    drawing = self.getImgDrawing(fileName)
    if not drawing:
        # Missing image: clear the attribute when there is somewhere to
        # store it, otherwise just record the failure in the log.
        if target and name:
            setattr(target, name, None)
        else:
            log.error("Image not found: " + fileName)
        return None
    if not target:
        return drawing
    return self.resource.load(target, name, lambda: drawing, synch = True)
def loadAllImages(self, target, directory, prefix = "img_", textureSize = None): #akedrou
"""
Loads all images found in a folder to a given target.
@param target: An object that will own the drawings
@param directory: The directory that will be searched for image files.
@param textureSize: Either None or (x, y), in which case the files will
be rendered to an x by y texture
"""
if not os.path.isdir(os.path.join(self.path, directory)):
return None
imgDict = {}
for file in os.listdir(os.path.join(self.path, directory)):
if file == "thumbs.db" or file == "Thumbs.db":
continue
elif file[0] == ".":
continue
elif os.path.isdir(os.path.join(self.path, directory, file)):
continue
name = os.path.splitext(file)[0]
name = prefix+name
img = self.loadImgDrawing(target, name, os.path.join(directory, file), textureSize)
if img and target is None:
imgDict[name] = img
if target is None and len(imgDict) > 0:
return imgDict
#glorandwarf: changed name to getPath
def getPath(self, fileName):
    """Resolve *fileName* against the resource loader's search paths."""
    resolved = self.resource.fileName(fileName)
    return resolved
#myfingershurt: still need this fileexists function:
def fileExists(self, fileName):
    """True when *fileName*, resolved via the resource loader, exists."""
    return os.path.exists(self.resource.fileName(fileName))
#MFH - acceptSound and selectSound will now be merged into either 10 random sounds or just the acceptSound as a fallback:
def getAcceptSound(self):
    """@return: A randomly chosen accept sound."""
    pool = self.acceptSounds
    return random.choice(pool)
acceptSound = property(getAcceptSound)
def getBackSound(self):
    """@return: A randomly chosen cancel/back sound."""
    pool = self.cancelSounds
    return random.choice(pool)
cancelSound = property(getBackSound)
def getSelectSound(self):
    """@return: A randomly chosen selection sound."""
    options = (self.selectSound1, self.selectSound2, self.selectSound3)
    return random.choice(options)
selectSound = property(getSelectSound)
def getScrewUpSound(self):
    """@return: A randomly chosen guitar screw-up sound."""
    pool = self.screwUpsounds
    return random.choice(pool)
def getScrewUpSoundBass(self):
    """@return: A randomly chosen bass screw-up sound."""
    pool = self.screwUpsoundsBass
    return random.choice(pool)
#myfingershurt: drums screw up sounds
def getScrewUpSoundDrums(self):
    """@return: A randomly chosen drum screw-up sound."""
    pool = self.screwUpsoundsDrums
    return random.choice(pool)
screwUpSound = property(getScrewUpSound)
screwUpSoundBass = property(getScrewUpSoundBass)
screwUpSoundDrums = property(getScrewUpSoundDrums)  #myfingershurt: drum screw up sounds
def essentialResourcesLoaded(self):
    """return: True if essential resources such as the fonts have been loaded."""
    essentials = self.font and self.bigFont
    return bool(essentials)
def resourcesLoaded(self):
    """return: True once every attribute set by the loader holds a value."""
    for value in self.__dict__.values():
        if value is None:
            return False
    return True
|
fculpo/fofix
|
fofix/core/Data.py
|
Python
|
gpl-2.0
| 25,953
|
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Driver base-classes:
(Beginning of) the contract that compute drivers must follow, and shared
types that support that contract
"""
import sys
from oslo_log import log as logging
from oslo_utils import importutils
import six
import nova.conf
from nova.i18n import _, _LE, _LI
from nova import utils
from nova.virt import event as virtevent
# Module-wide configuration and logger handles.
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
def driver_dict_from_config(named_driver_config, *args, **kwargs):
    """Build a dict of driver instances from 'type=import.path' strings.

    Each entry in *named_driver_config* has the form
    ``<driver_type>=<module.path.ClassName>``; the class is imported and
    instantiated with the supplied positional/keyword arguments.
    """
    registry = {}
    for entry in named_driver_config:
        driver_type, _sep, class_path = entry.partition('=')
        registry[driver_type] = importutils.import_class(class_path)(*args, **kwargs)
    return registry
def get_block_device_info(instance, block_device_mapping):
    """Converts block device mappings for an instance to driver format.

    Virt drivers expect block device mapping to be presented in the format
    of a dict containing the following keys:

    - root_device_name: device name of the root disk
    - ephemerals: a (potentially empty) list of DriverEphemeralBlockDevice
                  instances
    - swap: An instance of DriverSwapBlockDevice or None
    - block_device_mapping: a (potentially empty) list of
                            DriverVolumeBlockDevice or any of it's more
                            specialized subclasses.
    """
    # NOTE(review): local import — presumably deferred to avoid a circular
    # import at module load time; confirm before hoisting to the top.
    from nova.virt import block_device as virt_block_device
    block_device_info = {
        'root_device_name': instance.root_device_name,
        'ephemerals': virt_block_device.convert_ephemerals(
            block_device_mapping),
        'block_device_mapping':
            virt_block_device.convert_all_volumes(*block_device_mapping)
    }
    # Swap is resolved separately: convert_swap may yield several
    # candidates and get_swap selects the one to use (or None).
    swap_list = virt_block_device.convert_swap(block_device_mapping)
    block_device_info['swap'] = virt_block_device.get_swap(swap_list)
    return block_device_info
def block_device_info_get_root(block_device_info):
    """Return the root device name from *block_device_info* (None-safe)."""
    info = block_device_info or {}
    return info.get('root_device_name')
def block_device_info_get_swap(block_device_info):
    """Return the swap mapping, or an empty placeholder when absent."""
    placeholder = {'device_name': None, 'swap_size': 0}
    info = block_device_info or {}
    return info.get('swap') or placeholder
def swap_is_usable(swap):
    """Truthy when *swap* names a device and has a positive size.

    Mirrors the short-circuit semantics of ``a and b and c``: a falsy
    mapping or device name is returned as-is, otherwise a bool.
    """
    if not swap:
        return swap
    device = swap['device_name']
    if not device:
        return device
    return swap['swap_size'] > 0
def block_device_info_get_ephemerals(block_device_info):
    """Return the list of ephemeral mappings ([] when absent)."""
    info = block_device_info or {}
    return info.get('ephemerals') or []
def block_device_info_get_mapping(block_device_info):
    """Return the volume block-device mappings ([] when absent)."""
    info = block_device_info or {}
    return info.get('block_device_mapping') or []
class ComputeDriver(object):
"""Base class for compute drivers.
The interface to this class talks in terms of 'instances' (Amazon EC2 and
internal Nova terminology), by which we mean 'running virtual machine'
(XenAPI terminology) or domain (Xen or libvirt terminology).
An instance has an ID, which is the identifier chosen by Nova to represent
the instance further up the stack. This is unfortunately also called a
'name' elsewhere. As far as this layer is concerned, 'instance ID' and
'instance name' are synonyms.
Note that the instance ID or name is not human-readable or
customer-controlled -- it's an internal ID chosen by Nova. At the
nova.virt layer, instances do not have human-readable names at all -- such
things are only known higher up the stack.
Most virtualization platforms will also have their own identity schemes,
to uniquely identify a VM or domain. These IDs must stay internal to the
platform-specific layer, and never escape the connection interface. The
platform-specific layer is responsible for keeping track of which instance
ID maps to which platform-specific ID, and vice versa.
Some methods here take an instance of nova.compute.service.Instance. This
is the data structure used by nova.compute to store details regarding an
instance, and pass them into this layer. This layer is responsible for
translating that generic data structure into terms that are specific to the
virtualization platform.
"""
# Feature flags advertised by this driver; subclasses override the
# entries they actually support.
capabilities = {
    "has_imagecache": False,
    "supports_recreate": False,
    "supports_migrate_to_same_host": False
}
def __init__(self, virtapi):
    # virtapi: handle back into the compute layer for API/DB access.
    self.virtapi = virtapi
    # None until a callback is installed — presumably via an
    # event-registration hook elsewhere in this class; confirm.
    self._compute_event_callback = None
def init_host(self, host):
    """Initialize anything that is necessary for the driver to function,
    including catching up with currently running VM's on the given host.
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    # Abstract: concrete drivers must implement this.
    raise NotImplementedError()
def cleanup_host(self, host):
    """Clean up anything that is necessary for the driver gracefully stop,
    including ending remote sessions. This is optional.
    """
    # Intentionally a no-op: optional hook for subclasses.
    pass
def get_info(self, instance):
    """Get the current status of an instance, by name (not ID!)

    :param instance: nova.objects.instance.Instance object

    Returns a InstanceInfo object
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    # Abstract: concrete drivers must implement this.
    raise NotImplementedError()
def get_num_instances(self):
    """Return the total number of virtual machines.

    Counts the VMs the hypervisor knows about via list_instances().

    .. note::

        This implementation works for all drivers, but it is
        not particularly efficient. Maintainers of the virt drivers are
        encouraged to override this method with something more
        efficient.
    """
    known = self.list_instances()
    return len(known)
def instance_exists(self, instance):
    """Checks existence of an instance on the host.

    :param instance: The instance to lookup

    Returns True if an instance with the supplied ID exists on
    the host, False otherwise.

    .. note::

        This implementation works for all drivers, but it is
        not particularly efficient. Maintainers of the virt drivers are
        encouraged to override this method with something more
        efficient.
    """
    # Prefer the UUID listing; fall back to names for drivers that do
    # not implement list_instance_uuids().
    try:
        known_uuids = self.list_instance_uuids()
    except NotImplementedError:
        return instance.name in self.list_instances()
    return instance.uuid in known_uuids
def estimate_instance_overhead(self, instance_info):
    """Estimate the virtualization overhead required to build an instance
    of the given flavor.

    Defaults to zero; drivers should override if per-instance overhead
    calculations are desired.

    :param instance_info: Instance/flavor to calculate overhead for.
    :returns: Dict of estimated overhead values.
    """
    overhead = {'memory_mb': 0}
    return overhead
def list_instances(self):
    """Return the names of all the instances known to the virtualization
    layer, as a list.
    """
    # TODO(Vek): Need to pass context in for access to auth_token
    # Abstract: concrete drivers must implement this.
    raise NotImplementedError()
def list_instance_uuids(self):
    """Return the UUIDS of all the instances known to the virtualization
    layer, as a list.
    """
    # Optional: drivers without UUID support leave this unimplemented and
    # callers fall back to list_instances() (see instance_exists).
    raise NotImplementedError()
def rebuild(self, context, instance, image_meta, injected_files,
            admin_password, bdms, detach_block_devices,
            attach_block_devices, network_info=None,
            recreate=False, block_device_info=None,
            preserve_ephemeral=False):
    """Destroy and re-make this instance.

    A 'rebuild' effectively purges all existing data from the system and
    remakes the VM with given 'metadata' and 'personalities'.

    This base class method shuts down the VM, detaches all block devices,
    then spins up the new VM afterwards. It may be overridden by
    hypervisors that need to - e.g. for optimisations, or when the 'VM'
    is actually proxied and needs to be held across the shutdown + spin
    up steps.

    :param context: security context
    :param instance: nova.objects.instance.Instance
                     This function should use the data there to guide
                     the creation of the new instance.
    :param nova.objects.ImageMeta image_meta:
        The metadata of the image of the instance.
    :param injected_files: User files to inject into instance.
    :param admin_password: Administrator password to set in instance.
    :param bdms: block-device-mappings to use for rebuild
    :param detach_block_devices: function to detach block devices. See
        nova.compute.manager.ComputeManager:_rebuild_default_impl for
        usage.
    :param attach_block_devices: function to attach block devices. See
        nova.compute.manager.ComputeManager:_rebuild_default_impl for
        usage.
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param recreate: True if the instance is being recreated on a new
        hypervisor - all the cleanup of old state is skipped.
    :param block_device_info: Information about block devices to be
        attached to the instance.
    :param preserve_ephemeral: True if the default ephemeral storage
        partition must be preserved on rebuild
    """
    # Abstract: concrete drivers must implement rebuild.
    raise NotImplementedError()
def spawn(self, context, instance, image_meta, injected_files,
          admin_password, network_info=None, block_device_info=None):
    """Create a new instance/VM/domain on the virtualization platform.

    Once this successfully completes, the instance should be
    running (power_state.RUNNING).

    If this fails, any partial instance should be completely
    cleaned up, and the virtualization platform should be in the state
    that it was before this call began.

    :param context: security context
    :param instance: nova.objects.instance.Instance
                     This function should use the data there to guide
                     the creation of the new instance.
    :param nova.objects.ImageMeta image_meta:
        The metadata of the image of the instance.
    :param injected_files: User files to inject into instance.
    :param admin_password: Administrator password to set in instance.
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: Information about block devices to be
                              attached to the instance.
    """
    # Abstract: concrete drivers must implement spawn.
    raise NotImplementedError()
def destroy(self, context, instance, network_info, block_device_info=None,
            destroy_disks=True, migrate_data=None):
    """Destroy the specified instance from the Hypervisor.

    If the instance is not found (for example if networking failed), this
    function should still succeed. It's probably a good idea to log a
    warning in that case.

    :param context: security context
    :param instance: Instance object as returned by DB layer.
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: Information about block devices that should
                              be detached from the instance.
    :param destroy_disks: Indicates if disks should be destroyed
    :param migrate_data: implementation specific params
    """
    # Abstract: concrete drivers must implement destroy.
    raise NotImplementedError()
def cleanup(self, context, instance, network_info, block_device_info=None,
            destroy_disks=True, migrate_data=None, destroy_vifs=True):
    """Cleanup the instance resources .

    Instance should have been destroyed from the Hypervisor before calling
    this method.

    :param context: security context
    :param instance: Instance object as returned by DB layer.
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param block_device_info: Information about block devices that should
                              be detached from the instance.
    :param destroy_disks: Indicates if disks should be destroyed
    :param migrate_data: implementation specific params
    """
    # Abstract: concrete drivers must implement cleanup.
    raise NotImplementedError()
def reboot(self, context, instance, network_info, reboot_type,
           block_device_info=None, bad_volumes_callback=None):
    """Reboot the specified instance.

    After this is called successfully, the instance's state
    goes back to power_state.RUNNING. The virtualization
    platform should ensure that the reboot action has completed
    successfully even in cases in which the underlying domain/vm
    is paused or halted/stopped.

    :param instance: nova.objects.instance.Instance
    :param network_info:
       :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    :param reboot_type: Either a HARD or SOFT reboot
    :param block_device_info: Info pertaining to attached volumes
    :param bad_volumes_callback: Function to handle any bad volumes
        encountered
    """
    # Abstract: concrete drivers must implement reboot.
    raise NotImplementedError()
def get_console_pool_info(self, console_type):
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_console_output(self, context, instance):
"""Get console output for an instance
:param context: security context
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
    def get_vnc_console(self, context, instance):
        """Get connection info for a vnc console.

        :param context: security context
        :param instance: nova.objects.instance.Instance

        :returns: an instance of console.type.ConsoleVNC
        """
        raise NotImplementedError()
    def get_spice_console(self, context, instance):
        """Get connection info for a spice console.

        :param context: security context
        :param instance: nova.objects.instance.Instance

        :returns: an instance of console.type.ConsoleSpice
        """
        raise NotImplementedError()
    def get_rdp_console(self, context, instance):
        """Get connection info for a rdp console.

        :param context: security context
        :param instance: nova.objects.instance.Instance

        :returns: an instance of console.type.ConsoleRDP
        """
        raise NotImplementedError()
    def get_serial_console(self, context, instance, index=0, at_port=None):
        """Get connection info for a serial console.

        :param context: security context
        :param instance: nova.objects.instance.Instance
        :param index: int index of the requested serial port
        :param at_port: optional; NOTE(review): looks like it selects the
            console by port rather than by index -- confirm semantics
            against concrete driver implementations

        :returns: index'th instance of console.type.ConsoleSerial
        """
        raise NotImplementedError()
    def get_mks_console(self, context, instance):
        """Get connection info for a MKS console.

        :param context: security context
        :param instance: nova.objects.instance.Instance

        :returns: an instance of console.type.ConsoleMKS
        """
        raise NotImplementedError()
def get_diagnostics(self, instance):
"""Return diagnostics data about the given instance.
:param nova.objects.instance.Instance instance:
The instance to which the diagnostic data should be returned.
:return: Has a big overlap to the return value of the newer interface
:func:`get_instance_diagnostics`
:rtype: dict
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_instance_diagnostics(self, instance):
"""Return diagnostics data about the given instance.
:param nova.objects.instance.Instance instance:
The instance to which the diagnostic data should be returned.
:return: Has a big overlap to the return value of the older interface
:func:`get_diagnostics`
:rtype: nova.virt.diagnostics.Diagnostics
"""
raise NotImplementedError()
def get_all_bw_counters(self, instances):
"""Return bandwidth usage counters for each interface on each
running VM.
:param instances: nova.objects.instance.InstanceList
"""
raise NotImplementedError()
    def get_all_volume_usage(self, context, compute_host_bdms):
        """Return usage info for volumes attached to vms on
        a given host.

        :param context: security context
        :param compute_host_bdms: block device mappings for the instances
            on this host -- TODO confirm exact structure against the caller
            in nova.compute.manager
        """
        raise NotImplementedError()
def get_host_ip_addr(self):
"""Retrieves the IP address of the dom0
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
"""Attach the disk to the instance at mountpoint using info."""
raise NotImplementedError()
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
"""Detach the disk attached to the instance."""
raise NotImplementedError()
def swap_volume(self, old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
"""Replace the volume attached to the given `instance`.
:param dict old_connection_info:
The volume for this connection gets detached from the given
`instance`.
:param dict new_connection_info:
The volume for this connection gets attached to the given
'instance'.
:param nova.objects.instance.Instance instance:
The instance whose volume gets replaced by another one.
:param str mountpoint:
The mountpoint in the instance where the volume for
`old_connection_info` is attached to.
:param int resize_to:
If the new volume is larger than the old volume, it gets resized
to the given size (in Gigabyte) of `resize_to`.
:return: None
"""
raise NotImplementedError()
def attach_interface(self, instance, image_meta, vif):
"""Use hotplug to add a network interface to a running instance.
The counter action to this is :func:`detach_interface`.
:param nova.objects.instance.Instance instance:
The instance which will get an additional network interface.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param nova.network.model.NetworkInfo vif:
The object which has the information about the interface to attach.
:raise nova.exception.NovaException: If the attach fails.
:return: None
"""
raise NotImplementedError()
def detach_interface(self, instance, vif):
"""Use hotunplug to remove a network interface from a running instance.
The counter action to this is :func:`attach_interface`.
:param nova.objects.instance.Instance instance:
The instance which gets a network interface removed.
:param nova.network.model.NetworkInfo vif:
The object which has the information about the interface to detach.
:raise nova.exception.NovaException: If the detach fails.
:return: None
"""
raise NotImplementedError()
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
"""Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
:param nova.objects.instance.Instance instance:
The instance whose disk should be migrated.
:param str dest:
The IP address of the destination host.
:param nova.objects.flavor.Flavor flavor:
The flavor of the instance whose disk get migrated.
:param nova.network.model.NetworkInfo network_info:
The network information of the given `instance`.
:param dict block_device_info:
Information about the block devices.
:param int timeout:
The time in seconds to wait for the guest OS to shutdown.
:param int retry_interval:
How often to signal guest while waiting for it to shutdown.
:return: A list of disk information dicts in JSON format.
:rtype: str
"""
raise NotImplementedError()
def snapshot(self, context, instance, image_id, update_task_state):
"""Snapshots the specified instance.
:param context: security context
:param instance: nova.objects.instance.Instance
:param image_id: Reference to a pre-created image that will
hold the snapshot.
"""
raise NotImplementedError()
def post_interrupted_snapshot_cleanup(self, context, instance):
"""Cleans up any resources left after an interrupted snapshot.
:param context: security context
:param instance: nova.objects.instance.Instance
"""
pass
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
"""Completes a resize/migration.
:param context: the context for the migration/resize
:param migration: the migrate/resize information
:param instance: nova.objects.instance.Instance being migrated/resized
:param disk_info: the newly transferred disk information
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param resize_instance: True if the instance is being resized,
False otherwise
:param block_device_info: instance volume block device info
:param power_on: True if the instance should be powered on, False
otherwise
"""
raise NotImplementedError()
def confirm_migration(self, migration, instance, network_info):
"""Confirms a resize/migration, destroying the source VM.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
"""Finish reverting a resize/migration.
:param context: the context for the finish_revert_migration
:param instance: nova.objects.instance.Instance being migrated/resized
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param block_device_info: instance volume block device info
:param power_on: True if the instance should be powered on, False
otherwise
"""
raise NotImplementedError()
def pause(self, instance):
"""Pause the given instance.
A paused instance doesn't use CPU cycles of the host anymore. The
state of the VM could be stored in the memory or storage space of the
host, depending on the underlying hypervisor technology.
A "stronger" version of `pause` is :func:'suspend'.
The counter action for `pause` is :func:`unpause`.
:param nova.objects.instance.Instance instance:
The instance which should be paused.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def unpause(self, instance):
"""Unpause the given paused instance.
The paused instance gets unpaused and will use CPU cycles of the
host again. The counter action for 'unpause' is :func:`pause`.
Depending on the underlying hypervisor technology, the guest has the
same state as before the 'pause'.
:param nova.objects.instance.Instance instance:
The instance which should be unpaused.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def suspend(self, context, instance):
"""Suspend the specified instance.
A suspended instance doesn't use CPU cycles or memory of the host
anymore. The state of the instance could be persisted on the host
and allocate storage space this way. A "softer" way of `suspend`
is :func:`pause`. The counter action for `suspend` is :func:`resume`.
:param nova.context.RequestContext context:
The context for the suspend.
:param nova.objects.instance.Instance instance:
The instance to suspend.
:return: None
"""
raise NotImplementedError()
def resume(self, context, instance, network_info, block_device_info=None):
"""resume the specified suspended instance.
The suspended instance gets resumed and will use CPU cycles and memory
of the host again. The counter action for 'resume' is :func:`suspend`.
Depending on the underlying hypervisor technology, the guest has the
same state as before the 'suspend'.
:param nova.context.RequestContext context:
The context for the resume.
:param nova.objects.instance.Instance instance:
The suspended instance to resume.
:param nova.network.model.NetworkInfo network_info:
Necessary network information for the resume.
:param dict block_device_info:
Instance volume block device info.
:return: None
"""
raise NotImplementedError()
def resume_state_on_host_boot(self, context, instance, network_info,
block_device_info=None):
"""resume guest state when a host is booted.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance.
:param nova.context.RequestContext context:
The context for the rescue.
:param nova.objects.instance.Instance instance:
The instance being rescued.
:param nova.network.model.NetworkInfo network_info:
Necessary network information for the resume.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param rescue_password: new root password to set for rescue.
"""
raise NotImplementedError()
    def set_bootable(self, instance, is_bootable):
        """Set the ability to power on/off an instance.

        :param instance: nova.objects.instance.Instance
        :param is_bootable: boolean; whether the instance may be
            powered on/off
        """
        raise NotImplementedError()
def unrescue(self, instance, network_info):
"""Unrescue the specified instance.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance.
:param instance: nova.objects.instance.Instance
:param timeout: time to wait for GuestOS to shutdown
:param retry_interval: How often to signal guest while
waiting for it to shutdown
"""
raise NotImplementedError()
    def power_on(self, context, instance, network_info,
                 block_device_info=None):
        """Power on the specified instance.

        :param context: security context
        :param instance: nova.objects.instance.Instance
        :param network_info:
            :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
        :param block_device_info: instance volume block device info
        """
        raise NotImplementedError()
def trigger_crash_dump(self, instance):
"""Trigger crash dump mechanism on the given instance.
Stalling instances can be triggered to dump the crash data. How the
guest OS reacts in details, depends on the configuration of it.
:param nova.objects.instance.Instance instance:
The instance where the crash dump should be triggered.
:return: None
"""
raise NotImplementedError()
def soft_delete(self, instance):
"""Soft delete the specified instance.
A soft-deleted instance doesn't allocate any resources anymore, but is
still available as a database entry. The counter action :func:`restore`
uses the database entry to create a new instance based on that.
:param nova.objects.instance.Instance instance:
The instance to soft-delete.
:return: None
"""
raise NotImplementedError()
def restore(self, instance):
"""Restore the specified soft-deleted instance.
The restored instance will be automatically booted. The counter action
for `restore` is :func:`soft_delete`.
:param nova.objects.instance.Instance instance:
The soft-deleted instance which should be restored from the
soft-deleted data.
:return: None
"""
raise NotImplementedError()
def get_available_resource(self, nodename):
"""Retrieve resource information.
This method is called when nova-compute launches, and
as part of a periodic task that records the results in the DB.
:param nodename:
node which the caller want to get resources from
a driver that manages only one node can safely ignore this
:returns: Dictionary describing resources
"""
raise NotImplementedError()
def pre_live_migration(self, context, instance, block_device_info,
network_info, disk_info, migrate_data=None):
"""Prepare an instance for live migration
:param context: security context
:param instance: nova.objects.instance.Instance object
:param block_device_info: instance block device information
:param network_info: instance network information
:param disk_info: instance disk information
:param migrate_data: a LiveMigrateData object
"""
raise NotImplementedError()
def live_migration(self, context, instance, dest,
post_method, recover_method, block_migration=False,
migrate_data=None):
"""Live migration of an instance to another host.
:param context: security context
:param instance:
nova.db.sqlalchemy.models.Instance object
instance object that is migrated.
:param dest: destination host
:param post_method:
post operation method.
expected nova.compute.manager._post_live_migration.
:param recover_method:
recovery method when any exception occurs.
expected nova.compute.manager._rollback_live_migration.
:param block_migration: if true, migrate VM disk.
:param migrate_data: a LiveMigrateData object
"""
raise NotImplementedError()
def live_migration_force_complete(self, instance):
"""Force live migration to complete
:param instance: Instance being live migrated
"""
raise NotImplementedError()
def live_migration_abort(self, instance):
"""Abort an in-progress live migration.
:param instance: instance that is live migrating
"""
raise NotImplementedError()
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
"""Clean up destination node after a failed live migration.
:param context: security context
:param instance: instance object that was being migrated
:param network_info: instance network information
:param block_device_info: instance block device information
:param destroy_disks:
if true, destroy disks at destination during cleanup
:param migrate_data: a LiveMigrateData object
"""
raise NotImplementedError()
def post_live_migration(self, context, instance, block_device_info,
migrate_data=None):
"""Post operation of live migration at source host.
:param context: security context
:instance: instance object that was migrated
:block_device_info: instance block device information
:param migrate_data: a LiveMigrateData object
"""
pass
def post_live_migration_at_source(self, context, instance, network_info):
"""Unplug VIFs from networks at source.
:param context: security context
:param instance: instance object reference
:param network_info: instance network information
"""
raise NotImplementedError(_("Hypervisor driver does not support "
"post_live_migration_at_source method"))
def post_live_migration_at_destination(self, context, instance,
network_info,
block_migration=False,
block_device_info=None):
"""Post operation of live migration at destination host.
:param context: security context
:param instance: instance object that is migrated
:param network_info: instance network information
:param block_migration: if true, post operation of block_migration.
"""
raise NotImplementedError()
def check_instance_shared_storage_local(self, context, instance):
"""Check if instance files located on shared storage.
This runs check on the destination host, and then calls
back to the source host to check the results.
:param context: security context
:param instance: nova.objects.instance.Instance object
"""
raise NotImplementedError()
def check_instance_shared_storage_remote(self, context, data):
"""Check if instance files located on shared storage.
:param context: security context
:param data: result of check_instance_shared_storage_local
"""
raise NotImplementedError()
def check_instance_shared_storage_cleanup(self, context, data):
"""Do cleanup on host after check_instance_shared_storage calls
:param context: security context
:param data: result of check_instance_shared_storage_local
"""
pass
def check_can_live_migrate_destination(self, context, instance,
src_compute_info, dst_compute_info,
block_migration=False,
disk_over_commit=False):
"""Check if it is possible to execute live migration.
This runs checks on the destination host, and then calls
back to the source host to check the results.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param src_compute_info: Info about the sending machine
:param dst_compute_info: Info about the receiving machine
:param block_migration: if true, prepare for block migration
:param disk_over_commit: if true, allow disk over commit
:returns: a LiveMigrateData object (hypervisor-dependent)
"""
raise NotImplementedError()
def check_can_live_migrate_destination_cleanup(self, context,
dest_check_data):
"""Do required cleanup on dest host after check_can_live_migrate calls
:param context: security context
:param dest_check_data: result of check_can_live_migrate_destination
"""
raise NotImplementedError()
def check_can_live_migrate_source(self, context, instance,
dest_check_data, block_device_info=None):
"""Check if it is possible to execute live migration.
This checks if the live migration can succeed, based on the
results from check_can_live_migrate_destination.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param dest_check_data: result of check_can_live_migrate_destination
:param block_device_info: result of _get_instance_block_device_info
:returns: a LiveMigrateData object
"""
raise NotImplementedError()
def get_instance_disk_info(self, instance,
block_device_info=None):
"""Retrieve information about actual disk sizes of an instance.
:param instance: nova.objects.Instance
:param block_device_info:
Optional; Can be used to filter out devices which are
actually volumes.
:return:
json strings with below format::
"[{'path':'disk',
'type':'raw',
'virt_disk_size':'10737418240',
'backing_file':'backing_file',
'disk_size':'83886080'
'over_committed_disk_size':'10737418240'},
...]"
"""
raise NotImplementedError()
def refresh_security_group_rules(self, security_group_id):
"""This method is called after a change to security groups.
All security groups and their associated rules live in the datastore,
and calling this method should apply the updated rules to instances
running the specified security group.
An error should be raised if the operation cannot complete.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def refresh_instance_security_rules(self, instance):
"""Refresh security group rules
Gets called when an instance gets added to or removed from
the security group the instance is a member of or if the
group gains or loses a rule.
"""
raise NotImplementedError()
def reset_network(self, instance):
"""reset networking for specified instance."""
# TODO(Vek): Need to pass context in for access to auth_token
pass
def ensure_filtering_rules_for_instance(self, instance, network_info):
"""Setting up filtering rules and waiting for its completion.
To migrate an instance, filtering rules to hypervisors
and firewalls are inevitable on destination host.
( Waiting only for filtering rules to hypervisor,
since filtering rules to firewall rules can be set faster).
Concretely, the below method must be called.
- setup_basic_filtering (for nova-basic, etc.)
- prepare_instance_filter(for nova-instance-instance-xxx, etc.)
to_xml may have to be called since it defines PROJNET, PROJMASK.
but libvirt migrates those value through migrateToURI(),
so , no need to be called.
Don't use thread for this method since migration should
not be started when setting-up filtering rules operations
are not completed.
:param instance: nova.objects.instance.Instance object
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def filter_defer_apply_on(self):
"""Defer application of IPTables rules."""
pass
def filter_defer_apply_off(self):
"""Turn off deferral of IPTables rules and apply the rules now."""
pass
def unfilter_instance(self, instance, network_info):
"""Stop filtering instance."""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def set_admin_password(self, instance, new_pass):
"""Set the root password on the specified instance.
:param instance: nova.objects.instance.Instance
:param new_pass: the new password
"""
raise NotImplementedError()
def inject_file(self, instance, b64_path, b64_contents):
"""Writes a file on the specified instance.
The first parameter is an instance of nova.compute.service.Instance,
and so the instance is being specified as instance.name. The second
parameter is the base64-encoded path to which the file is to be
written on the instance; the third is the contents of the file, also
base64-encoded.
NOTE(russellb) This method is deprecated and will be removed once it
can be removed from nova.compute.manager.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def change_instance_metadata(self, context, instance, diff):
"""Applies a diff to the instance metadata.
This is an optional driver method which is used to publish
changes to the instance's metadata to the hypervisor. If the
hypervisor has no means of publishing the instance metadata to
the instance, then this method should not be implemented.
:param context: security context
:param instance: nova.objects.instance.Instance
"""
pass
def inject_network_info(self, instance, nw_info):
"""inject network info for specified instance."""
# TODO(Vek): Need to pass context in for access to auth_token
pass
def poll_rebooting_instances(self, timeout, instances):
"""Perform a reboot on all given 'instances'.
Reboots the given `instances` which are longer in the rebooting state
than `timeout` seconds.
:param int timeout:
The timeout (in seconds) for considering rebooting instances
to be stuck.
:param list instances:
A list of nova.objects.instance.Instance objects that have been
in rebooting state longer than the configured timeout.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
    def host_power_action(self, action):
        """Reboots, shuts down or powers up the host.

        :param str action:
            The action the host should perform. The valid actions are:
            "startup", "shutdown" and "reboot".
        :return: The result of the power action
        :rtype: str
        """
        raise NotImplementedError()
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window.
On start, it triggers the migration of all instances to other hosts.
Consider the combination with :func:`set_host_enabled`.
:param str host:
The name of the host whose maintenance mode should be changed.
:param bool mode:
If `True`, go into maintenance mode. If `False`, leave the
maintenance mode.
:return: "on_maintenance" if switched to maintenance mode or
"off_maintenance" if maintenance mode got left.
:rtype: str
"""
raise NotImplementedError()
def set_host_enabled(self, enabled):
"""Sets the ability of this host to accept new instances.
:param bool enabled:
If this is `True`, the host will accept new instances. If it is
`False`, the host won't accept new instances.
:return: If the host can accept further instances, return "enabled",
if further instances shouldn't be scheduled to this host,
return "disabled".
:rtype: str
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_host_uptime(self):
"""Returns the result of calling the Linux command `uptime` on this
host.
:return: A text which contains the uptime of this host since the
last boot.
:rtype: str
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def plug_vifs(self, instance, network_info):
"""Plug virtual interfaces (VIFs) into the given `instance` at
instance boot time.
The counter action is :func:`unplug_vifs`.
:param nova.objects.instance.Instance instance:
The instance which gets VIFs plugged.
:param nova.network.model.NetworkInfo network_info:
The object which contains information about the VIFs to plug.
:return: None
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def unplug_vifs(self, instance, network_info):
# NOTE(markus_z): 2015-08-18
# The compute manager doesn't use this interface, which seems odd
# since the manager should be the controlling thing here.
"""Unplug virtual interfaces (VIFs) from networks.
The counter action is :func:`plug_vifs`.
:param nova.objects.instance.Instance instance:
The instance which gets VIFs unplugged.
:param nova.network.model.NetworkInfo network_info:
The object which contains information about the VIFs to unplug.
:return: None
"""
raise NotImplementedError()
def get_host_cpu_stats(self):
"""Get the currently known host CPU stats.
:returns: a dict containing the CPU stat info, eg:
| {'kernel': kern,
| 'idle': idle,
| 'user': user,
| 'iowait': wait,
| 'frequency': freq},
where kern and user indicate the cumulative CPU time
(nanoseconds) spent by kernel and user processes
respectively, idle indicates the cumulative idle CPU time
(nanoseconds), wait indicates the cumulative I/O wait CPU
time (nanoseconds), since the host is booting up; freq
indicates the current CPU frequency (MHz). All values are
long integers.
"""
raise NotImplementedError()
def block_stats(self, instance, disk_id):
"""Return performance counters associated with the given disk_id on the
given instance. These are returned as [rd_req, rd_bytes, wr_req,
wr_bytes, errs], where rd indicates read, wr indicates write, req is
the total number of I/O requests made, bytes is the total number of
bytes transferred, and errs is the number of requests held up due to a
full pipeline.
All counters are long integers.
This method is optional. On some platforms (e.g. XenAPI) performance
statistics can be retrieved directly in aggregate form, without Nova
having to do the aggregation. On those platforms, this method is
unused.
Note that this function takes an instance ID.
"""
raise NotImplementedError()
def deallocate_networks_on_reschedule(self, instance):
"""Does the driver want networks deallocated on reschedule?"""
return False
def macs_for_instance(self, instance):
"""What MAC addresses must this instance have?
Some hypervisors (such as bare metal) cannot do freeform virtualisation
of MAC addresses. This method allows drivers to return a set of MAC
addresses that the instance is to have. allocate_for_instance will take
this into consideration when provisioning networking for the instance.
Mapping of MAC addresses to actual networks (or permitting them to be
freeform) is up to the network implementation layer. For instance,
with openflow switches, fixed MAC addresses can still be virtualised
onto any L2 domain, with arbitrary VLANs etc, but regular switches
require pre-configured MAC->network mappings that will match the
actual configuration.
Most hypervisors can use the default implementation which returns None.
Hypervisors with MAC limits should return a set of MAC addresses, which
will be supplied to the allocate_for_instance call by the compute
manager, and it is up to that call to ensure that all assigned network
details are compatible with the set of MAC addresses.
This is called during spawn_instance by the compute manager.
:return: None, or a set of MAC ids (e.g. set(['12:34:56:78:90:ab'])).
None means 'no constraints', a set means 'these and only these
MAC addresses'.
"""
return None
def dhcp_options_for_instance(self, instance):
"""Get DHCP options for this instance.
Some hypervisors (such as bare metal) require that instances boot from
the network, and manage their own TFTP service. This requires passing
the appropriate options out to the DHCP service. Most hypervisors can
use the default implementation which returns None.
This is called during spawn_instance by the compute manager.
Note that the format of the return value is specific to the Neutron
client API.
:return: None, or a set of DHCP options, eg:
| [{'opt_name': 'bootfile-name',
| 'opt_value': '/tftpboot/path/to/config'},
| {'opt_name': 'server-ip-address',
| 'opt_value': '1.2.3.4'},
| {'opt_name': 'tftp-server',
| 'opt_value': '1.2.3.4'}
| ]
"""
return None
def manage_image_cache(self, context, all_instances):
"""Manage the driver's local image cache.
Some drivers chose to cache images for instances on disk. This method
is an opportunity to do management of that cache which isn't directly
related to other calls into the driver. The prime example is to clean
the cache and remove images which are no longer of interest.
:param all_instances: nova.objects.instance.InstanceList
"""
pass
def add_to_aggregate(self, context, aggregate, host, **kwargs):
"""Add a compute host to an aggregate.
The counter action to this is :func:`remove_from_aggregate`
:param nova.context.RequestContext context:
The security context.
:param nova.objects.aggregate.Aggregate aggregate:
The aggregate which should add the given `host`
:param str host:
The name of the host to add to the given `aggregate`.
:param dict kwargs:
A free-form thingy...
:return: None
"""
# NOTE(jogo) Currently only used for XenAPI-Pool
raise NotImplementedError()
def remove_from_aggregate(self, context, aggregate, host, **kwargs):
"""Remove a compute host from an aggregate.
The counter action to this is :func:`add_to_aggregate`
:param nova.context.RequestContext context:
The security context.
:param nova.objects.aggregate.Aggregate aggregate:
The aggregate which should remove the given `host`
:param str host:
The name of the host to remove from the given `aggregate`.
:param dict kwargs:
A free-form thingy...
:return: None
"""
raise NotImplementedError()
def undo_aggregate_operation(self, context, op, aggregate,
host, set_error=True):
"""Undo for Resource Pools."""
raise NotImplementedError()
def get_volume_connector(self, instance):
"""Get connector information for the instance for attaching to volumes.
Connector information is a dictionary representing the ip of the
machine that will be making the connection, the name of the iscsi
initiator and the hostname of the machine as follows::
{
'ip': ip,
'initiator': initiator,
'host': hostname
}
"""
raise NotImplementedError()
def get_available_nodes(self, refresh=False):
    """Return the nodenames of all nodes managed by this compute service.

    This exists to support multi-compute-node drivers: such a driver
    returns every nodename it manages, while a single-node driver
    should return ``[hypervisor_hostname]``.

    :param refresh: when True, re-query the hypervisor instead of
        returning possibly cached data
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def node_is_available(self, nodename):
    """Return whether this compute service manages a particular node.

    First consults the (possibly cached) node list; on a miss, forces
    a refresh and checks once more before giving up.
    """
    if nodename in self.get_available_nodes():
        return True
    # Cache miss: refresh the node list and try one more time.
    refreshed = self.get_available_nodes(refresh=True)
    return nodename in refreshed
def get_per_instance_usage(self):
    """Report per-instance resource usage.

    :returns: dict mapping nova instance uuid -> dict of usage info;
        the base driver reports nothing and returns an empty dict
    """
    return {}
def instance_on_disk(self, instance):
    """Check whether an instance's files are accessible on this host.

    Used by rebuild for the HA implementation, where access to the
    instance's shared disk files must be validated.

    :param instance: nova.objects.instance.Instance to look up
    :returns: True if the instance's files are accessible on this host,
        False otherwise; the base driver always answers False
    """
    return False
def register_event_listener(self, callback):
    """Register a callback for asynchronous hypervisor events.

    The hypervisor layer will invoke *callback* with a single
    argument: an instance of the nova.virt.event.Event class.

    :param callback: callable accepting one nova.virt.event.Event
    """
    self._compute_event_callback = callback
def emit_event(self, event):
    """Dispatch a hypervisor event to the compute manager.

    Invokes the callback previously registered via
    :func:`register_event_listener`. Must only be called from a green
    thread. Events are dropped (with a debug log) when no callback is
    registered, and callback exceptions are logged rather than
    propagated.

    :param event: a nova.virt.event.Event instance
    :raises ValueError: if *event* is not a nova.virt.event.Event
    """
    callback = self._compute_event_callback
    if not callback:
        LOG.debug("Discarding event %s", six.text_type(event))
        return

    if not isinstance(event, virtevent.Event):
        raise ValueError(
            _("Event must be an instance of nova.virt.event.Event"))

    try:
        LOG.debug("Emitting event %s", six.text_type(event))
        callback(event)
    except Exception as ex:
        # Never let a listener failure escape into the caller.
        LOG.error(_LE("Exception dispatching event %(event)s: %(ex)s"),
                  {'event': event, 'ex': ex})
def delete_instance_files(self, instance):
    """Delete any lingering on-disk files for an instance.

    :param instance: nova.objects.instance.Instance
    :returns: True if the instance's files were removed from disk,
        False otherwise; the base driver keeps no files, so True
    """
    return True
@property
def need_legacy_block_device_info(self):
    """Whether this driver expects legacy-format block device info.

    Callers consult this to decide which block device info format to
    pass into methods that consume it; the base driver expects the
    legacy format.
    """
    return True
def volume_snapshot_create(self, context, instance, volume_id,
                           create_info):
    """Snapshot a volume attached to the given instance.

    The inverse operation is :func:`volume_snapshot_delete`.

    :param nova.context.RequestContext context:
        The security context.
    :param nova.objects.instance.Instance instance:
        The instance the volume is attached to.
    :param uuid volume_id:
        The volume to snapshot.
    :param create_info: data nova needs to attach to the volume, in
        the same format Cinder's initialize_connection() API returns.
        For a snapshot this includes the image file Cinder expects to
        be the active disk once the snapshot completes, plus any other
        data needed to create the snapshot.
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def volume_snapshot_delete(self, context, instance, volume_id,
                           snapshot_id, delete_info):
    """Delete a snapshot of a volume attached to the given instance.

    The inverse operation is :func:`volume_snapshot_create`.

    :param nova.context.RequestContext context:
        The security context.
    :param nova.objects.instance.Instance instance:
        The instance the volume is attached to.
    :param uuid volume_id:
        The attached volume the snapshot belongs to.
    :param uuid snapshot_id:
        The snapshot to delete.
    :param dict delete_info:
        Backend-specific data needed to complete the deletion; for
        qcow2-backed snapshots this includes the file being merged and,
        where appropriate, the file it is merged into.
    :return: None
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def default_root_device_name(self, instance, image_meta, root_bdm):
    """Provide a default root device name for this driver.

    :param nova.objects.instance.Instance instance:
        The instance whose root device name is wanted.
    :param nova.objects.ImageMeta image_meta:
        Metadata of the instance's image.
    :param nova.objects.BlockDeviceMapping root_bdm:
        Description of the root device.
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def default_device_names_for_instance(self, instance, root_device_name,
                                      *block_device_lists):
    """Fill in missing device names in the given block device mappings.

    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def get_device_name_for_instance(self, instance,
                                 bdms, block_device_obj):
    """Choose the next device name based on the block device mapping.

    :param instance: nova.objects.instance.Instance that the volume is
        requesting a device name for
    :param bdms: nova.objects.BlockDeviceMappingList for the instance
    :param block_device_obj: nova.objects.BlockDeviceMapping carrying
        all info about the requested block device; its device_name may
        be unset, in which case the driver implementation decides it
    :returns: the chosen device name
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def is_supported_fs_format(self, fs_type):
    """Check whether a file system format is supported by this driver.

    :param fs_type: file system type to check; valid values are
        defined in the disk API module
    :returns: False in the base driver
    """
    # NOTE(jichenjc): return False here so that every hypervisor must
    # declare its own supported file system types by overriding this
    # at its virt layer.
    return False
def quiesce(self, context, instance, image_meta):
    """Quiesce the specified instance in preparation for snapshots.

    Raises InstanceQuiesceNotSupported when the instance cannot be
    quiesced, and NovaException on other failures (e.g. agent
    timeout).

    :param context: request context
    :param instance: nova.objects.instance.Instance to quiesce
    :param nova.objects.ImageMeta image_meta:
        Metadata of the instance's image.
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def unquiesce(self, context, instance, image_meta):
    """Unquiesce the specified instance after snapshots are done.

    Raises InstanceQuiesceNotSupported when the instance cannot be
    quiesced, and NovaException on other failures (e.g. agent
    timeout).

    :param context: request context
    :param instance: nova.objects.instance.Instance to unquiesce
    :param nova.objects.ImageMeta image_meta:
        Metadata of the instance's image.
    :raises NotImplementedError: concrete drivers must implement this
    """
    raise NotImplementedError()
def network_binding_host_id(self, context, instance):
    """Return the host ID to associate with network ports.

    :param context: request context
    :param instance: nova.objects.instance.Instance the network ports
        will be associated with
    :returns: a string host ID (the instance's 'host' value, which may
        be None when unset)
    """
    host_id = instance.get('host')
    return host_id
def load_compute_driver(virtapi, compute_driver=None):
    """Load and instantiate a compute driver module.

    The driver is taken from the *compute_driver* argument when given,
    otherwise from the compute_driver configuration option. Driver
    constructors take a VirtAPI object as their first argument, which
    must be supplied here.

    :param virtapi: a VirtAPI instance
    :param compute_driver: driver name overriding the config option
    :returns: a ComputeDriver instance
    """
    driver_name = compute_driver or CONF.compute_driver
    if not driver_name:
        # Without a driver nothing can run; bail out of the service.
        LOG.error(_LE("Compute driver option required, but not specified"))
        sys.exit(1)

    LOG.info(_LI("Loading compute driver '%s'"), driver_name)
    try:
        driver = importutils.import_object_ns('nova.virt',
                                              driver_name,
                                              virtapi)
        return utils.check_isinstance(driver, ComputeDriver)
    except ImportError:
        LOG.exception(_LE("Unable to load the virtualization driver"))
        sys.exit(1)
def is_xenapi():
    """Return True when the configured compute driver is the XenAPI one."""
    configured = CONF.compute_driver
    return configured == 'xenapi.XenAPIDriver'
|
Snergster/virl-salt
|
openstack/nova/files/mitaka/nova+virt+driver.py
|
Python
|
gpl-2.0
| 64,183
|