| repo_name (string, 5–92 chars) | path (string, 4–221 chars) | copies (19 classes) | size (string, 4–6 chars) | content (string, 766–896k chars) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 32–997) | alpha_frac (float64, 0.25–0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5–13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
IIIT-Delhi/jobport
|
placement/jobport/urls.py
|
1
|
4188
|
# //=======================================================================
# // Copyright JobPort, IIIT Delhi 2015.
# // Distributed under the MIT License.
# // (See accompanying file LICENSE or copy at
# // http://opensource.org/licenses/MIT)
# //=======================================================================
# __author__ = 'naman'
from django.conf.urls import patterns, url
from jobport import views
handler404 = 'jobport.views.my_404_view'
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^needlogin/$', views.needlogin, name='needlogin'),
url(r'^newuser/$', views.newuser, name='newuser'),
url(r'^openjob/$', views.openjob, name='openjob'),
url(r'^profile/$', views.profile, name='profile'),
url(r'^stats/$', views.stats, name='stats'),
url(r'^uploadcgpa/$', views.uploadcgpa, name='uploadcgpa'),
url(r'^students/(?P<studentid>.*)/edit/$',
views.admineditstudent, name='admineditstudent'),
url(r'^job/(?P<jobid>\d+)/$',
views.jobpage, name='jobpage'),
url(r'^job/(?P<jobid>\d+)/apply/$',
views.jobapply, name='jobapply'),
url(r'^job/(?P<jobid>\d+)/withdraw/$',
views.jobwithdraw, name='jobwithdraw'),
url(r'^job/(?P<jobid>\d+)/edit/$',
views.jobedit, name='jobedit'),
url(r'^job/(?P<jobid>\d+)/delete/$',
views.jobdelete, name='jobdelete'),
url(r'^job/(?P<jobid>\d+)/sendselectedemail/$', views.sendselectedemail,
name='sendselectedemail'),
url(r'^job/(?P<jobid>\d+)/applicants/$',
views.jobapplicants, name='jobapplicants'),
url(r'^job/(?P<jobid>\d+)/getresume/$',
views.getresumes, name='jobgetresumes'),
url(r'^job/(?P<jobid>\d+)/getcsv/$',
views.getjobcsv, name='jobgetcsvs'),
url(r'^job/(?P<jobid>\d+)/selections/$',
views.adminjobselected, name='adminjobselected'),
url(r'^myapplications/$', views.myapplications,
name='myapplications'),
url(r'^batches/$', views.viewbatches, name='viewbatches'),
url(r'^openbatch/$', views.batchcreate, name='openbatch'),
url(r'^batch/(?P<batchid>\d+)/$',
views.batchpage, name='batchpage'),
url(r'^batch/(?P<batchid>\d+)/delete/$',
views.batchdestroy, name='batchdestroy'),
url(r'^batch/(?P<batchid>\d+)/edit/$',
views.batchedit, name='batchedit'),
url(r'^batch/(?P<batchid>\d+)/getbatchlist/$',
views.getbatchlist, name='getbatchlist'),
url(r'^batch/(?P<batchid>\d+)/addstudentstobatch/$', views.uploadstudentsinbatch,
name='uploadstudentsinbatch'),
url(r'^batch/(?P<batchid>\d+)/getbatchresume/$',
views.getbatchresumes, name='getbatchresumes'),
url(r'^feedback/$', views.feedback, name='feedback'),
url(r'^extraStuff/$', views.blockedUnplacedlist,
name='blockedUnplacedlist'),
url(r'files/resume/(.+)', views.fileview, name='fileview'),
url(r'files/jobfiles/(.+)',
views.docfileview, name='docfileview'),
url(r'search/results/$', views.search, name='search'),
# url(r'material.min.js.map$',views.test,name='test'),
)
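# Illustrative sketch: the named routes above can be resolved with reverse(),
# assuming this URLconf is included at the site root:
#   from django.core.urlresolvers import reverse
#   reverse('jobpage', kwargs={'jobid': 42})     # -> '/job/42/'
#   reverse('batchpage', kwargs={'batchid': 7})  # -> '/batch/7/'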
|
mit
| -534,863,692,878,665,340
| 57.985915
| 104
| 0.443649
| false
| 4.474359
| false
| true
| false
|
bklang/GO2
|
stats.py
|
1
|
3669
|
#
# stats class for Gig-o-Matic 2
#
# Aaron Oppenheimer
# 29 Jan 2014
#
from google.appengine.ext import ndb
from requestmodel import *
import webapp2_extras.appengine.auth.models
import webapp2
from debug import *
import assoc
import gig
import band
import member
import logging
import json
def stats_key(stats_name='stats_key'):
"""Constructs a Datastore key for a Stats entity with stats_name."""
return ndb.Key('stats', stats_name)
class BandStats(ndb.Model):
""" class to hold statistics """
band = ndb.KeyProperty()
date = ndb.DateProperty(auto_now_add=True)
number_members = ndb.IntegerProperty()
number_upcoming_gigs = ndb.IntegerProperty()
number_gigs_created_today = ndb.IntegerProperty()
def get_band_stats(the_band_key):
""" Return all the stats we have for a band """
stats_query = BandStats.query( BandStats.band==the_band_key).order(-BandStats.date)
the_stats = stats_query.fetch(limit=30)
return the_stats
def make_band_stats(the_band_key):
""" make a stats object for a band key and return it """
the_stats = BandStats(band=the_band_key)
all_member_keys = assoc.get_member_keys_of_band_key(the_band_key)
the_stats.number_members = len(all_member_keys)
logging.info("band {0} stats: {1} members".format(the_band_key.id(), the_stats.number_members))
all_gigs = gig.get_gigs_for_band_keys(the_band_key, keys_only=True)
the_stats.number_upcoming_gigs = len(all_gigs)
logging.info("band {0} stats: {1} upcoming gigs".format(the_band_key.id(), the_stats.number_upcoming_gigs))
today_gigs = gig.get_gigs_for_creation_date(the_band_key, the_stats.date)
the_stats.number_gigs_created_today = len(today_gigs)
the_stats.put()
return the_stats
def delete_band_stats(the_band_key):
""" delete all stats for a band """
stats_query = BandStats.query( BandStats.band==the_band_key)
the_stats = stats_query.fetch(keys_only=True)
ndb.delete_multi(the_stats)
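# Illustrative sketch of how these helpers fit together (the band key below
# stands in for any valid band ndb.Key):
#   make_band_stats(the_band_key)          # snapshot member/gig counts for today
#   recent = get_band_stats(the_band_key)  # up to 30 most recent BandStats rows
#   delete_band_stats(the_band_key)        # drop all stored stats for the band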
#####
#
# Page Handlers
#
#####
class StatsPage(BaseHandler):
"""Page for showing stats"""
@user_required
def get(self):
self._make_page(the_user=self.user)
def _make_page(self,the_user):
the_member_keys = member.get_all_members(order=False, keys_only=True, verified_only=True)
the_bands = band.get_all_bands()
stats=[]
inactive_bands=[]
for a_band in the_bands:
is_band_active=False
a_stat = get_band_stats(a_band.key)
the_count_data=[]
for s in a_stat:
if s.number_upcoming_gigs > 0:
is_band_active = True
the_count_data.append([s.date.year, s.date.month-1, s.date.day, s.number_members, s.number_upcoming_gigs])
if is_band_active:
the_count_data_json=json.dumps(the_count_data)
stats.append([a_band, the_count_data_json])
else:
inactive_bands.append(a_band)
template_args = {
'the_stats' : stats,
'num_members' : len(the_member_keys),
'num_bands' : len(the_bands),
'num_active_bands' : len(the_bands) - len(inactive_bands),
'inactive_bands' : inactive_bands
}
self.render_template('stats.html', template_args)
##########
#
# auto generate stats
#
##########
class AutoGenerateStats(BaseHandler):
""" automatically generate statistics """
def get(self):
the_band_keys = band.get_all_bands(keys_only = True)
for band_key in the_band_keys:
make_band_stats(band_key)
|
gpl-3.0
| 2,893,236,756,040,592,400
| 29.330579
| 122
| 0.620878
| false
| 3.305405
| false
| false
| false
|
ray-project/ray
|
rllib/env/wrappers/dm_control_wrapper.py
|
1
|
7328
|
"""
DeepMind Control Suite Wrapper directly sourced from:
https://github.com/denisyarats/dmc2gym
MIT License
Copyright (c) 2020 Denis Yarats
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from gym import core, spaces
try:
from dm_env import specs
except ImportError:
specs = None
try:
# Suppress MuJoCo warning (dm_control uses absl logging).
import absl.logging
absl.logging.set_verbosity("error")
from dm_control import suite
except (ImportError, OSError):
suite = None
import numpy as np
def _spec_to_box(spec):
def extract_min_max(s):
assert s.dtype == np.float64 or s.dtype == np.float32
dim = int(np.prod(s.shape))
if type(s) == specs.Array:
bound = np.inf * np.ones(dim, dtype=np.float32)
return -bound, bound
elif type(s) == specs.BoundedArray:
zeros = np.zeros(dim, dtype=np.float32)
return s.minimum + zeros, s.maximum + zeros
mins, maxs = [], []
for s in spec:
mn, mx = extract_min_max(s)
mins.append(mn)
maxs.append(mx)
low = np.concatenate(mins, axis=0)
high = np.concatenate(maxs, axis=0)
assert low.shape == high.shape
return spaces.Box(low, high, dtype=np.float32)
def _flatten_obs(obs):
obs_pieces = []
for v in obs.values():
flat = np.array([v]) if np.isscalar(v) else v.ravel()
obs_pieces.append(flat)
return np.concatenate(obs_pieces, axis=0)
class DMCEnv(core.Env):
def __init__(self,
domain_name,
task_name,
task_kwargs=None,
visualize_reward=False,
from_pixels=False,
height=64,
width=64,
camera_id=0,
frame_skip=2,
environment_kwargs=None,
channels_first=True,
preprocess=True):
self._from_pixels = from_pixels
self._height = height
self._width = width
self._camera_id = camera_id
self._frame_skip = frame_skip
self._channels_first = channels_first
self.preprocess = preprocess
if specs is None:
raise RuntimeError((
"The `specs` module from `dm_env` was not imported. Make sure "
"`dm_env` is installed and visible in the current python "
"environment."))
if suite is None:
raise RuntimeError(
("The `suite` module from `dm_control` was not imported. Make "
"sure `dm_control` is installed and visible in the current "
"python enviornment."))
# create task
self._env = suite.load(
domain_name=domain_name,
task_name=task_name,
task_kwargs=task_kwargs,
visualize_reward=visualize_reward,
environment_kwargs=environment_kwargs)
# true and normalized action spaces
self._true_action_space = _spec_to_box([self._env.action_spec()])
self._norm_action_space = spaces.Box(
low=-1.0,
high=1.0,
shape=self._true_action_space.shape,
dtype=np.float32)
# create observation space
if from_pixels:
shape = [3, height,
width] if channels_first else [height, width, 3]
self._observation_space = spaces.Box(
low=0, high=255, shape=shape, dtype=np.uint8)
if preprocess:
self._observation_space = spaces.Box(
low=-0.5, high=0.5, shape=shape, dtype=np.float32)
else:
self._observation_space = _spec_to_box(
self._env.observation_spec().values())
self._state_space = _spec_to_box(self._env.observation_spec().values())
self.current_state = None
def __getattr__(self, name):
return getattr(self._env, name)
def _get_obs(self, time_step):
if self._from_pixels:
obs = self.render(
height=self._height,
width=self._width,
camera_id=self._camera_id)
if self._channels_first:
obs = obs.transpose(2, 0, 1).copy()
if self.preprocess:
obs = obs / 255.0 - 0.5
else:
obs = _flatten_obs(time_step.observation)
return obs
def _convert_action(self, action):
action = action.astype(np.float64)
true_delta = self._true_action_space.high - self._true_action_space.low
norm_delta = self._norm_action_space.high - self._norm_action_space.low
action = (action - self._norm_action_space.low) / norm_delta
action = action * true_delta + self._true_action_space.low
action = action.astype(np.float32)
return action
@property
def observation_space(self):
return self._observation_space
@property
def state_space(self):
return self._state_space
@property
def action_space(self):
return self._norm_action_space
def step(self, action):
assert self._norm_action_space.contains(action)
action = self._convert_action(action)
assert self._true_action_space.contains(action)
reward = 0
extra = {"internal_state": self._env.physics.get_state().copy()}
for _ in range(self._frame_skip):
time_step = self._env.step(action)
reward += time_step.reward or 0
done = time_step.last()
if done:
break
obs = self._get_obs(time_step)
self.current_state = _flatten_obs(time_step.observation)
extra["discount"] = time_step.discount
return obs, reward, done, extra
def reset(self):
time_step = self._env.reset()
self.current_state = _flatten_obs(time_step.observation)
obs = self._get_obs(time_step)
return obs
def render(self, mode="rgb_array", height=None, width=None, camera_id=0):
assert mode == "rgb_array", "only rgb_array mode is supported"
height = height or self._height
width = width or self._width
camera_id = camera_id or self._camera_id
return self._env.physics.render(
height=height, width=width, camera_id=camera_id)
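# Minimal usage sketch (assumes dm_control and dm_env are installed; the
# domain/task names are standard suite examples):
#   env = DMCEnv(domain_name="cartpole", task_name="swingup", from_pixels=False)
#   obs = env.reset()
#   obs, reward, done, info = env.step(env.action_space.sample())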
|
apache-2.0
| -323,708,426,664,062,600
| 34.572816
| 79
| 0.600573
| false
| 3.97397
| false
| false
| false
|
nathanbjenx/cairis
|
cairis/bin/gt2pc.py
|
1
|
6864
|
#!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import string
import argparse
import csv
__author__ = 'Shamal Faily'
def remspace(my_str):
if len(my_str) < 2: # returns ' ' unchanged
return my_str
if my_str[-1] == '\n':
if my_str[-2] == ' ':
return my_str[:-2] + '\n'
if my_str[-1] == ' ':
return my_str[:-1]
return my_str
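# Worked examples of the trimming above:
#   remspace('persona \n')  # -> 'persona\n' (space before the newline dropped)
#   remspace('persona ')    # -> 'persona'   (trailing space dropped)
#   remspace(' ')           # -> ' '         (too short, returned unchanged)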
def main(args=None):
parser = argparse.ArgumentParser(description='Computer Aided Integration of Requirements and Information Security - Grounded Theory to Persona Case converter')
parser.add_argument('modelFile',help='model file to create')
parser.add_argument('--context',dest='contextName',help='model context')
parser.add_argument('--originator',dest='originatorName',help='model originator')
parser.add_argument('--concepts',dest='conceptsFile',help='grounded theory model concepts')
parser.add_argument('--propositions',dest='propositionsFile',help='Propositions associated with grounded theory model quotations')
parser.add_argument('--characteristics',dest='characteristicsFile',help='Persona characteristics associated with grounded theory model associations')
parser.add_argument('--narratives',dest='narrativesFile',help='Persona narratives')
args = parser.parse_args()
xmlHdr = '<?xml version="1.0"?>\n<!DOCTYPE cairis_model PUBLIC "-//CAIRIS//DTD MODEL 1.0//EN" "http://cairis.org/dtd/cairis_model.dtd">\n\n<cairis_model>\n\n'
xmlHdr += '<cairis>\n <project_settings name="' + args.contextName + '">\n <contributors>\n <contributor first_name="None" surname="None" affiliation="' + args.originatorName + '" role="Scribe" />\n </contributors>\n </project_settings>\n <environment name="' + args.contextName + '" short_code="' + args.contextName + '">\n <definition>' + args.contextName + '</definition>\n <asset_values>\n <none>TBC</none>\n <low>TBC</low>\n <medium>TBC</medium>\n <high>TBC</high>\n </asset_values>\n </environment>\n</cairis>\n\n<riskanalysis>\n <role name="Undefined" type="Stakeholder" short_code="UNDEF">\n <description>Undefined</description>\n </role>\n</riskanalysis>\n\n<usability>\n'
xmlBuf = ''
conceptDict = {}
with open(args.conceptsFile,'r') as cFile:
cReader = csv.reader(cFile, delimiter = ',', quotechar='"')
for row in cReader:
edCode = row[0]
edName = row[1] + ' GT concept'
conceptDict[edCode] = edName
edVersion = row[2]
edDate = row[3]
edAuthors = row[4]
xmlBuf += '<external_document name=\"' + edName + '\" version=\"' + edVersion + '\" date=\"' + edDate + '\" authors=\"' + edAuthors + '\">\n <description>' + edName + '</description>\n</external_document>\n'
xmlBuf += '\n'
propDict = {}
with open(args.propositionsFile,'r') as pFile:
pReader = csv.reader(pFile, delimiter = ',', quotechar='"')
for row in pReader:
pId = row[0]
edCode,pNo = pId.split('-')
docName = conceptDict[edCode]
pName = row[1]
pDesc = row[2]
pContrib = row[3]
propDict[pId] = (pName,pDesc)
xmlBuf += '<document_reference name=\"' + pName + '\" contributor=\"' + pContrib + '\" document=\"' + docName + '\">\n <excerpt>' + pDesc + '</excerpt>\n</document_reference>\n'
xmlBuf += '\n'
xmlBuf += '\n'
bvDict = {}
bvDict['ACT'] = 'Activities'
bvDict['ATT'] = 'Attitudes'
bvDict['APT'] = 'Aptitudes'
bvDict['MOT'] = 'Motivations'
bvDict['SKI'] = 'Skills'
bvDict['INT'] = 'Intrinsic'
bvDict['CON'] = 'Contextual'
personaNames = set([])
pcf = open(args.characteristicsFile,"r")
for li in pcf.readlines():
li = li.strip()
pce = li.split(',')
gtr = pce[0]
pcName = pce[1]
labelName = pce[2]
pName = pce[3]
if pName == 'NONE':
continue
personaNames.add(pName)
bvName = bvDict[pce[4]]
gcList = pce[5].split(' ')
gList = []
for gc in gcList:
if gc != '':
gVal = propDict[gc]
gList.append((gVal[0],gVal[1],'document'))
wcList = pce[6].split(' ')
wList = []
for wc in wcList:
if wc != '':
wVal = propDict[wc]
wList.append((wVal[0],wVal[1],'document'))
modQual = pce[7]
rcList = pce[8].split(' ')
rList = []
for rc in rcList:
if rc != '':
rVal = propDict[rc]
rList.append((rVal[0],rVal[1],'document'))
xmlBuf += '<persona_characteristic persona=\"' + pName + '\" behavioural_variable=\"' + bvName + '\" modal_qualifier=\"' + modQual + '\" >\n <definition>' + pcName + '</definition>\n'
for g in gList:
xmlBuf += ' <grounds type=\"document\" reference=\"' + g[0] + '\" />\n'
for w in wList:
xmlBuf += ' <warrant type=\"document\" reference=\"' + w[0] + '\" />\n'
for r in rList:
xmlBuf += ' <rebuttal type=\"document\" reference=\"' + r[0] + '\" />\n'
xmlBuf += '</persona_characteristic>\n'
pcf.close()
pnDict = {}
with open(args.narrativesFile,'r') as nFile:
nReader = csv.reader(nFile, delimiter = ',', quotechar='"')
for row in nReader:
pnDict[(row[0],row[1])] = row[2]
pHdr = ''
for personaName in personaNames:
pHdr += '<persona name=\"' + personaName + '\" type=\"Primary\" assumption_persona=\"FALSE\" image=\"\" >\n <activities>' + pnDict[(personaName,'ACT')] + '</activities>\n <attitudes>' + pnDict[(personaName,'ATT')] + '</attitudes>\n <aptitudes>' + pnDict[(personaName,'APT')] + '</aptitudes>\n <motivations>' + pnDict[(personaName,'MOT')] + '</motivations>\n <skills>' + pnDict[(personaName,'SKI')] + '</skills>\n <intrinsic>' + pnDict[(personaName,'INT')] + '</intrinsic>\n <contextual>' + pnDict[(personaName,'CON')] + '</contextual>\n<persona_environment name=\"' + args.contextName + '\" is_direct="TRUE">\n <persona_role name="Undefined" />\n <narrative>Nothing stipulated</narrative>\n</persona_environment>\n</persona>\n\n'
xmlBuf = xmlHdr + '\n' + pHdr + '\n' + xmlBuf + '\n</usability>\n</cairis_model>'
xmlOut = open(args.modelFile,"w")
xmlOut.write(xmlBuf)
xmlOut.close()
if __name__ == '__main__':
main()
|
apache-2.0
| -7,310,285,682,522,548,000
| 43.571429
| 741
| 0.626748
| false
| 3.141419
| false
| false
| false
|
ping/instagram_private_api
|
instagram_web_api/http.py
|
1
|
3461
|
from io import BytesIO
import sys
import codecs
import mimetypes
import random
import string
from .compat import compat_cookiejar, compat_pickle
class ClientCookieJar(compat_cookiejar.CookieJar):
"""Custom CookieJar that can be pickled to/from strings
"""
def __init__(self, cookie_string=None, policy=None):
compat_cookiejar.CookieJar.__init__(self, policy)
if cookie_string:
if isinstance(cookie_string, bytes):
self._cookies = compat_pickle.loads(cookie_string)
else:
self._cookies = compat_pickle.loads(cookie_string.encode('utf-8'))
@property
def auth_expires(self):
try:
return min([
cookie.expires for cookie in self
if cookie.name in ('sessionid', 'ds_user_id', 'ds_user')
and cookie.expires])
except ValueError:
# empty sequence
pass
return None
@property
def expires_earliest(self):
"""For backward compatibility"""
return self.auth_expires
def dump(self):
return compat_pickle.dumps(self._cookies)
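# Round-trip sketch: a jar serialised with dump() can be restored by passing the
# pickled string back to the constructor (names below are placeholders):
#   saved = jar.dump()
#   restored = ClientCookieJar(cookie_string=saved)
#   restored.auth_expires   # earliest expiry of the session cookies, or None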
class MultipartFormDataEncoder(object):
"""
Modified from
http://stackoverflow.com/questions/1270518/python-standard-library-to-post-multipart-form-data-encoded-data
"""
def __init__(self, boundary=None):
self.boundary = boundary or \
''.join(random.choice(string.ascii_letters + string.digits + '_-') for _ in range(30))
self.content_type = 'multipart/form-data; boundary={}'.format(self.boundary)
@classmethod
def u(cls, s):
if sys.hexversion < 0x03000000 and isinstance(s, str):
s = s.decode('utf-8')
if sys.hexversion >= 0x03000000 and isinstance(s, bytes):
s = s.decode('utf-8')
return s
def iter(self, fields, files):
"""
:param fields: sequence of (name, value) elements for regular form fields
:param files: sequence of (name, filename, contenttype, filedata) elements for data to be uploaded as files
:return:
"""
encoder = codecs.getencoder('utf-8')
for (key, value) in fields:
key = self.u(key)
yield encoder('--{}\r\n'.format(self.boundary))
yield encoder(self.u('Content-Disposition: form-data; name="{}"\r\n').format(key))
yield encoder('\r\n')
if isinstance(value, (int, float)):
value = str(value)
yield encoder(self.u(value))
yield encoder('\r\n')
for (key, filename, contenttype, fd) in files:
key = self.u(key)
filename = self.u(filename)
yield encoder('--{}\r\n'.format(self.boundary))
yield encoder(self.u('Content-Disposition: form-data; name="{}"; filename="{}"\r\n').format(key, filename))
yield encoder('Content-Type: {}\r\n'.format(
contenttype or mimetypes.guess_type(filename)[0] or 'application/octet-stream'))
yield encoder('Content-Transfer-Encoding: binary\r\n')
yield encoder('\r\n')
yield (fd, len(fd))
yield encoder('\r\n')
yield encoder('--{}--\r\n'.format(self.boundary))
def encode(self, fields, files):
body = BytesIO()
for chunk, _ in self.iter(fields, files):
body.write(chunk)
return self.content_type, body.getvalue()
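# Usage sketch (field and file values are made up for illustration):
#   encoder = MultipartFormDataEncoder()
#   fields = [('caption', 'hello'), ('count', 3)]
#   files = [('photo', 'photo.jpg', 'image/jpeg', jpeg_bytes)]
#   content_type, body = encoder.encode(fields, files)
#   # content_type goes into the Content-Type header; body is the raw bytes payload.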
|
mit
| -6,506,087,738,307,260,000
| 35.819149
| 119
| 0.588847
| false
| 4.091017
| false
| false
| false
|
catalpainternational/OIPA
|
OIPA/iati/management/commands/organisation_name_updater.py
|
1
|
1062
|
from __future__ import print_function
from builtins import object
from django.core.management.base import BaseCommand
from iati.models import Organisation
from iati_synchroniser.models import Publisher
class Command(BaseCommand):
option_list = BaseCommand.option_list
counter = 0
def handle(self, *args, **options):
updater = OrganisationNameUpdater()
updater.update()
class OrganisationNameUpdater(object):
def update(self):
for o in Organisation.objects.filter(name=None):
try:
organisation_code = o.code
if Publisher.objects.filter(org_id=organisation_code).exists():
current_publisher = Publisher.objects.get(org_id=organisation_code)
if o.abbreviation is None:
o.abbreviation = current_publisher.org_abbreviate
o.name = current_publisher.org_name
o.save()
except Exception as e:
print("error in update_organisation_names:", e)
return True
|
agpl-3.0
| 1,909,566,768,982,811,000
| 32.1875
| 87
| 0.629002
| false
| 4.481013
| false
| false
| false
|
jpleger/django-analystnotes
|
analystnotes/migrations/0001_initial.py
|
1
|
1830
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Command',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('cmd', models.CharField(max_length=2048, verbose_name=b'Command', db_index=True)),
('stdout', models.TextField(null=True, verbose_name=b'Standard Out', blank=True)),
('stderr', models.TextField(null=True, verbose_name=b'Standard Error', blank=True)),
('execute_time', models.DateTimeField(auto_now_add=True, verbose_name=b'Process Execute Time')),
('exitcode', models.IntegerField(verbose_name=b'Process Exit Code', db_index=True)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, verbose_name=b'Name of project', db_index=True)),
('slug', models.SlugField(unique=True, max_length=128, verbose_name=b'Slug Name')),
('created', models.DateTimeField(auto_now_add=True, verbose_name=b'Date project created', db_index=True)),
('owner', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='command',
name='project',
field=models.ForeignKey(to='analystnotes.Project'),
),
]
|
bsd-3-clause
| 8,535,255,927,093,436,000
| 43.634146
| 122
| 0.596721
| false
| 4.187643
| false
| false
| false
|
xissy/titanium-mobile-sdk
|
module/iphone/templates/build.py
|
1
|
6525
|
#!/usr/bin/env python
#
# Appcelerator Titanium Module Packager
#
#
import os, subprocess, sys, glob, string
import zipfile
from datetime import date
cwd = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
os.chdir(cwd)
required_module_keys = ['name','version','moduleid','description','copyright','license','platform','minsdk']
module_defaults = {
'description':'My module',
'author': 'Your Name',
'license' : 'Specify your license',
'copyright' : 'Copyright (c) %s by Your Company' % str(date.today().year),
}
module_license_default = "TODO: place your license here and we'll include it in the module distribution"
def find_sdk(config):
sdk = config['TITANIUM_SDK']
return os.path.expandvars(os.path.expanduser(sdk))
def replace_vars(config,token):
idx = token.find('$(')
while idx != -1:
idx2 = token.find(')',idx+2)
if idx2 == -1: break
key = token[idx+2:idx2]
if not config.has_key(key): break
token = token.replace('$(%s)' % key, config[key])
idx = token.find('$(')
return token
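# Worked example of the variable expansion above (config values are placeholders):
#   replace_vars({'TITANIUM_SDK': '/opt/titanium'}, '$(TITANIUM_SDK)/iphone')
#   # -> '/opt/titanium/iphone'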
def read_ti_xcconfig():
contents = open(os.path.join(cwd,'titanium.xcconfig')).read()
config = {}
for line in contents.splitlines(False):
line = line.strip()
if line[0:2]=='//': continue
idx = line.find('=')
if idx > 0:
key = line[0:idx].strip()
value = line[idx+1:].strip()
config[key] = replace_vars(config,value)
return config
def generate_doc(config):
docdir = os.path.join(cwd,'documentation')
if not os.path.exists(docdir):
print "Couldn't find documentation file at: %s" % docdir
return None
try:
import markdown2 as markdown
except ImportError:
import markdown
documentation = []
for file in os.listdir(docdir):
if file in ignoreFiles or os.path.isdir(os.path.join(docdir, file)):
continue
md = open(os.path.join(docdir,file)).read()
html = markdown.markdown(md)
documentation.append({file:html});
return documentation
def compile_js(manifest,config):
js_file = os.path.join(cwd,'assets','__MODULE_ID__.js')
if not os.path.exists(js_file): return
from compiler import Compiler
try:
import json
except:
import simplejson as json
path = os.path.basename(js_file)
compiler = Compiler(cwd, manifest['moduleid'], manifest['name'], 'commonjs')
method = compiler.compile_commonjs_file(path,js_file)
exports = open('metadata.json','w')
json.dump({'exports':compiler.exports }, exports)
exports.close()
method += '\treturn filterDataInRange([NSData dataWithBytesNoCopy:data length:sizeof(data) freeWhenDone:NO], ranges[0]);'
f = os.path.join(cwd,'Classes','___PROJECTNAMEASIDENTIFIER___ModuleAssets.m')
c = open(f).read()
templ_search = ' moduleAsset\n{\n'
idx = c.find(templ_search) + len(templ_search)
before = c[0:idx]
after = """
}
@end
"""
newc = before + method + after
if newc!=c:
x = open(f,'w')
x.write(newc)
x.close()
def die(msg):
print msg
sys.exit(1)
def warn(msg):
print "[WARN] %s" % msg
def validate_license():
c = open(os.path.join(cwd,'LICENSE')).read()
if c.find(module_license_default)!=-1:
warn('please update the LICENSE file with your license text before distributing')
def validate_manifest():
path = os.path.join(cwd,'manifest')
if not os.path.exists(path): die("missing %s" % path)
f = open(path)
manifest = {}
for line in f.readlines():
line = line.strip()
if line[0:1]=='#': continue
if line.find(':') < 0: continue
key,value = line.split(':')
manifest[key.strip()]=value.strip()
for key in required_module_keys:
if not manifest.has_key(key): die("missing required manifest key '%s'" % key)
if module_defaults.has_key(key):
defvalue = module_defaults[key]
curvalue = manifest[key]
if curvalue==defvalue: warn("please update the manifest key: '%s' to a non-default value" % key)
return manifest,path
ignoreFiles = ['.DS_Store','.gitignore','libTitanium.a','titanium.jar','README','__MODULE_ID__.js']
ignoreDirs = ['.DS_Store','.svn','.git','CVSROOT']
def zip_dir(zf,dir,basepath,ignore=[]):
for root, dirs, files in os.walk(dir):
for name in ignoreDirs:
if name in dirs:
dirs.remove(name) # don't visit ignored directories
for file in files:
if file in ignoreFiles: continue
e = os.path.splitext(file)
if len(e)==2 and e[1]=='.pyc':continue
from_ = os.path.join(root, file)
to_ = from_.replace(dir, basepath, 1)
zf.write(from_, to_)
def glob_libfiles():
files = []
for libfile in glob.glob('build/**/*.a'):
if libfile.find('Release-')!=-1:
files.append(libfile)
return files
def build_module(manifest,config):
from tools import ensure_dev_path
ensure_dev_path()
rc = os.system("xcodebuild -sdk iphoneos -configuration Release")
if rc != 0:
die("xcodebuild failed")
rc = os.system("xcodebuild -sdk iphonesimulator -configuration Release")
if rc != 0:
die("xcodebuild failed")
# build the merged library using lipo
moduleid = manifest['moduleid']
libpaths = ''
for libfile in glob_libfiles():
libpaths+='%s ' % libfile
os.system("lipo %s -create -output build/lib%s.a" %(libpaths,moduleid))
def package_module(manifest,mf,config):
name = manifest['name'].lower()
moduleid = manifest['moduleid'].lower()
version = manifest['version']
modulezip = '%s-iphone-%s.zip' % (moduleid,version)
if os.path.exists(modulezip): os.remove(modulezip)
zf = zipfile.ZipFile(modulezip, 'w', zipfile.ZIP_DEFLATED)
modulepath = 'modules/iphone/%s/%s' % (moduleid,version)
zf.write(mf,'%s/manifest' % modulepath)
libname = 'lib%s.a' % moduleid
zf.write('build/%s' % libname, '%s/%s' % (modulepath,libname))
docs = generate_doc(config)
if docs!=None:
for doc in docs:
for file, html in doc.iteritems():
filename = string.replace(file,'.md','.html')
zf.writestr('%s/documentation/%s'%(modulepath,filename),html)
for dn in ('assets','example','platform'):
if os.path.exists(dn):
zip_dir(zf,dn,'%s/%s' % (modulepath,dn),['README'])
zf.write('LICENSE','%s/LICENSE' % modulepath)
zf.write('module.xcconfig','%s/module.xcconfig' % modulepath)
exports_file = 'metadata.json'
if os.path.exists(exports_file):
zf.write(exports_file, '%s/%s' % (modulepath, exports_file))
zf.close()
if __name__ == '__main__':
manifest,mf = validate_manifest()
validate_license()
config = read_ti_xcconfig()
sdk = find_sdk(config)
sys.path.insert(0,os.path.join(sdk,'iphone'))
sys.path.append(os.path.join(sdk, "common"))
compile_js(manifest,config)
build_module(manifest,config)
package_module(manifest,mf,config)
sys.exit(0)
|
apache-2.0
| 4,836,279,273,150,975,000
| 28.794521
| 122
| 0.677548
| false
| 2.903872
| true
| false
| false
|
nens/threedi-qgis-plugin
|
tool_commands/control_structures/create_measuring_group_dialog.py
|
1
|
14526
|
from pathlib import Path
from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QAbstractItemView
from qgis.PyQt.QtWidgets import QDialog
from qgis.PyQt.QtWidgets import QPushButton
from qgis.PyQt.QtWidgets import QTableWidget
from qgis.PyQt.QtWidgets import QTableWidgetItem
from qgis.PyQt.QtWidgets import QVBoxLayout
from qgis.PyQt.QtWidgets import QWidget
from ThreeDiToolbox.tool_commands.control_structures.main import ControlledStructures
from ThreeDiToolbox.utils.threedi_database import get_database_properties
from ThreeDiToolbox.utils.threedi_database import get_databases
import logging
logger = logging.getLogger(__name__)
ui_file = Path(__file__).parent / "create_measuring_group_dialog.ui"
assert ui_file.is_file()
FORM_CLASS, _ = uic.loadUiType(ui_file)
class CreateMeasuringGroupDialogWidget(QDialog, FORM_CLASS):
def __init__(
self,
parent=None,
command=None,
db_key=None,
measuring_group_id=None,
dockwidget_controlled_structures=None,
):
"""Constructor
# TODO: fix arguments documentation.
Args:
parent: Qt parent Widget
iface: QGiS interface
command: Command instance with a run_it method which will be called
on acceptance of the dialog
"""
super().__init__(parent)
# Show gui
self.setupUi(self)
self.measuring_group_id = measuring_group_id
self.command = command
self.dockwidget_controlled_structures = dockwidget_controlled_structures
self.db_key = db_key
self.databases = get_databases()
self.db = get_database_properties(self.db_key)
self.control_structure = ControlledStructures(
flavor=self.db["db_entry"]["db_type"]
)
self.setup_tablewidget()
self.update_ids()
self.connect_signals()
def on_accept(self):
"""Accept and run the Command.run_it method."""
self.save_measuring_group()
self.accept()
def on_reject(self):
"""Cancel"""
self.reject()
logger.debug("Reject")
def closeEvent(self, event):
"""
Close widget, called by Qt on close
:param event: QEvent, close event
"""
self.buttonbox.accepted.disconnect(self.on_accept)
self.buttonbox.rejected.disconnect(self.on_reject)
event.accept()
def setup_tablewidget(self):
tablewidget = self.tablewidget_measuring_point
tablewidget.setCellWidget(0, 0, self.combobox_input_measuring_point_table)
tablewidget.setCellWidget(0, 1, self.combobox_input_measuring_point_id)
tablewidget.setCellWidget(0, 3, self.pushbutton_input_measuring_point_new)
def update_ids(self):
"""Setup the id's for the measuring group and measuring points."""
# Set the id of the measuring group
self.label_measuring_group_id_info.setText(self.measuring_group_id)
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Set all id's of the measuring groups
self.combobox_measuring_group_load.clear()
list_of_measuring_group_ids = self.control_structure.get_attributes(
table_name="v2_control_measure_group", attribute_name="id"
)
self.combobox_measuring_group_load.addItems(list_of_measuring_group_ids)
# Set all id's of the connection nodes
self.combobox_input_measuring_point_id.clear()
list_of_connection_node_ids = self.control_structure.get_attributes(
table_name="v2_connection_nodes", attribute_name="id"
)
self.combobox_input_measuring_point_id.addItems(list_of_connection_node_ids)
def connect_signals(self):
"""Connect the signals."""
self.pushbutton_measuring_group_load.clicked.connect(self.load_measuring_group)
self.pushbutton_input_measuring_point_new.clicked.connect(
self.create_new_measuring_point
)
self.buttonbox.accepted.connect(self.on_accept)
self.buttonbox.rejected.connect(self.on_reject)
def create_new_measuring_point(self):
# Get the model
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Get last id of measure map or set to 0; set to +1
table_name = "v2_control_measure_map"
attribute_name = "MAX(id)"
try:
max_id_measure_map = int(
self.control_structure.get_attributes(table_name, attribute_name)[0]
)
except ValueError:
logger.exception("Error determining max id, using 0")
max_id_measure_map = 0
new_max_id_measure_map = max_id_measure_map + 1
# Populate the new row in the table
self.populate_measuring_point_row(new_max_id_measure_map)
def populate_measuring_point_row(self, id_measuring_point):
"""
Populate a row from the measuring point table.
Args:
(str) id_measuring_point: The id of the measuring point."""
tablewidget = self.tablewidget_measuring_point
# Always put the new row on top.
row_position = 1
tablewidget.insertRow(row_position)
# tablewidget.setItem(row_position, 0, measuring_point_id)
measuring_point_table_widget = QTableWidgetItem(
self.combobox_input_measuring_point_table.currentText()
)
tablewidget.setItem(row_position, 0, measuring_point_table_widget)
measuring_point_table_id_widget = QTableWidgetItem(
self.combobox_input_measuring_point_id.currentText()
)
tablewidget.setItem(row_position, 1, measuring_point_table_id_widget)
try:
measuring_point_weight = tablewidget.item(0, 2).text()
except AttributeError:
logger.exception(
"Error determining measuring point weight, using emty string"
)
measuring_point_weight = ""
tablewidget.setItem(row_position, 2, QTableWidgetItem(measuring_point_weight))
measuring_point_remove_widget = QPushButton("Remove")
measuring_point_remove_widget.clicked.connect(self.remove_measuring_point_row)
tablewidget.setCellWidget(row_position, 3, measuring_point_remove_widget)
def remove_measuring_point_row(self):
"""Remove a row from the measuring point table."""
tablewidget = self.tablewidget_measuring_point
row_number = tablewidget.currentRow()
# Remove measuring point from dockwidget
# Don't remove the first row.
BUTTON_ROW = 0
if row_number != BUTTON_ROW:
tablewidget.removeRow(row_number)
def load_measuring_group(self):
"""Load a measuring group in the tablewidget."""
# Remove all current rows except the first.
tablewidget = self.tablewidget_measuring_point
row_count = tablewidget.rowCount()
for row in range(row_count - 1):
tablewidget.removeRow(1)
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Get all the measuring points from a certain measure group
table_name = "v2_control_measure_map"
attribute_name = "*"
where_clause = "measure_group_id={}".format(
self.combobox_measuring_group_load.currentText()
)
measure_groups = self.control_structure.get_features_with_where_clause(
table_name=table_name, attribute_name=attribute_name, where=where_clause
)
for measure_group in measure_groups:
row_position = self.tablewidget_measuring_point.rowCount()
self.tablewidget_measuring_point.insertRow(row_position)
self.tablewidget_measuring_point.setItem(
row_position, 0, QTableWidgetItem(str(measure_group[2]))
)
self.tablewidget_measuring_point.setItem(
row_position, 1, QTableWidgetItem(str(measure_group[3]))
)
self.tablewidget_measuring_point.setItem(
row_position, 2, QTableWidgetItem(str(measure_group[4]))
)
measuring_point_remove = QPushButton("Remove")
measuring_point_remove.clicked.connect(self.remove_measuring_point)
self.tablewidget_measuring_point.setCellWidget(
row_position, 3, measuring_point_remove
)
def remove_measuring_point(self):
"""Remove a certain measuring point from the tablewidget."""
tablewidget = self.tablewidget_measuring_point
row_number = tablewidget.currentRow()
tablewidget.removeRow(row_number)
def save_measuring_group(self):
"""Save the measuring group in the database."""
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Insert the measuring group in the v2_control_measure_group
table_name = "v2_control_measure_group"
attributes = {"id": self.measuring_group_id}
self.control_structure.insert_into_table(
table_name=table_name, attributes=attributes
)
# Create a tab in the tabwidget of the 'Measuring group' tab in
# the controlled structures dockwidget
self.add_measuring_group_tab_dockwidget()
table_name = "v2_control_measure_map"
BUTTON_ROW = 0
for row in range(self.tablewidget_measuring_point.rowCount()):
if row != BUTTON_ROW:
# Get the new measuring_point id
attribute_name = "MAX(id)"
try:
max_id_measure_point = int(
self.control_structure.get_attributes(
table_name, attribute_name
)[0]
)
except ValueError:
logger.exception("Error determining max measure point id, using 0")
max_id_measure_point = 0
new_measuring_point_id = max_id_measure_point + 1
measure_point_attributes = self.get_measuring_point_attributes(
row, new_measuring_point_id
)
# Save the measuring point in the v2_control_measure_map
self.control_structure.insert_into_table(
table_name, measure_point_attributes
)
# Setup new tab of "Measuring group" tab
self.setup_measuring_group_table_dockwidget(measure_point_attributes)
def add_measuring_group_tab_dockwidget(self):
"""
Create a tab for the measure group within the Measure group tab
in the dockwidget.
"""
tab = QWidget()
layout = QVBoxLayout(tab)
tab.setLayout(layout)
table_measuring_group = QTableWidget(tab)
table_measuring_group.setGeometry(10, 10, 741, 266)
table_measuring_group.insertColumn(0)
table_measuring_group.setHorizontalHeaderItem(0, QTableWidgetItem("table"))
table_measuring_group.insertColumn(1)
table_measuring_group.setHorizontalHeaderItem(1, QTableWidgetItem("table_id"))
table_measuring_group.insertColumn(2)
table_measuring_group.setHorizontalHeaderItem(2, QTableWidgetItem("weight"))
table_measuring_group.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.dockwidget_controlled_structures.table_measuring_group = (
table_measuring_group
)
# Add the tab to the left
self.dockwidget_controlled_structures.tab_measuring_group_view_2.insertTab(
0, tab, "Group: {}".format(str(self.label_measuring_group_id_info.text()))
)
def get_measuring_point_attributes(self, row_nr, new_measuring_point_id):
"""
Get the attributes of the measuring point from the table.
Args:
(int) row_nr: The row number of the tablewidget.
(int) new_measuring_point_id: The id of the new measuring point.
Returns:
(dict) attributes: A dict containing the attributes
of the measuring point.
"""
measuring_point_table = self.tablewidget_measuring_point.item(row_nr, 0).text()
try:
measuring_point_table_id = self.tablewidget_measuring_point.item(
row_nr, 1
).text()
except AttributeError:
# TODO: I've seen this measuring_point_table_id try/except
# before. Can it be unified?
logger.exception(
"Error grabbing measuring point table id, using current text"
)
measuring_point_table_id = self.tablewidget_measuring_point.cellWidget(
row_nr, 1
).currentText()
try:
measuring_point_weight = self.tablewidget_measuring_point.item(
row_nr, 2
).text()
except AttributeError:
logger.exception(
"Error grabbing measuring point weight, using empty string"
)
measuring_point_weight = ""
attributes = {
"id": new_measuring_point_id,
"measure_group_id": self.measuring_group_id,
"object_type": measuring_point_table,
"object_id": measuring_point_table_id,
"weight": measuring_point_weight,
}
return attributes
def setup_measuring_group_table_dockwidget(self, measure_map_attributes):
"""
Setup a tab for the measure group in the Measure group tab
in the dockwidget.
Args:
(dict) measure_map_attributes: A dict containing the attributes
from the measuring point (from v2_control_measure_map).
"""
row_position = (
self.dockwidget_controlled_structures.table_measuring_group.rowCount()
)
self.dockwidget_controlled_structures.table_measuring_group.insertRow(
row_position
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 0, QTableWidgetItem("v2_connection_nodes")
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 1, QTableWidgetItem(measure_map_attributes["object_id"])
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 2, QTableWidgetItem(measure_map_attributes["weight"])
)
|
gpl-3.0
| -5,747,395,667,659,924,000
| 40.741379
| 87
| 0.629423
| false
| 3.99505
| false
| false
| false
|
psychopy/versions
|
psychopy/visual/line.py
|
1
|
2906
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Creates a Line between two points as a special case of a
:class:`~psychopy.visual.ShapeStim`
"""
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
from __future__ import absolute_import, division, print_function
import psychopy # so we can get the __path__
from psychopy import logging
import numpy
from psychopy.visual.shape import ShapeStim
from psychopy.tools.attributetools import attributeSetter, setAttribute
class Line(ShapeStim):
"""Creates a Line between two points.
(New in version 1.72.00)
"""
def __init__(self, win, start=(-.5, -.5), end=(.5, .5), **kwargs):
"""Line accepts all input parameters, that
:class:`~psychopy.visual.ShapeStim` accepts, except
for vertices, closeShape and fillColor.
:Notes:
The `contains` method always return False because a line is not a
proper (2D) polygon.
"""
# what local vars are defined (these are the init params) for use by
# __repr__
self._initParams = dir()
self._initParams.remove('self')
# kwargs isn't a parameter, but a list of params
self._initParams.remove('kwargs')
self._initParams.extend(kwargs)
self.__dict__['start'] = numpy.array(start)
self.__dict__['end'] = numpy.array(end)
self.__dict__['vertices'] = [start, end]
kwargs['closeShape'] = False # Make sure nobody messes around here
kwargs['vertices'] = self.vertices
kwargs['fillColor'] = None
super(Line, self).__init__(win, **kwargs)
@attributeSetter
def start(self, start):
"""tuple, list or 2x1 array.
Specifies the position of the start of the line.
:ref:`Operations <attrib-operations>` supported.
"""
self.__dict__['start'] = numpy.array(start)
self.setVertices([self.start, self.end], log=False)
def setStart(self, start, log=None):
"""Usually you can use 'stim.attribute = value' syntax instead,
but use this method if you need to suppress the log message.
"""
setAttribute(self, 'start', start, log)
@attributeSetter
def end(self, end):
"""tuple, list or 2x1 array
Specifies the position of the end of the line.
:ref:`Operations <attrib-operations>` supported."""
self.__dict__['end'] = numpy.array(end)
self.setVertices([self.start, self.end], log=False)
def setEnd(self, end, log=None):
"""Usually you can use 'stim.attribute = value' syntax instead,
but use this method if you need to suppress the log message.
"""
setAttribute(self, 'end', end, log)
def contains(self, *args, **kwargs):
return False
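# Minimal usage sketch (assumes an open PsychoPy window):
#   from psychopy import visual
#   win = visual.Window()
#   diag = visual.Line(win, start=(-0.5, -0.5), end=(0.5, 0.5), lineColor='white')
#   diag.draw()
#   win.flip()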
|
gpl-3.0
| -6,665,230,088,042,024,000
| 32.790698
| 79
| 0.627667
| false
| 3.874667
| false
| false
| false
|
jdweeks/screener
|
screener.py
|
1
|
2569
|
#!/usr/local/bin/python3
import os, sys, getopt
import quandl as ql
import pandas as pd
import numpy as np
from pylab import plot, figure, savefig
# read Russell 3000 constituents from a csv
def readRuss():
try:
ticks = []
russ = open('russ3.csv', 'r').read()
split = russ.split('\n')
for tick in split:
ticks.append('WIKI/' + tick.rstrip())
return ticks
except Exception as e:
print('Failed to read Russell 3000:', str(e))
# retrieve stock data from Quandl
def getData(query, date):
try:
return ql.get(query, start_date = date)
except Exception as e:
print('Failed to get stock data:', str(e))
# fit a first-degree polynomial (i.e. a line) to the data
def calcTrend(data):
return np.polyfit(data.index.values, list(data), 1)
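# Worked example: for a steadily rising series the fitted slope (coeffs[0]) is
# positive, e.g.
#   calcTrend(pd.Series([1.0, 2.0, 3.0, 4.0]))  # -> approximately [1.0, 1.0]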
def main(argv):
tick = 'WIKI/' # ticker will be appended
date = '2017/01/01' # default start date
ql.ApiConfig.api_key = os.environ['QUANDL_KEY']
usage = 'usage: screener.py -t <ticker> -d <start_date>'
if len(argv) == 0:
print(usage)
sys.exit(2)
# parse command-line args
try:
opts, args = getopt.getopt(argv, 'ht:d:', ['ticker=', 'date='])
except getopt.GetoptError:
print(usage)
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print(usage)
sys.exit(0)
elif opt in ('-t', '--ticker'):
tick += arg
elif opt in ('-d', '--date'):
date = arg
# retrieve the 4th & 5th cols (Close & Volume)
close = getData(tick + '.4', date)
vol = getData(tick + '.5', date)
data = pd.concat([close, vol], axis=1).reset_index(drop=True)
print(data)
# calculate trends on price and volume
pcoeffs = calcTrend(data['Close'])
vcoeffs = calcTrend(data['Volume'])
print('Price trend:', pcoeffs[0])
print('Volume trend:', vcoeffs[0])
# save plots of trend lines
xi = data.index.values
figure()
pline = pcoeffs[0] * xi + pcoeffs[1]
plot(xi, pline, 'r-', xi, list(data['Close']), '-o')
savefig('price.png')
figure()
vline = vcoeffs[0] * xi + vcoeffs[1]
plot(xi, vline, 'r-', xi, list(data['Volume']), '-o')
savefig('volume.png')
# ticks = readRuss()
# q_close = [ tick + '.4' for tick in ticks[:5] ]
# q_vol = [ tick + '.5' for tick in ticks[:5] ]
# data = getData(q_close + q_vol, '2017-01-01')
if __name__ == "__main__":
# execute only if run as a script
main(sys.argv[1:])
|
mit
| -2,508,157,812,468,462,600
| 26.042105
| 70
| 0.5652
| false
| 3.179455
| false
| false
| false
|
spesmilo/electrum
|
electrum/plugins/email_requests/qt.py
|
1
|
9900
|
#!/usr/bin/env python
#
# Electrum - Lightweight Bitcoin Client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import random
import time
import threading
import base64
from functools import partial
import traceback
import sys
from typing import Set
import smtplib
import imaplib
import email
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.encoders import encode_base64
from PyQt5.QtCore import QObject, pyqtSignal, QThread
from PyQt5.QtWidgets import (QVBoxLayout, QLabel, QGridLayout, QLineEdit,
QInputDialog)
from electrum.gui.qt.util import (EnterButton, Buttons, CloseButton, OkButton,
WindowModalDialog)
from electrum.gui.qt.main_window import ElectrumWindow
from electrum.plugin import BasePlugin, hook
from electrum.paymentrequest import PaymentRequest
from electrum.i18n import _
from electrum.logging import Logger
from electrum.wallet import Abstract_Wallet
from electrum.invoices import OnchainInvoice
class Processor(threading.Thread, Logger):
polling_interval = 5*60
def __init__(self, imap_server, username, password, callback):
threading.Thread.__init__(self)
Logger.__init__(self)
self.daemon = True
self.username = username
self.password = password
self.imap_server = imap_server
self.on_receive = callback
self.M = None
self.reset_connect_wait()
def reset_connect_wait(self):
self.connect_wait = 100 # ms, between failed connection attempts
def poll(self):
try:
self.M.select()
except:
return
typ, data = self.M.search(None, 'ALL')
for num in str(data[0], 'utf8').split():
typ, msg_data = self.M.fetch(num, '(RFC822)')
msg = email.message_from_bytes(msg_data[0][1])
p = msg.get_payload()
if not msg.is_multipart():
p = [p]
continue
for item in p:
if item.get_content_type() == "application/bitcoin-paymentrequest":
pr_str = item.get_payload()
pr_str = base64.b64decode(pr_str)
self.on_receive(pr_str)
def run(self):
while True:
try:
self.M = imaplib.IMAP4_SSL(self.imap_server)
self.M.login(self.username, self.password)
except BaseException as e:
self.logger.info(f'connecting failed: {repr(e)}')
self.connect_wait *= 2
else:
self.reset_connect_wait()
# Reconnect when host changes
while self.M and self.M.host == self.imap_server:
try:
self.poll()
except BaseException as e:
self.logger.info(f'polling failed: {repr(e)}')
break
time.sleep(self.polling_interval)
time.sleep(random.randint(0, self.connect_wait))
def send(self, recipient, message, payment_request):
msg = MIMEMultipart()
msg['Subject'] = message
msg['To'] = recipient
msg['From'] = self.username
part = MIMEBase('application', "bitcoin-paymentrequest")
part.set_payload(payment_request)
encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="payreq.btc"')
msg.attach(part)
try:
s = smtplib.SMTP_SSL(self.imap_server, timeout=2)
s.login(self.username, self.password)
s.sendmail(self.username, [recipient], msg.as_string())
s.quit()
except BaseException as e:
self.logger.info(e)
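# Construction sketch (server and credentials below are placeholders): the plugin
# starts one Processor as a daemon thread and lets it poll the mailbox:
#   proc = Processor('imap.example.org', 'user@example.org', 'secret', on_receive)
#   proc.start()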
class QEmailSignalObject(QObject):
email_new_invoice_signal = pyqtSignal()
class Plugin(BasePlugin):
def fullname(self):
return 'Email'
def description(self):
return _("Send and receive payment requests via email")
def is_available(self):
return True
def __init__(self, parent, config, name):
BasePlugin.__init__(self, parent, config, name)
self.imap_server = self.config.get('email_server', '')
self.username = self.config.get('email_username', '')
self.password = self.config.get('email_password', '')
if self.imap_server and self.username and self.password:
self.processor = Processor(self.imap_server, self.username, self.password, self.on_receive)
self.processor.start()
self.obj = QEmailSignalObject()
self.obj.email_new_invoice_signal.connect(self.new_invoice)
self.wallets = set() # type: Set[Abstract_Wallet]
def on_receive(self, pr_str):
self.logger.info('received payment request')
self.pr = PaymentRequest(pr_str)
self.obj.email_new_invoice_signal.emit()
@hook
def load_wallet(self, wallet, main_window):
self.wallets |= {wallet}
@hook
def close_wallet(self, wallet):
self.wallets -= {wallet}
def new_invoice(self):
invoice = OnchainInvoice.from_bip70_payreq(self.pr)
for wallet in self.wallets:
wallet.save_invoice(invoice)
#main_window.invoice_list.update()
@hook
def receive_list_menu(self, window: ElectrumWindow, menu, addr):
menu.addAction(_("Send via e-mail"), lambda: self.send(window, addr))
def send(self, window: ElectrumWindow, addr):
from electrum import paymentrequest
req = window.wallet.receive_requests.get(addr)
if not isinstance(req, OnchainInvoice):
window.show_error("Only on-chain requests are supported.")
return
message = req.message
if req.bip70:
payload = bytes.fromhex(req.bip70)
else:
pr = paymentrequest.make_request(self.config, req)
payload = pr.SerializeToString()
if not payload:
return
recipient, ok = QInputDialog.getText(window, 'Send request', 'Email invoice to:')
if not ok:
return
recipient = str(recipient)
self.logger.info(f'sending mail to {recipient}')
try:
# FIXME this runs in the GUI thread and blocks it...
self.processor.send(recipient, message, payload)
except BaseException as e:
self.logger.exception('')
window.show_message(repr(e))
else:
window.show_message(_('Request sent.'))
def requires_settings(self):
return True
def settings_widget(self, window):
return EnterButton(_('Settings'), partial(self.settings_dialog, window))
def settings_dialog(self, window):
d = WindowModalDialog(window, _("Email settings"))
d.setMinimumSize(500, 200)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('Server hosting your email account')))
grid = QGridLayout()
vbox.addLayout(grid)
grid.addWidget(QLabel('Server (IMAP)'), 0, 0)
server_e = QLineEdit()
server_e.setText(self.imap_server)
grid.addWidget(server_e, 0, 1)
grid.addWidget(QLabel('Username'), 1, 0)
username_e = QLineEdit()
username_e.setText(self.username)
grid.addWidget(username_e, 1, 1)
grid.addWidget(QLabel('Password'), 2, 0)
password_e = QLineEdit()
password_e.setText(self.password)
grid.addWidget(password_e, 2, 1)
vbox.addStretch()
vbox.addLayout(Buttons(CloseButton(d), OkButton(d)))
if not d.exec_():
return
server = str(server_e.text())
self.config.set_key('email_server', server)
self.imap_server = server
username = str(username_e.text())
self.config.set_key('email_username', username)
self.username = username
password = str(password_e.text())
self.config.set_key('email_password', password)
self.password = password
check_connection = CheckConnectionThread(server, username, password)
check_connection.connection_error_signal.connect(lambda e: window.show_message(
_("Unable to connect to mail server:\n {}").format(e) + "\n" +
_("Please check your connection and credentials.")
))
check_connection.start()
class CheckConnectionThread(QThread):
connection_error_signal = pyqtSignal(str)
def __init__(self, server, username, password):
super().__init__()
self.server = server
self.username = username
self.password = password
def run(self):
try:
conn = imaplib.IMAP4_SSL(self.server)
conn.login(self.username, self.password)
except BaseException as e:
self.connection_error_signal.emit(repr(e))
|
mit
| -5,328,565,570,429,520,000
| 34.483871
| 103
| 0.626162
| false
| 4.090909
| true
| false
| false
|
sveetch/sveedocuments
|
sveedocuments/utils/rest_roles.py
|
1
|
4637
|
# -*- coding: utf-8 -*-
"""
ReSTructured additional roles
"""
import os, re
from docutils import nodes, utils
from docutils.parsers.rst import roles
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.contrib.sites.models import Site
from sveedocuments.models import Page, Attachment
_ATTACHMENT_ROLE_REGEX = re.compile(r"^(?:id)(?P<id>[0-9]+)(?:\-)(?P<slug>.*?)$")
def rst_parser_error(msg, rawtext, text, lineno, inliner):
msg = inliner.reporter.error(msg, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
def get_page_slugs(force_update_cache=False):
"""
    Get a dict of all visible *Pages*, mapping ``slug`` to ``title``
    Try to get it from the cache if it exists, else build it
"""
if force_update_cache or not cache.get(settings.PAGE_SLUGS_CACHE_KEY_NAME):
slugs_map = dict(Page.objects.filter(visible=True).values_list('slug', 'title'))
cache.set(settings.PAGE_SLUGS_CACHE_KEY_NAME, slugs_map)
return slugs_map
return cache.get(settings.PAGE_SLUGS_CACHE_KEY_NAME)
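# Illustrative sketch (not part of the original module): the cached slug map
# can be refreshed eagerly whenever a Page is saved. The signal wiring below
# is an assumption for demonstration only; this module does not register it.
#
#     from django.db.models.signals import post_save
#
#     def _refresh_page_slugs(sender, **kwargs):
#         get_page_slugs(force_update_cache=True)
#
#     post_save.connect(_refresh_page_slugs, sender=Page)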
def page_link(role, rawtext, text, lineno, inliner, options={}, content=[]):
"""
Role to make a reference link to other *Pages* by using their ``slug``
Usage in document :
Blah blah :page:`my-page-slug`
"""
# Get the page slugs map
slugs = get_page_slugs()
# Throw error if the given slug does not exist
if text not in slugs and not settings.DOCUMENTS_PARSER_WIKIROLE_SILENT_WARNING:
msg = inliner.reporter.error('Page with slug "%s" does not exist.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
# Add a class to the item
options.update({'classes': ['documents_page_link']})
roles.set_classes(options)
# Return the node as reference to display the link for the given page's slug
site_current = Site.objects.get_current()
url = "http://{0}{1}".format(site_current.domain, reverse('documents-page-details', args=[text]))
node = nodes.reference(rawtext, utils.unescape(slugs[text]), refuri=url, **options)
return [node], []
roles.register_local_role('page', page_link)
def get_page_attachment_slugs(page_id, force_update_cache=False):
"""
Get a dict of all Attachments linked to a Page
    Try to get it from the cache if it exists, else build it
"""
cache_key = settings.PAGE_ATTACHMENTS_SLUGS_CACHE_KEY_NAME.format(page_id)
if force_update_cache or not cache.get(cache_key):
page = Page.objects.get(pk=page_id)
slugs_map = dict(page.attachment.all().values_list('slug', 'file'))
cache.set(cache_key, slugs_map)
return slugs_map
return cache.get(cache_key)
def page_attachment(role, rawtext, text, lineno, inliner, options={}, content=[]):
"""
Role to make a reference link to a Page's attachment
Usage in document :
Blah blah :attachment:`idX-slug`
    Where X is the page id and slug is its slug name
    The page id is needed because I can't find a clean way to give some page context to
the docutils parser.
"""
matched = _ATTACHMENT_ROLE_REGEX.match(text)
if not matched or len(matched.groups())<2:
        return rst_parser_error('Invalid attachment role syntax in "{0}", you should write something like "idXX-ATTACHMENT_SLUG".'.format(text), rawtext, text, lineno, inliner)
# Get the page slugs map
pk, attachment_slug = matched.groups()
try:
slugs_map = get_page_attachment_slugs(pk)
except Page.DoesNotExist:
return rst_parser_error('Page with id "{pk}" does not exist in pattern "{pattern}"'.format(pk=pk, pattern=text), rawtext, text, lineno, inliner)
else:
if attachment_slug not in slugs_map and not settings.DOCUMENTS_PARSER_WIKIROLE_SILENT_WARNING:
return rst_parser_error('Attachment with slug "{slug}" does not exist for page id "{pk}" in pattern "{pattern}".'.format(pk=pk, slug=attachment_slug, pattern=text), rawtext, text, lineno, inliner)
link = slugs_map[attachment_slug]
# Add a class to the item
options.update({'classes': ['documents_page_attachment']})
roles.set_classes(options)
# Return the node as reference to display the link for the given page's slug
node = nodes.reference(rawtext, utils.unescape(attachment_slug), refuri=os.path.join(settings.MEDIA_URL, link), **options)
return [node], []
roles.register_local_role('attachment', page_attachment)
|
mit
| 807,631,129,012,385,500
| 40.774775
| 208
| 0.671339
| false
| 3.496983
| false
| false
| false
|
christiansandberg/canopen
|
canopen/network.py
|
1
|
12500
|
try:
from collections.abc import MutableMapping
except ImportError:
from collections import MutableMapping
import logging
import threading
import struct
try:
import can
from can import Listener
from can import CanError
except ImportError:
# Do not fail if python-can is not installed
can = None
Listener = object
CanError = Exception
from .node import RemoteNode, LocalNode
from .sync import SyncProducer
from .timestamp import TimeProducer
from .nmt import NmtMaster
from .lss import LssMaster
from .objectdictionary.eds import import_from_node
logger = logging.getLogger(__name__)
class Network(MutableMapping):
"""Representation of one CAN bus containing one or more nodes."""
def __init__(self, bus=None):
"""
:param can.BusABC bus:
A python-can bus instance to re-use.
"""
#: A python-can :class:`can.BusABC` instance which is set after
#: :meth:`canopen.Network.connect` is called
self.bus = bus
#: A :class:`~canopen.network.NodeScanner` for detecting nodes
self.scanner = NodeScanner(self)
#: List of :class:`can.Listener` objects.
#: Includes at least MessageListener.
self.listeners = [MessageListener(self)]
self.notifier = None
self.nodes = {}
self.subscribers = {}
self.send_lock = threading.Lock()
self.sync = SyncProducer(self)
self.time = TimeProducer(self)
self.nmt = NmtMaster(0)
self.nmt.network = self
self.lss = LssMaster()
self.lss.network = self
self.subscribe(self.lss.LSS_RX_COBID, self.lss.on_message_received)
def subscribe(self, can_id, callback):
"""Listen for messages with a specific CAN ID.
:param int can_id:
The CAN ID to listen for.
:param callback:
Function to call when message is received.
"""
self.subscribers.setdefault(can_id, list())
if callback not in self.subscribers[can_id]:
self.subscribers[can_id].append(callback)
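    # Illustrative sketch (not part of the original module): subscribing to the
    # heartbeat of node 6 (COB-ID 0x700 + node ID). The callback signature is
    # the one used by Network.notify() below.
    #
    #     def on_heartbeat(can_id, data, timestamp):
    #         print("NMT state of node 6: 0x%02X" % data[0])
    #
    #     network.subscribe(0x700 + 6, on_heartbeat)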
def unsubscribe(self, can_id, callback=None):
"""Stop listening for message.
:param int can_id:
The CAN ID from which to unsubscribe.
:param callback:
If given, remove only this callback. Otherwise all callbacks for
the CAN ID.
"""
if callback is None:
del self.subscribers[can_id]
else:
self.subscribers[can_id].remove(callback)
def connect(self, *args, **kwargs):
"""Connect to CAN bus using python-can.
Arguments are passed directly to :class:`can.BusABC`. Typically these
may include:
:param channel:
Backend specific channel for the CAN interface.
:param str bustype:
Name of the interface. See
`python-can manual <https://python-can.readthedocs.io/en/latest/configuration.html#interface-names>`__
for full list of supported interfaces.
:param int bitrate:
Bitrate in bit/s.
:raises can.CanError:
When connection fails.
"""
# If bitrate has not been specified, try to find one node where bitrate
# has been specified
if "bitrate" not in kwargs:
for node in self.nodes.values():
if node.object_dictionary.bitrate:
kwargs["bitrate"] = node.object_dictionary.bitrate
break
self.bus = can.interface.Bus(*args, **kwargs)
logger.info("Connected to '%s'", self.bus.channel_info)
self.notifier = can.Notifier(self.bus, self.listeners, 1)
return self
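    # Illustrative sketch (not part of the original module): a typical
    # connect/disconnect cycle. The channel/bustype values are an example for
    # a Linux SocketCAN setup and are assumptions, not defaults of this class.
    #
    #     network = Network()
    #     network.connect(bustype='socketcan', channel='can0', bitrate=250000)
    #     ...
    #     network.disconnect()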
def disconnect(self):
"""Disconnect from the CAN bus.
Must be overridden in a subclass if a custom interface is used.
"""
for node in self.nodes.values():
if hasattr(node, "pdo"):
node.pdo.stop()
if self.notifier is not None:
self.notifier.stop()
if self.bus is not None:
self.bus.shutdown()
self.bus = None
self.check()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.disconnect()
def add_node(self, node, object_dictionary=None, upload_eds=False):
"""Add a remote node to the network.
:param node:
Can be either an integer representing the node ID, a
:class:`canopen.RemoteNode` or :class:`canopen.LocalNode` object.
:param object_dictionary:
Can be either a string for specifying the path to an
Object Dictionary file or a
:class:`canopen.ObjectDictionary` object.
:param bool upload_eds:
Set ``True`` if EDS file should be uploaded from 0x1021.
:return:
The Node object that was added.
:rtype: canopen.RemoteNode
"""
if isinstance(node, int):
if upload_eds:
logger.info("Trying to read EDS from node %d", node)
object_dictionary = import_from_node(node, self)
node = RemoteNode(node, object_dictionary)
self[node.id] = node
return node
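    # Illustrative sketch (not part of the original module): adding a remote
    # node either from a local EDS file (the path is hypothetical) or by
    # letting the node upload its own EDS from object 0x1021.
    #
    #     node = network.add_node(6, '/path/to/device.eds')
    #     node = network.add_node(6, upload_eds=True)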
def create_node(self, node, object_dictionary=None):
"""Create a local node in the network.
:param node:
An integer representing the node ID.
:param object_dictionary:
Can be either a string for specifying the path to an
Object Dictionary file or a
:class:`canopen.ObjectDictionary` object.
:return:
The Node object that was added.
:rtype: canopen.LocalNode
"""
if isinstance(node, int):
node = LocalNode(node, object_dictionary)
self[node.id] = node
return node
def send_message(self, can_id, data, remote=False):
"""Send a raw CAN message to the network.
This method may be overridden in a subclass if you need to integrate
this library with a custom backend.
It is safe to call this from multiple threads.
:param int can_id:
CAN-ID of the message
:param data:
Data to be transmitted (anything that can be converted to bytes)
:param bool remote:
Set to True to send remote frame
:raises can.CanError:
When the message fails to be transmitted
"""
if not self.bus:
raise RuntimeError("Not connected to CAN bus")
msg = can.Message(is_extended_id=can_id > 0x7FF,
arbitration_id=can_id,
data=data,
is_remote_frame=remote)
with self.send_lock:
self.bus.send(msg)
self.check()
def send_periodic(self, can_id, data, period, remote=False):
"""Start sending a message periodically.
:param int can_id:
CAN-ID of the message
:param data:
Data to be transmitted (anything that can be converted to bytes)
:param float period:
Seconds between each message
:param bool remote:
indicates if the message frame is a remote request to the slave node
:return:
            A task object with a ``.stop()`` method to stop the transmission
:rtype: canopen.network.PeriodicMessageTask
"""
return PeriodicMessageTask(can_id, data, period, self.bus, remote)
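    # Illustrative sketch (not part of the original module): a periodic
    # transmission of a 2-byte payload every 100 ms, later updated and stopped.
    #
    #     task = network.send_periodic(0x1A0, [0x00, 0x01], 0.1)
    #     task.update([0x00, 0x02])
    #     task.stop()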
def notify(self, can_id, data, timestamp):
"""Feed incoming message to this library.
If a custom interface is used, this function must be called for each
message read from the CAN bus.
:param int can_id:
CAN-ID of the message
:param bytearray data:
Data part of the message (0 - 8 bytes)
:param float timestamp:
Timestamp of the message, preferably as a Unix timestamp
"""
if can_id in self.subscribers:
callbacks = self.subscribers[can_id]
for callback in callbacks:
callback(can_id, data, timestamp)
self.scanner.on_message_received(can_id)
def check(self):
"""Check that no fatal error has occurred in the receiving thread.
If an exception caused the thread to terminate, that exception will be
raised.
"""
if self.notifier is not None:
exc = self.notifier.exception
if exc is not None:
logger.error("An error has caused receiving of messages to stop")
raise exc
def __getitem__(self, node_id):
return self.nodes[node_id]
def __setitem__(self, node_id, node):
assert node_id == node.id
self.nodes[node_id] = node
node.associate_network(self)
def __delitem__(self, node_id):
self.nodes[node_id].remove_network()
del self.nodes[node_id]
def __iter__(self):
return iter(self.nodes)
def __len__(self):
return len(self.nodes)
class PeriodicMessageTask(object):
"""
Task object to transmit a message periodically using python-can's
CyclicSendTask
"""
def __init__(self, can_id, data, period, bus, remote=False):
"""
:param int can_id:
CAN-ID of the message
:param data:
Data to be transmitted (anything that can be converted to bytes)
:param float period:
Seconds between each message
:param can.BusABC bus:
python-can bus to use for transmission
"""
self.bus = bus
self.period = period
self.msg = can.Message(is_extended_id=can_id > 0x7FF,
arbitration_id=can_id,
data=data, is_remote_frame=remote)
self._task = None
self._start()
def _start(self):
self._task = self.bus.send_periodic(self.msg, self.period)
def stop(self):
"""Stop transmission"""
self._task.stop()
def update(self, data):
"""Update data of message
:param data:
New data to transmit
"""
new_data = bytearray(data)
old_data = self.msg.data
self.msg.data = new_data
if hasattr(self._task, "modify_data"):
self._task.modify_data(self.msg)
elif new_data != old_data:
# Stop and start (will mess up period unfortunately)
self._task.stop()
self._start()
class MessageListener(Listener):
"""Listens for messages on CAN bus and feeds them to a Network instance.
:param canopen.Network network:
The network to notify on new messages.
"""
def __init__(self, network):
self.network = network
def on_message_received(self, msg):
if msg.is_error_frame or msg.is_remote_frame:
return
try:
self.network.notify(msg.arbitration_id, msg.data, msg.timestamp)
except Exception as e:
            # Exceptions in any callbacks should not affect CAN processing
logger.error(str(e))
class NodeScanner(object):
"""Observes which nodes are present on the bus.
Listens for the following messages:
- Heartbeat (0x700)
- SDO response (0x580)
- TxPDO (0x180, 0x280, 0x380, 0x480)
- EMCY (0x80)
:param canopen.Network network:
The network to use when doing active searching.
"""
#: Activate or deactivate scanning
active = True
SERVICES = (0x700, 0x580, 0x180, 0x280, 0x380, 0x480, 0x80)
def __init__(self, network=None):
self.network = network
#: A :class:`list` of nodes discovered
self.nodes = []
def on_message_received(self, can_id):
service = can_id & 0x780
node_id = can_id & 0x7F
if node_id not in self.nodes and node_id != 0 and service in self.SERVICES:
self.nodes.append(node_id)
def reset(self):
"""Clear list of found nodes."""
self.nodes = []
def search(self, limit=127):
"""Search for nodes by sending SDO requests to all node IDs."""
if self.network is None:
raise RuntimeError("A Network is required to do active scanning")
sdo_req = b"\x40\x00\x10\x00\x00\x00\x00\x00"
for node_id in range(1, limit + 1):
self.network.send_message(0x600 + node_id, sdo_req)
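# Illustrative sketch (not part of the original module): active discovery of
# nodes on the bus via the scanner attached to a Network instance.
#
#     network.scanner.search()
#     time.sleep(0.5)            # give nodes time to answer the SDO requests
#     print(network.scanner.nodes)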
|
mit
| -5,675,439,393,431,230,000
| 31.552083
| 114
| 0.5908
| false
| 4.204507
| false
| false
| false
|
opreaalex/skeletout
|
templates/_app_name_/helpers.py
|
1
|
2399
|
# -*- coding: utf-8 -*-
"""
_app_name_.helpers
    ~~~~~~~~~~~~~~~~~~
_app_name_ helpers module
"""
import pkgutil
import importlib
from flask import Blueprint
from flask.json import JSONEncoder as BaseJSONEncoder
def register_blueprints(app, package_name, package_path):
"""Register all Blueprint instances on the specified Flask application found
in all modules for the specified package.
:param app: the Flask application
:param package_name: the package name
:param package_path: the package path
"""
rv = []
for _, name, _ in pkgutil.iter_modules(package_path):
m = importlib.import_module('%s.%s' % (package_name, name))
for item in dir(m):
item = getattr(m, item)
if isinstance(item, Blueprint):
app.register_blueprint(item)
rv.append(item)
return rv
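# Illustrative sketch (not part of the original module): register_blueprints()
# is typically called from a package's __init__.py so that every module in the
# package exposing a Blueprint gets registered automatically.
#
#     app = Flask(__name__)
#     register_blueprints(app, __name__, __path__)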
class JSONEncoder(BaseJSONEncoder):
"""Custom :class:`JSONEncoder` which respects objects that include the
:class:`JsonSerializer` mixin.
"""
def default(self, obj):
if isinstance(obj, JsonSerializer):
return obj.to_json()
return super(JSONEncoder, self).default(obj)
class JsonSerializer(object):
"""A mixin that can be used to mark a SQLAlchemy model class which
implements a :func:`to_json` method. The :func:`to_json` method is used
    in conjunction with the custom :class:`JSONEncoder` class. By default this
mixin will assume all properties of the SQLAlchemy model are to be visible
in the JSON output. Extend this class to customize which properties are
    public, hidden or modified before being passed to the JSON serializer.
"""
__json_public__ = None
__json_hidden__ = None
__json_modifiers__ = None
def get_field_names(self):
for p in self.__mapper__.iterate_properties:
yield p.key
def to_json(self):
field_names = self.get_field_names()
public = self.__json_public__ or field_names
hidden = self.__json_hidden__ or []
modifiers = self.__json_modifiers__ or dict()
rv = dict()
for key in public:
rv[key] = getattr(self, key)
for key, modifier in modifiers.items():
value = getattr(self, key)
rv[key] = modifier(value, self)
for key in hidden:
rv.pop(key, None)
return rv
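# Illustrative sketch (not part of the original module): wiring the encoder
# into a Flask app and marking a hypothetical Flask-SQLAlchemy model as
# serializable. The ``db`` object and ``User`` model are assumptions.
#
#     app.json_encoder = JSONEncoder
#
#     class User(JsonSerializer, db.Model):
#         __json_hidden__ = ['password']
#         id = db.Column(db.Integer, primary_key=True)
#         name = db.Column(db.String(80))
#         password = db.Column(db.String(128))
#
#     jsonify(user=User.query.first())   # 'password' is dropped from the output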
|
mit
| 7,411,269,388,410,093,000
| 30.155844
| 80
| 0.628178
| false
| 4.179443
| false
| false
| false
|
dabrahams/zeroinstall
|
zeroinstall/injector/model.py
|
1
|
45719
|
"""In-memory representation of interfaces and other data structures.
The objects in this module are used to build a representation of an XML interface
file in memory.
@see: L{reader} constructs these data-structures
@see: U{http://0install.net/interface-spec.html} description of the domain model
@var defaults: Default values for the 'default' attribute for <environment> bindings of
well-known variables.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from zeroinstall import _
import os, re, locale
from logging import info, debug, warn
from zeroinstall import SafeException, version
from zeroinstall.injector.namespaces import XMLNS_IFACE
from zeroinstall.injector import qdom
# Element names for bindings in feed files
binding_names = frozenset(['environment', 'overlay', 'executable-in-path', 'executable-in-var'])
network_offline = 'off-line'
network_minimal = 'minimal'
network_full = 'full'
network_levels = (network_offline, network_minimal, network_full)
stability_levels = {} # Name -> Stability
defaults = {
'PATH': '/bin:/usr/bin',
'XDG_CONFIG_DIRS': '/etc/xdg',
'XDG_DATA_DIRS': '/usr/local/share:/usr/share',
}
class InvalidInterface(SafeException):
"""Raised when parsing an invalid feed."""
feed_url = None
def __init__(self, message, ex = None):
if ex:
try:
message += "\n\n(exact error: %s)" % ex
except:
# Some Python messages have type str but contain UTF-8 sequences.
# (e.g. IOException). Adding these to a Unicode 'message' (e.g.
# after gettext translation) will cause an error.
import codecs
decoder = codecs.lookup('utf-8')
decex = decoder.decode(str(ex), errors = 'replace')[0]
message += "\n\n(exact error: %s)" % decex
SafeException.__init__(self, message)
def __unicode__(self):
if hasattr(SafeException, '__unicode__'):
# Python >= 2.6
if self.feed_url:
return _('%s [%s]') % (SafeException.__unicode__(self), self.feed_url)
return SafeException.__unicode__(self)
else:
return unicode(SafeException.__str__(self))
def _split_arch(arch):
"""Split an arch into an (os, machine) tuple. Either or both parts may be None."""
if not arch:
return None, None
elif '-' not in arch:
raise SafeException(_("Malformed arch '%s'") % arch)
else:
osys, machine = arch.split('-', 1)
if osys == '*': osys = None
if machine == '*': machine = None
return osys, machine
def _join_arch(osys, machine):
if osys == machine == None: return None
return "%s-%s" % (osys or '*', machine or '*')
def _best_language_match(options):
(language, encoding) = locale.getlocale()
if language:
# xml:lang uses '-', while LANG uses '_'
language = language.replace('_', '-')
else:
language = 'en-US'
return (options.get(language, None) or # Exact match (language+region)
options.get(language.split('-', 1)[0], None) or # Matching language
options.get('en', None)) # English
class Stability(object):
"""A stability rating. Each implementation has an upstream stability rating and,
optionally, a user-set rating."""
__slots__ = ['level', 'name', 'description']
def __init__(self, level, name, description):
self.level = level
self.name = name
self.description = description
assert name not in stability_levels
stability_levels[name] = self
def __cmp__(self, other):
return cmp(self.level, other.level)
def __str__(self):
return self.name
def __repr__(self):
return _("<Stability: %s>") % self.description
def process_binding(e):
"""Internal"""
if e.name == 'environment':
mode = {
None: EnvironmentBinding.PREPEND,
'prepend': EnvironmentBinding.PREPEND,
'append': EnvironmentBinding.APPEND,
'replace': EnvironmentBinding.REPLACE,
}[e.getAttribute('mode')]
binding = EnvironmentBinding(e.getAttribute('name'),
insert = e.getAttribute('insert'),
default = e.getAttribute('default'),
value = e.getAttribute('value'),
mode = mode,
separator = e.getAttribute('separator'))
if not binding.name: raise InvalidInterface(_("Missing 'name' in binding"))
if binding.insert is None and binding.value is None:
raise InvalidInterface(_("Missing 'insert' or 'value' in binding"))
if binding.insert is not None and binding.value is not None:
raise InvalidInterface(_("Binding contains both 'insert' and 'value'"))
return binding
elif e.name == 'executable-in-path':
return ExecutableBinding(e, in_path = True)
elif e.name == 'executable-in-var':
return ExecutableBinding(e, in_path = False)
elif e.name == 'overlay':
return OverlayBinding(e.getAttribute('src'), e.getAttribute('mount-point'))
else:
raise Exception(_("Unknown binding type '%s'") % e.name)
def process_depends(item, local_feed_dir):
"""Internal"""
# Note: also called from selections
attrs = item.attrs
dep_iface = item.getAttribute('interface')
if not dep_iface:
raise InvalidInterface(_("Missing 'interface' on <%s>") % item.name)
if dep_iface.startswith('.'):
if local_feed_dir:
dep_iface = os.path.abspath(os.path.join(local_feed_dir, dep_iface))
# (updates the element too, in case we write it out again)
attrs['interface'] = dep_iface
else:
raise InvalidInterface(_('Relative interface URI "%s" in non-local feed') % dep_iface)
dependency = InterfaceDependency(dep_iface, element = item)
for e in item.childNodes:
if e.uri != XMLNS_IFACE: continue
if e.name in binding_names:
dependency.bindings.append(process_binding(e))
elif e.name == 'version':
dependency.restrictions.append(
VersionRangeRestriction(not_before = parse_version(e.getAttribute('not-before')),
before = parse_version(e.getAttribute('before'))))
return dependency
def N_(message): return message
insecure = Stability(0, N_('insecure'), _('This is a security risk'))
buggy = Stability(5, N_('buggy'), _('Known to have serious bugs'))
developer = Stability(10, N_('developer'), _('Work-in-progress - bugs likely'))
testing = Stability(20, N_('testing'), _('Stability unknown - please test!'))
stable = Stability(30, N_('stable'), _('Tested - no serious problems found'))
packaged = Stability(35, N_('packaged'), _('Supplied by the local package manager'))
preferred = Stability(40, N_('preferred'), _('Best of all - must be set manually'))
del N_
class Restriction(object):
"""A Restriction limits the allowed implementations of an Interface."""
__slots__ = []
def meets_restriction(self, impl):
"""Called by the L{solver.Solver} to check whether a particular implementation is acceptable.
@return: False if this implementation is not a possibility
@rtype: bool
"""
raise NotImplementedError(_("Abstract"))
class VersionRestriction(Restriction):
"""Only select implementations with a particular version number.
@since: 0.40"""
def __init__(self, version):
"""@param version: the required version number
@see: L{parse_version}; use this to pre-process the version number
"""
self.version = version
def meets_restriction(self, impl):
return impl.version == self.version
def __str__(self):
return _("(restriction: version = %s)") % format_version(self.version)
class VersionRangeRestriction(Restriction):
"""Only versions within the given range are acceptable"""
__slots__ = ['before', 'not_before']
def __init__(self, before, not_before):
"""@param before: chosen versions must be earlier than this
@param not_before: versions must be at least this high
@see: L{parse_version}; use this to pre-process the versions
"""
self.before = before
self.not_before = not_before
def meets_restriction(self, impl):
if self.not_before and impl.version < self.not_before:
return False
if self.before and impl.version >= self.before:
return False
return True
def __str__(self):
if self.not_before is not None or self.before is not None:
range = ''
if self.not_before is not None:
range += format_version(self.not_before) + ' <= '
range += 'version'
if self.before is not None:
range += ' < ' + format_version(self.before)
else:
range = 'none'
return _("(restriction: %s)") % range
class Binding(object):
"""Information about how the choice of a Dependency is made known
to the application being run."""
@property
def command(self):
""""Returns the name of the specific command needed by this binding, if any.
@since: 1.2"""
return None
class EnvironmentBinding(Binding):
"""Indicate the chosen implementation using an environment variable."""
__slots__ = ['name', 'insert', 'default', 'mode', 'value']
PREPEND = 'prepend'
APPEND = 'append'
REPLACE = 'replace'
def __init__(self, name, insert, default = None, mode = PREPEND, value=None, separator=None):
"""
mode argument added in version 0.28
value argument added in version 0.52
"""
self.name = name
self.insert = insert
self.default = default
self.mode = mode
self.value = value
if separator is None:
self.separator = os.pathsep
else:
self.separator = separator
def __str__(self):
return _("<environ %(name)s %(mode)s %(insert)s %(value)s>") % \
{'name': self.name, 'mode': self.mode, 'insert': self.insert, 'value': self.value}
__repr__ = __str__
def get_value(self, path, old_value):
"""Calculate the new value of the environment variable after applying this binding.
@param path: the path to the selected implementation
@param old_value: the current value of the environment variable
@return: the new value for the environment variable"""
if self.insert is not None:
extra = os.path.join(path, self.insert)
else:
assert self.value is not None
extra = self.value
if self.mode == EnvironmentBinding.REPLACE:
return extra
if old_value is None:
old_value = self.default or defaults.get(self.name, None)
if old_value is None:
return extra
if self.mode == EnvironmentBinding.PREPEND:
return extra + self.separator + old_value
else:
return old_value + self.separator + extra
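	# Illustrative sketch (not part of the original module): how get_value()
	# composes a new PATH for a prepend-mode binding (POSIX separator assumed;
	# the cache path is hypothetical).
	#
	#     b = EnvironmentBinding('PATH', insert='bin')
	#     b.get_value('/cache/sha1=abc', '/usr/bin')
	#     # -> '/cache/sha1=abc/bin:/usr/bin'
	#     b.get_value('/cache/sha1=abc', None)
	#     # -> '/cache/sha1=abc/bin:/bin:/usr/bin'  (falls back to defaults['PATH'])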
def _toxml(self, doc, prefixes):
"""Create a DOM element for this binding.
@param doc: document to use to create the element
@return: the new element
"""
env_elem = doc.createElementNS(XMLNS_IFACE, 'environment')
env_elem.setAttributeNS(None, 'name', self.name)
if self.mode is not None:
env_elem.setAttributeNS(None, 'mode', self.mode)
if self.insert is not None:
env_elem.setAttributeNS(None, 'insert', self.insert)
else:
env_elem.setAttributeNS(None, 'value', self.value)
if self.default:
env_elem.setAttributeNS(None, 'default', self.default)
if self.separator:
env_elem.setAttributeNS(None, 'separator', self.separator)
return env_elem
class ExecutableBinding(Binding):
"""Make the chosen command available in $PATH.
@ivar in_path: True to add the named command to $PATH, False to store in named variable
@type in_path: bool
"""
__slots__ = ['qdom']
def __init__(self, qdom, in_path):
self.qdom = qdom
self.in_path = in_path
def __str__(self):
return str(self.qdom)
__repr__ = __str__
def _toxml(self, doc, prefixes):
return self.qdom.toDOM(doc, prefixes)
@property
def name(self):
return self.qdom.getAttribute('name')
@property
def command(self):
return self.qdom.getAttribute("command") or 'run'
class OverlayBinding(Binding):
"""Make the chosen implementation available by overlaying it onto another part of the file-system.
This is to support legacy programs which use hard-coded paths."""
__slots__ = ['src', 'mount_point']
def __init__(self, src, mount_point):
self.src = src
self.mount_point = mount_point
def __str__(self):
return _("<overlay %(src)s on %(mount_point)s>") % {'src': self.src or '.', 'mount_point': self.mount_point or '/'}
__repr__ = __str__
def _toxml(self, doc, prefixes):
"""Create a DOM element for this binding.
@param doc: document to use to create the element
@return: the new element
"""
env_elem = doc.createElementNS(XMLNS_IFACE, 'overlay')
if self.src is not None:
env_elem.setAttributeNS(None, 'src', self.src)
if self.mount_point is not None:
env_elem.setAttributeNS(None, 'mount-point', self.mount_point)
return env_elem
class Feed(object):
"""An interface's feeds are other interfaces whose implementations can also be
used as implementations of this interface."""
__slots__ = ['uri', 'os', 'machine', 'user_override', 'langs']
def __init__(self, uri, arch, user_override, langs = None):
self.uri = uri
# This indicates whether the feed comes from the user's overrides
# file. If true, writer.py will write it when saving.
self.user_override = user_override
self.os, self.machine = _split_arch(arch)
self.langs = langs
def __str__(self):
return "<Feed from %s>" % self.uri
__repr__ = __str__
arch = property(lambda self: _join_arch(self.os, self.machine))
class Dependency(object):
"""A Dependency indicates that an Implementation requires some additional
code to function. This is an abstract base class.
@ivar qdom: the XML element for this Dependency (since 0launch 0.51)
@type qdom: L{qdom.Element}
@ivar metadata: any extra attributes from the XML element
@type metadata: {str: str}
"""
__slots__ = ['qdom']
Essential = "essential"
Recommended = "recommended"
def __init__(self, element):
assert isinstance(element, qdom.Element), type(element) # Use InterfaceDependency instead!
self.qdom = element
@property
def metadata(self):
return self.qdom.attrs
@property
def importance(self):
return self.qdom.getAttribute("importance") or Dependency.Essential
def get_required_commands(self):
"""Return a list of command names needed by this dependency"""
return []
class InterfaceDependency(Dependency):
"""A Dependency on a Zero Install interface.
@ivar interface: the interface required by this dependency
@type interface: str
@ivar restrictions: a list of constraints on acceptable implementations
@type restrictions: [L{Restriction}]
@ivar bindings: how to make the choice of implementation known
@type bindings: [L{Binding}]
@since: 0.28
"""
__slots__ = ['interface', 'restrictions', 'bindings']
def __init__(self, interface, restrictions = None, element = None):
Dependency.__init__(self, element)
assert isinstance(interface, (str, unicode))
assert interface
self.interface = interface
if restrictions is None:
self.restrictions = []
else:
self.restrictions = restrictions
self.bindings = []
def __str__(self):
return _("<Dependency on %(interface)s; bindings: %(bindings)s%(restrictions)s>") % {'interface': self.interface, 'bindings': self.bindings, 'restrictions': self.restrictions}
def get_required_commands(self):
"""Return a list of command names needed by this dependency"""
if self.qdom.name == 'runner':
commands = [self.qdom.getAttribute('command') or 'run']
else:
commands = []
for b in self.bindings:
c = b.command
if c is not None:
commands.append(c)
return commands
@property
def command(self):
if self.qdom.name == 'runner':
return self.qdom.getAttribute('command') or 'run'
return None
class RetrievalMethod(object):
"""A RetrievalMethod provides a way to fetch an implementation."""
__slots__ = []
class DownloadSource(RetrievalMethod):
"""A DownloadSource provides a way to fetch an implementation."""
__slots__ = ['implementation', 'url', 'size', 'extract', 'start_offset', 'type']
def __init__(self, implementation, url, size, extract, start_offset = 0, type = None):
self.implementation = implementation
self.url = url
self.size = size
self.extract = extract
self.start_offset = start_offset
self.type = type # MIME type - see unpack.py
class Recipe(RetrievalMethod):
"""Get an implementation by following a series of steps.
@ivar size: the combined download sizes from all the steps
@type size: int
@ivar steps: the sequence of steps which must be performed
@type steps: [L{RetrievalMethod}]"""
__slots__ = ['steps']
def __init__(self):
self.steps = []
size = property(lambda self: sum([x.size for x in self.steps]))
class DistributionSource(RetrievalMethod):
"""A package that is installed using the distribution's tools (including PackageKit).
@ivar install: a function to call to install this package
@type install: (L{handler.Handler}) -> L{tasks.Blocker}
@ivar package_id: the package name, in a form recognised by the distribution's tools
@type package_id: str
@ivar size: the download size in bytes
@type size: int
@ivar needs_confirmation: whether the user should be asked to confirm before calling install()
@type needs_confirmation: bool"""
__slots__ = ['package_id', 'size', 'install', 'needs_confirmation']
def __init__(self, package_id, size, install, needs_confirmation = True):
RetrievalMethod.__init__(self)
self.package_id = package_id
self.size = size
self.install = install
self.needs_confirmation = needs_confirmation
class Command(object):
"""A Command is a way of running an Implementation as a program."""
__slots__ = ['qdom', '_depends', '_local_dir', '_runner', '_bindings']
def __init__(self, qdom, local_dir):
"""@param qdom: the <command> element
@param local_dir: the directory containing the feed (for relative dependencies), or None if not local
"""
assert qdom.name == 'command', 'not <command>: %s' % qdom
self.qdom = qdom
self._local_dir = local_dir
self._depends = None
self._bindings = None
path = property(lambda self: self.qdom.attrs.get("path", None))
def _toxml(self, doc, prefixes):
return self.qdom.toDOM(doc, prefixes)
@property
def requires(self):
if self._depends is None:
self._runner = None
depends = []
for child in self.qdom.childNodes:
if child.name == 'requires':
dep = process_depends(child, self._local_dir)
depends.append(dep)
elif child.name == 'runner':
if self._runner:
raise InvalidInterface(_("Multiple <runner>s in <command>!"))
dep = process_depends(child, self._local_dir)
depends.append(dep)
self._runner = dep
self._depends = depends
return self._depends
def get_runner(self):
self.requires # (sets _runner)
return self._runner
def __str__(self):
return str(self.qdom)
@property
def bindings(self):
"""@since: 1.3"""
if self._bindings is None:
bindings = []
for e in self.qdom.childNodes:
if e.uri != XMLNS_IFACE: continue
if e.name in binding_names:
bindings.append(process_binding(e))
self._bindings = bindings
return self._bindings
class Implementation(object):
"""An Implementation is a package which implements an Interface.
@ivar download_sources: list of methods of getting this implementation
@type download_sources: [L{RetrievalMethod}]
@ivar feed: the feed owning this implementation (since 0.32)
@type feed: [L{ZeroInstallFeed}]
@ivar bindings: how to tell this component where it itself is located (since 0.31)
@type bindings: [Binding]
@ivar upstream_stability: the stability reported by the packager
@type upstream_stability: [insecure | buggy | developer | testing | stable | packaged]
@ivar user_stability: the stability as set by the user
	@type user_stability: [insecure | buggy | developer | testing | stable | packaged | preferred]
@ivar langs: natural languages supported by this package
@type langs: str
@ivar requires: interfaces this package depends on
@type requires: [L{Dependency}]
@ivar commands: ways to execute as a program
@type commands: {str: Command}
@ivar metadata: extra metadata from the feed
@type metadata: {"[URI ]localName": str}
@ivar id: a unique identifier for this Implementation
@ivar version: a parsed version number
@ivar released: release date
@ivar local_path: the directory containing this local implementation, or None if it isn't local (id isn't a path)
@type local_path: str | None
@ivar requires_root_install: whether the user will need admin rights to use this
@type requires_root_install: bool
"""
# Note: user_stability shouldn't really be here
__slots__ = ['upstream_stability', 'user_stability', 'langs',
'requires', 'metadata', 'download_sources', 'commands',
'id', 'feed', 'version', 'released', 'bindings', 'machine']
def __init__(self, feed, id):
assert id
self.feed = feed
self.id = id
self.user_stability = None
self.upstream_stability = None
self.metadata = {} # [URI + " "] + localName -> value
self.requires = []
self.version = None
self.released = None
self.download_sources = []
self.langs = ""
self.machine = None
self.bindings = []
self.commands = {}
def get_stability(self):
return self.user_stability or self.upstream_stability or testing
def __str__(self):
return self.id
def __repr__(self):
return "v%s (%s)" % (self.get_version(), self.id)
def __cmp__(self, other):
"""Newer versions come first"""
d = cmp(other.version, self.version)
if d: return d
# If the version number is the same, just give a stable sort order, and
# ensure that two different implementations don't compare equal.
d = cmp(other.feed.url, self.feed.url)
if d: return d
return cmp(other.id, self.id)
def get_version(self):
"""Return the version as a string.
@see: L{format_version}
"""
return format_version(self.version)
arch = property(lambda self: _join_arch(self.os, self.machine))
os = None
local_path = None
digests = None
requires_root_install = False
def _get_main(self):
""""@deprecated: use commands["run"] instead"""
main = self.commands.get("run", None)
if main is not None:
return main.path
return None
def _set_main(self, path):
""""@deprecated: use commands["run"] instead"""
if path is None:
if "run" in self.commands:
del self.commands["run"]
else:
self.commands["run"] = Command(qdom.Element(XMLNS_IFACE, 'command', {'path': path, 'name': 'run'}), None)
main = property(_get_main, _set_main)
def is_available(self, stores):
"""Is this Implementation available locally?
(a local implementation, an installed distribution package, or a cached ZeroInstallImplementation)
@rtype: bool
@since: 0.53
"""
raise NotImplementedError("abstract")
class DistributionImplementation(Implementation):
"""An implementation provided by the distribution. Information such as the version
comes from the package manager.
@ivar package_implementation: the <package-implementation> element that generated this impl (since 1.7)
@type package_implementation: L{qdom.Element}
@since: 0.28"""
__slots__ = ['distro', 'installed', 'package_implementation']
def __init__(self, feed, id, distro, package_implementation = None):
assert id.startswith('package:')
Implementation.__init__(self, feed, id)
self.distro = distro
self.installed = False
self.package_implementation = package_implementation
if package_implementation:
for child in package_implementation.childNodes:
if child.uri != XMLNS_IFACE: continue
if child.name == 'command':
command_name = child.attrs.get('name', None)
if not command_name:
raise InvalidInterface('Missing name for <command>')
self.commands[command_name] = Command(child, local_dir = None)
@property
def requires_root_install(self):
return not self.installed
def is_available(self, stores):
return self.installed
class ZeroInstallImplementation(Implementation):
"""An implementation where all the information comes from Zero Install.
	@ivar digests: a list of "algorithm=value" strings (since 0.45)
@type digests: [str]
@since: 0.28"""
__slots__ = ['os', 'size', 'digests', 'local_path']
def __init__(self, feed, id, local_path):
"""id can be a local path (string starting with /) or a manifest hash (eg "sha1=XXX")"""
assert not id.startswith('package:'), id
Implementation.__init__(self, feed, id)
self.size = None
self.os = None
self.digests = []
self.local_path = local_path
# Deprecated
dependencies = property(lambda self: dict([(x.interface, x) for x in self.requires
if isinstance(x, InterfaceDependency)]))
def add_download_source(self, url, size, extract, start_offset = 0, type = None):
"""Add a download source."""
self.download_sources.append(DownloadSource(self, url, size, extract, start_offset, type))
def set_arch(self, arch):
self.os, self.machine = _split_arch(arch)
arch = property(lambda self: _join_arch(self.os, self.machine), set_arch)
def is_available(self, stores):
if self.local_path is not None:
return os.path.exists(self.local_path)
if self.digests:
path = stores.lookup_maybe(self.digests)
return path is not None
return False # (0compile creates fake entries with no digests)
class Interface(object):
"""An Interface represents some contract of behaviour.
@ivar uri: the URI for this interface.
@ivar stability_policy: user's configured policy.
Implementations at this level or higher are preferred.
Lower levels are used only if there is no other choice.
"""
__slots__ = ['uri', 'stability_policy', 'extra_feeds']
implementations = property(lambda self: self._main_feed.implementations)
name = property(lambda self: self._main_feed.name)
description = property(lambda self: self._main_feed.description)
summary = property(lambda self: self._main_feed.summary)
last_modified = property(lambda self: self._main_feed.last_modified)
feeds = property(lambda self: self.extra_feeds + self._main_feed.feeds)
metadata = property(lambda self: self._main_feed.metadata)
last_checked = property(lambda self: self._main_feed.last_checked)
def __init__(self, uri):
assert uri
if uri.startswith('http:') or uri.startswith('https:') or os.path.isabs(uri):
self.uri = uri
else:
raise SafeException(_("Interface name '%s' doesn't start "
"with 'http:' or 'https:'") % uri)
self.reset()
def _get_feed_for(self):
retval = {}
for key in self._main_feed.feed_for:
retval[key] = True
return retval
feed_for = property(_get_feed_for) # Deprecated (used by 0publish)
def reset(self):
self.extra_feeds = []
self.stability_policy = None
def get_name(self):
from zeroinstall.injector.iface_cache import iface_cache
feed = iface_cache.get_feed(self.uri)
if feed:
return feed.get_name()
return '(' + os.path.basename(self.uri) + ')'
def __repr__(self):
return _("<Interface %s>") % self.uri
def set_stability_policy(self, new):
assert new is None or isinstance(new, Stability)
self.stability_policy = new
def get_feed(self, url):
#import warnings
#warnings.warn("use iface_cache.get_feed instead", DeprecationWarning, 2)
for x in self.extra_feeds:
if x.uri == url:
return x
#return self._main_feed.get_feed(url)
return None
def get_metadata(self, uri, name):
return self._main_feed.get_metadata(uri, name)
@property
def _main_feed(self):
#import warnings
#warnings.warn("use the feed instead", DeprecationWarning, 3)
from zeroinstall.injector import policy
iface_cache = policy.get_deprecated_singleton_config().iface_cache
feed = iface_cache.get_feed(self.uri)
if feed is None:
return _dummy_feed
return feed
def _merge_attrs(attrs, item):
"""Add each attribute of item to a copy of attrs and return the copy.
@type attrs: {str: str}
@type item: L{qdom.Element}
@rtype: {str: str}
"""
new = attrs.copy()
for a in item.attrs:
new[str(a)] = item.attrs[a]
return new
def _get_long(elem, attr_name):
val = elem.getAttribute(attr_name)
if val is not None:
try:
val = int(val)
except ValueError:
raise SafeException(_("Invalid value for integer attribute '%(attribute_name)s': %(value)s") % {'attribute_name': attr_name, 'value': val})
return val
class ZeroInstallFeed(object):
"""A feed lists available implementations of an interface.
@ivar url: the URL for this feed
@ivar implementations: Implementations in this feed, indexed by ID
@type implementations: {str: L{Implementation}}
@ivar name: human-friendly name
@ivar summaries: short textual description (in various languages, since 0.49)
@type summaries: {str: str}
@ivar descriptions: long textual description (in various languages, since 0.49)
@type descriptions: {str: str}
@ivar last_modified: timestamp on signature
@ivar last_checked: time feed was last successfully downloaded and updated
@ivar local_path: the path of this local feed, or None if remote (since 1.7)
@type local_path: str | None
@ivar feeds: list of <feed> elements in this feed
@type feeds: [L{Feed}]
@ivar feed_for: interfaces for which this could be a feed
@type feed_for: set(str)
@ivar metadata: extra elements we didn't understand
"""
# _main is deprecated
__slots__ = ['url', 'implementations', 'name', 'descriptions', 'first_description', 'summaries', 'first_summary', '_package_implementations',
'last_checked', 'last_modified', 'feeds', 'feed_for', 'metadata', 'local_path']
def __init__(self, feed_element, local_path = None, distro = None):
"""Create a feed object from a DOM.
@param feed_element: the root element of a feed file
@type feed_element: L{qdom.Element}
@param local_path: the pathname of this local feed, or None for remote feeds"""
self.local_path = local_path
self.implementations = {}
self.name = None
self.summaries = {} # { lang: str }
self.first_summary = None
self.descriptions = {} # { lang: str }
self.first_description = None
self.last_modified = None
self.feeds = []
self.feed_for = set()
self.metadata = []
self.last_checked = None
self._package_implementations = []
if distro is not None:
import warnings
warnings.warn("distro argument is now ignored", DeprecationWarning, 2)
if feed_element is None:
return # XXX subclass?
assert feed_element.name in ('interface', 'feed'), "Root element should be <interface>, not %s" % feed_element
assert feed_element.uri == XMLNS_IFACE, "Wrong namespace on root element: %s" % feed_element.uri
main = feed_element.getAttribute('main')
#if main: warn("Setting 'main' on the root element is deprecated. Put it on a <group> instead")
if local_path:
self.url = local_path
local_dir = os.path.dirname(local_path)
else:
assert local_path is None
self.url = feed_element.getAttribute('uri')
if not self.url:
raise InvalidInterface(_("<interface> uri attribute missing"))
local_dir = None # Can't have relative paths
min_injector_version = feed_element.getAttribute('min-injector-version')
if min_injector_version:
if parse_version(min_injector_version) > parse_version(version):
raise InvalidInterface(_("This feed requires version %(min_version)s or later of "
"Zero Install, but I am only version %(version)s. "
"You can get a newer version from http://0install.net") %
{'min_version': min_injector_version, 'version': version})
for x in feed_element.childNodes:
if x.uri != XMLNS_IFACE:
self.metadata.append(x)
continue
if x.name == 'name':
self.name = x.content
elif x.name == 'description':
if self.first_description == None:
self.first_description = x.content
self.descriptions[x.attrs.get("http://www.w3.org/XML/1998/namespace lang", 'en')] = x.content
elif x.name == 'summary':
if self.first_summary == None:
self.first_summary = x.content
self.summaries[x.attrs.get("http://www.w3.org/XML/1998/namespace lang", 'en')] = x.content
elif x.name == 'feed-for':
feed_iface = x.getAttribute('interface')
if not feed_iface:
raise InvalidInterface(_('Missing "interface" attribute in <feed-for>'))
self.feed_for.add(feed_iface)
# Bug report from a Debian/stable user that --feed gets the wrong value.
# Can't reproduce (even in a Debian/stable chroot), but add some logging here
# in case it happens again.
debug(_("Is feed-for %s"), feed_iface)
elif x.name == 'feed':
feed_src = x.getAttribute('src')
if not feed_src:
raise InvalidInterface(_('Missing "src" attribute in <feed>'))
if feed_src.startswith('http:') or feed_src.startswith('https:') or local_path:
langs = x.getAttribute('langs')
if langs: langs = langs.replace('_', '-')
self.feeds.append(Feed(feed_src, x.getAttribute('arch'), False, langs = langs))
else:
raise InvalidInterface(_("Invalid feed URL '%s'") % feed_src)
else:
self.metadata.append(x)
if not self.name:
raise InvalidInterface(_("Missing <name> in feed"))
if not self.summary:
raise InvalidInterface(_("Missing <summary> in feed"))
def process_group(group, group_attrs, base_depends, base_bindings, base_commands):
for item in group.childNodes:
if item.uri != XMLNS_IFACE: continue
if item.name not in ('group', 'implementation', 'package-implementation'):
continue
# We've found a group or implementation. Scan for dependencies,
# bindings and commands. Doing this here means that:
# - We can share the code for groups and implementations here.
# - The order doesn't matter, because these get processed first.
# A side-effect is that the document root cannot contain
# these.
depends = base_depends[:]
bindings = base_bindings[:]
commands = base_commands.copy()
for attr, command in [('main', 'run'),
('self-test', 'test')]:
value = item.attrs.get(attr, None)
if value is not None:
commands[command] = Command(qdom.Element(XMLNS_IFACE, 'command', {'name': command, 'path': value}), None)
for child in item.childNodes:
if child.uri != XMLNS_IFACE: continue
if child.name == 'requires':
dep = process_depends(child, local_dir)
depends.append(dep)
elif child.name == 'command':
command_name = child.attrs.get('name', None)
if not command_name:
raise InvalidInterface('Missing name for <command>')
commands[command_name] = Command(child, local_dir)
elif child.name in binding_names:
bindings.append(process_binding(child))
compile_command = item.attrs.get('http://zero-install.sourceforge.net/2006/namespaces/0compile command')
if compile_command is not None:
commands['compile'] = Command(qdom.Element(XMLNS_IFACE, 'command', {'name': 'compile', 'shell-command': compile_command}), None)
item_attrs = _merge_attrs(group_attrs, item)
if item.name == 'group':
process_group(item, item_attrs, depends, bindings, commands)
elif item.name == 'implementation':
process_impl(item, item_attrs, depends, bindings, commands)
elif item.name == 'package-implementation':
if depends:
warn("A <package-implementation> with dependencies in %s!", self.url)
self._package_implementations.append((item, item_attrs))
else:
assert 0
def process_impl(item, item_attrs, depends, bindings, commands):
id = item.getAttribute('id')
if id is None:
raise InvalidInterface(_("Missing 'id' attribute on %s") % item)
local_path = item_attrs.get('local-path')
if local_dir and local_path:
abs_local_path = os.path.abspath(os.path.join(local_dir, local_path))
impl = ZeroInstallImplementation(self, id, abs_local_path)
elif local_dir and (id.startswith('/') or id.startswith('.')):
# For old feeds
id = os.path.abspath(os.path.join(local_dir, id))
impl = ZeroInstallImplementation(self, id, id)
else:
impl = ZeroInstallImplementation(self, id, None)
if '=' in id:
# In older feeds, the ID was the (single) digest
impl.digests.append(id)
if id in self.implementations:
warn(_("Duplicate ID '%(id)s' in feed '%(feed)s'"), {'id': id, 'feed': self})
self.implementations[id] = impl
impl.metadata = item_attrs
try:
version_mod = item_attrs.get('version-modifier', None)
if version_mod:
item_attrs['version'] += version_mod
del item_attrs['version-modifier']
version = item_attrs['version']
except KeyError:
raise InvalidInterface(_("Missing version attribute"))
impl.version = parse_version(version)
impl.commands = commands
impl.released = item_attrs.get('released', None)
impl.langs = item_attrs.get('langs', '').replace('_', '-')
size = item.getAttribute('size')
if size:
impl.size = int(size)
impl.arch = item_attrs.get('arch', None)
try:
stability = stability_levels[str(item_attrs['stability'])]
except KeyError:
stab = str(item_attrs['stability'])
if stab != stab.lower():
					raise InvalidInterface(_('Stability "%s" invalid - use lower case!') % stab)
raise InvalidInterface(_('Stability "%s" invalid') % item_attrs['stability'])
if stability >= preferred:
raise InvalidInterface(_("Upstream can't set stability to preferred!"))
impl.upstream_stability = stability
impl.bindings = bindings
impl.requires = depends
for elem in item.childNodes:
if elem.uri != XMLNS_IFACE: continue
if elem.name == 'archive':
url = elem.getAttribute('href')
if not url:
raise InvalidInterface(_("Missing href attribute on <archive>"))
size = elem.getAttribute('size')
if not size:
raise InvalidInterface(_("Missing size attribute on <archive>"))
impl.add_download_source(url = url, size = int(size),
extract = elem.getAttribute('extract'),
start_offset = _get_long(elem, 'start-offset'),
type = elem.getAttribute('type'))
elif elem.name == 'manifest-digest':
for aname, avalue in elem.attrs.iteritems():
if ' ' not in aname:
impl.digests.append('%s=%s' % (aname, avalue))
elif elem.name == 'recipe':
recipe = Recipe()
for recipe_step in elem.childNodes:
if recipe_step.uri == XMLNS_IFACE and recipe_step.name == 'archive':
url = recipe_step.getAttribute('href')
if not url:
raise InvalidInterface(_("Missing href attribute on <archive>"))
size = recipe_step.getAttribute('size')
if not size:
raise InvalidInterface(_("Missing size attribute on <archive>"))
recipe.steps.append(DownloadSource(None, url = url, size = int(size),
extract = recipe_step.getAttribute('extract'),
start_offset = _get_long(recipe_step, 'start-offset'),
type = recipe_step.getAttribute('type')))
else:
info(_("Unknown step '%s' in recipe; skipping recipe"), recipe_step.name)
break
else:
impl.download_sources.append(recipe)
root_attrs = {'stability': 'testing'}
root_commands = {}
if main:
info("Note: @main on document element is deprecated in %s", self)
root_commands['run'] = Command(qdom.Element(XMLNS_IFACE, 'command', {'path': main, 'name': 'run'}), None)
process_group(feed_element, root_attrs, [], [], root_commands)
def get_distro_feed(self):
"""Does this feed contain any <pacakge-implementation> elements?
i.e. is it worth asking the package manager for more information?
@return: the URL of the virtual feed, or None
@since: 0.49"""
if self._package_implementations:
return "distribution:" + self.url
return None
def get_package_impls(self, distro):
"""Find the best <pacakge-implementation> element(s) for the given distribution.
@param distro: the distribution to use to rate them
@type distro: L{distro.Distribution}
@return: a list of tuples for the best ranked elements
		@rtype: [(L{qdom.Element}, {str: str})]
@since: 0.49"""
best_score = 0
best_impls = []
for item, item_attrs in self._package_implementations:
distro_names = item_attrs.get('distributions', '')
for distro_name in distro_names.split(' '):
score = distro.get_score(distro_name)
if score > best_score:
best_score = score
best_impls = []
if score == best_score:
best_impls.append((item, item_attrs))
return best_impls
def get_name(self):
return self.name or '(' + os.path.basename(self.url) + ')'
def __repr__(self):
return _("<Feed %s>") % self.url
def set_stability_policy(self, new):
assert new is None or isinstance(new, Stability)
self.stability_policy = new
def get_feed(self, url):
for x in self.feeds:
if x.uri == url:
return x
return None
def add_metadata(self, elem):
self.metadata.append(elem)
def get_metadata(self, uri, name):
"""Return a list of interface metadata elements with this name and namespace URI."""
return [m for m in self.metadata if m.name == name and m.uri == uri]
@property
def summary(self):
return _best_language_match(self.summaries) or self.first_summary
@property
def description(self):
return _best_language_match(self.descriptions) or self.first_description
def get_replaced_by(self):
"""Return the URI of the interface that replaced the one with the URI of this feed's URL.
This is the value of the feed's <replaced-by interface'...'/> element.
@return: the new URI, or None if it hasn't been replaced
@since: 1.7"""
for child in self.metadata:
if child.uri == XMLNS_IFACE and child.name == 'replaced-by':
new_uri = child.getAttribute('interface')
if new_uri and (new_uri.startswith('http:') or new_uri.startswith('https:') or self.local_path):
return new_uri
return None
class DummyFeed(object):
"""Temporary class used during API transition."""
last_modified = None
name = '-'
last_checked = property(lambda self: None)
implementations = property(lambda self: {})
feeds = property(lambda self: [])
summary = property(lambda self: '-')
description = property(lambda self: '')
def get_name(self): return self.name
def get_feed(self, url): return None
def get_metadata(self, uri, name): return []
_dummy_feed = DummyFeed()
def unescape(uri):
"""Convert each %20 to a space, etc.
@rtype: str"""
uri = uri.replace('#', '/')
if '%' not in uri: return uri
return re.sub('%[0-9a-fA-F][0-9a-fA-F]',
lambda match: chr(int(match.group(0)[1:], 16)),
uri).decode('utf-8')
def escape(uri):
"""Convert each space to %20, etc
@rtype: str"""
return re.sub('[^-_.a-zA-Z0-9]',
lambda match: '%%%02x' % ord(match.group(0)),
uri.encode('utf-8'))
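# Illustrative sketch (not part of the original module): how escape() and
# unescape() round-trip a URI, e.g. when building cache file names.
#
#     escape('http://example.com/feed.xml')
#     # -> 'http%3a%2f%2fexample.com%2ffeed.xml'
#     unescape('http%3a%2f%2fexample.com%2ffeed.xml')
#     # -> 'http://example.com/feed.xml'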
def _pretty_escape(uri):
"""Convert each space to %20, etc
: is preserved and / becomes #. This makes for nicer strings,
and may replace L{escape} everywhere in future.
@rtype: str"""
if os.name == "posix":
# Only preserve : on Posix systems
preserveRegex = '[^-_.a-zA-Z0-9:/]'
else:
# Other OSes may not allow the : character in file names
preserveRegex = '[^-_.a-zA-Z0-9/]'
return re.sub(preserveRegex,
lambda match: '%%%02x' % ord(match.group(0)),
uri.encode('utf-8')).replace('/', '#')
def canonical_iface_uri(uri):
"""If uri is a relative path, convert to an absolute one.
A "file:///foo" URI is converted to "/foo".
An "alias:prog" URI expands to the URI in the 0alias script
Otherwise, return it unmodified.
@rtype: str
@raise SafeException: if uri isn't valid
"""
if uri.startswith('http://') or uri.startswith('https://'):
if uri.count("/") < 3:
raise SafeException(_("Missing / after hostname in URI '%s'") % uri)
return uri
elif uri.startswith('file:///'):
path = uri[7:]
elif uri.startswith('file:'):
if uri[5] == '/':
raise SafeException(_('Use file:///path for absolute paths, not {uri}').format(uri = uri))
path = os.path.abspath(uri[5:])
elif uri.startswith('alias:'):
from zeroinstall import alias, support
alias_prog = uri[6:]
if not os.path.isabs(alias_prog):
full_path = support.find_in_path(alias_prog)
if not full_path:
raise alias.NotAnAliasScript("Not found in $PATH: " + alias_prog)
else:
full_path = alias_prog
return alias.parse_script(full_path).uri
else:
path = os.path.realpath(uri)
if os.path.isfile(path):
return path
raise SafeException(_("Bad interface name '%(uri)s'.\n"
"(doesn't start with 'http:', and "
"doesn't exist as a local file '%(interface_uri)s' either)") %
{'uri': uri, 'interface_uri': path})
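# Illustrative examples (not part of the original module); the URIs and paths
# below are hypothetical:
#   canonical_iface_uri('http://example.com/prog.xml')  -> returned unchanged
#   canonical_iface_uri('file:///opt/feeds/prog.xml')   -> '/opt/feeds/prog.xml' (if that file exists)
#   canonical_iface_uri('./prog.xml')                   -> absolute real path if the file exists,
#                                                          otherwise SafeException is raised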
_version_mod_to_value = {
'pre': -2,
'rc': -1,
'': 0,
'post': 1,
}
# Reverse mapping
_version_value_to_mod = {}
for x in _version_mod_to_value: _version_value_to_mod[_version_mod_to_value[x]] = x
del x
_version_re = re.compile('-([a-z]*)')
def parse_version(version_string):
"""Convert a version string to an internal representation.
The parsed format can be compared quickly using the standard Python functions.
- Version := DottedList ("-" Mod DottedList?)*
- DottedList := (Integer ("." Integer)*)
@rtype: tuple (opaque)
@raise SafeException: if the string isn't a valid version
    @since: 0.24 (moved from L{reader}, from where it is still available)"""
if version_string is None: return None
parts = _version_re.split(version_string)
if parts[-1] == '':
del parts[-1] # Ends with a modifier
else:
parts.append('')
if not parts:
raise SafeException(_("Empty version string!"))
l = len(parts)
try:
for x in range(0, l, 2):
part = parts[x]
if part:
parts[x] = map(int, parts[x].split('.'))
else:
parts[x] = [] # (because ''.split('.') == [''], not [])
for x in range(1, l, 2):
parts[x] = _version_mod_to_value[parts[x]]
return parts
except ValueError as ex:
raise SafeException(_("Invalid version format in '%(version_string)s': %(exception)s") % {'version_string': version_string, 'exception': ex})
except KeyError as ex:
raise SafeException(_("Invalid version modifier in '%(version_string)s': %(exception)s") % {'version_string': version_string, 'exception': ex})
def format_version(version):
"""Format a parsed version for display. Undoes the effect of L{parse_version}.
@see: L{Implementation.get_version}
@rtype: str
@since: 0.24"""
version = version[:]
l = len(version)
for x in range(0, l, 2):
version[x] = '.'.join(map(str, version[x]))
for x in range(1, l, 2):
version[x] = '-' + _version_value_to_mod[version[x]]
if version[-1] == '-': del version[-1]
return ''.join(version)
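# Illustrative examples (not part of the original module), derived from the
# grammar documented in parse_version() above:
#   parse_version('1.0-pre3')                   -> [[1, 0], -2, [3], 0]
#   format_version(parse_version('1.0-pre3'))   -> '1.0-pre3'
#   parse_version('1.0-pre3') < parse_version('1.0') < parse_version('1.0-post')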
|
lgpl-2.1
| -2,451,330,524,096,340,000
| 33.426958
| 177
| 0.683326
| false
| 3.312971
| false
| false
| false
|
lemonade512/BluebonnetsPointsApp
|
bluebonnetspointsapp/routes.py
|
1
|
8807
|
"""`main` is the top level module for your Flask application."""
import logging
import json
from flask import Flask, request, redirect, url_for, jsonify
from flask_restful import Resource, Api
from google.appengine.api import users
from google.appengine.ext import deferred
from datetime import datetime
from models.user_model import UserData
from models.point_model import PointException, PointCategory, PointRecord
from models.event_model import Event
from utils.jinja import render_jinja_template
from permissions import require_permissions
from utils.update_schema import run_update_schema
# Create the flask app
app = Flask(__name__)
api = Api(app)
# *************************************************************************** #
# FLASK ROUTES #
# *************************************************************************** #
@app.route('/')
def index():
template_values = {
'active_page': 'home',
'target_user': UserData.get_current_user_data(),
}
if UserData.get_current_user_data():
return render_jinja_template("dashboard.html", template_values)
else:
return render_jinja_template("index.html", template_values)
@app.route('/dashboard')
@app.route('/dashboard/<user_url_segment>')
@require_permissions(['self', 'officer'], logic='or')
def dashboard(user_url_segment=None):
if user_url_segment is None:
target_user = UserData.get_current_user_data()
else:
target_user = UserData.get_from_url_segment(user_url_segment)
if target_user is None:
template_values = {
'target_user': user_url_segment,
}
return render_jinja_template("noprofile.html", template_values), 404
if target_user.username != user_url_segment:
return redirect('/dashboard/{0}'.format(target_user.username))
    # If looking at the current user's profile, highlight the user's name in the
    # nav bar
    if target_user == UserData.get_current_user_data():
        return redirect('/')
else:
active = None
template_values = {
'target_user': target_user,
}
return render_jinja_template("dashboard.html", template_values)
@app.route('/admin')
@require_permissions(['admin'])
def admin():
template_values = {
'active_page': 'admin',
}
return render_jinja_template("admin.html", template_values)
@app.route('/members')
@require_permissions(['officer'])
def members():
template_values = {
'active_page': 'members',
'users': UserData.query().order(UserData.first_name),
}
return render_jinja_template("members.html", template_values)
@app.route('/permissions')
@require_permissions(['officer'])
def permissions():
template_values = {
'active_page': "permissions",
'users': UserData.query().order(UserData.first_name),
}
return render_jinja_template("permissions.html", template_values)
# TODO (phillip): The only people who should be able to view a user's profile page are
# officers and the user himself
@app.route('/profile/<user_url_segment>')
@require_permissions(['self', 'officer'], logic='or')
def profile(user_url_segment):
target_user = UserData.get_from_url_segment(user_url_segment)
if target_user is None:
template_values = {
'target_user': user_url_segment,
}
return render_jinja_template("noprofile.html", template_values), 404
if target_user.username != user_url_segment:
return redirect('/profile/{0}'.format(target_user.username))
    # If looking at the current user's profile, highlight the user's name in the
    # nav bar
if target_user == UserData.get_current_user_data():
active = 'profile'
else:
active = None
template_values = {
'active_page': active,
'target_user': target_user,
}
return render_jinja_template("profile.html", template_values)
@app.route('/login')
def login():
next_url = url_for("postlogin", next=request.args.get("next", "/"))
template_values = {
'active_page': 'login',
'google_login_url': users.create_login_url(next_url),
}
return render_jinja_template("login.html", template_values)
# TODO (phillip): There might be an issue if the user logs into their google account then doesn't
# go through the signup process. Then if they click the back button a few times they will
# be logged into their google account but not have their UserData setup which could be
# an issue. Just make sure to be careful of that
@app.route('/postlogin')
def postlogin():
""" Handler for just after a user has logged in
This takes care of making sure the user has properly setup their account.
"""
next_url = request.args.get("next", "/")
user_data = UserData.get_current_user_data()
if not user_data:
# Need to create a user account
signup_url = url_for("signup", next=next_url)
return redirect(signup_url)
else:
return redirect(next_url)
@app.route('/signup')
def signup():
template_values = {
'next': request.args.get("next", "/"),
}
return render_jinja_template("signup.html", template_values)
@app.route('/point-categories')
@require_permissions(['officer'])
def point_categories():
template_values = {
'active_page': 'point-categories',
}
return render_jinja_template('point-categories.html', template_values)
@app.route('/events')
@require_permissions(['officer'])
def event_list():
template_values = {
'active_page': 'events',
}
return render_jinja_template('events.html', template_values)
# TODO (phillip): handle the case when the event does not exist
@app.route('/events/<event>')
def event(event):
event = Event.get_from_name(event)
template_values = {
'target_event': event,
}
return render_jinja_template('event.html', template_values)
# **************************************************************************** #
# Error Handlers #
# **************************************************************************** #
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return render_jinja_template("404.html"), 404
@app.errorhandler(500)
def application_error(e):
"""Return a custom 500 error."""
template_values = {
'msg': "Sorry, unexpected error: {}".format(e)
}
return render_jinja_template("500.html", template_values), 500
# *************************************************************************** #
# REST API ENDPOINTS #
# *************************************************************************** #
from controllers.event_controller import EventAPI, EventListAPI
from controllers.exception_controller import ExceptionAPI, ExceptionListAPI
from controllers.permission_controller import PermissionAPI, PermissionListAPI
from controllers.point_controller import PointRecordAPI, PointCategoryAPI, PointCategoryListAPI
from controllers.user_controller import UserAPI, UserListAPI, UserPointsAPI
api.add_resource(UserListAPI, '/api/users', endpoint='users')
api.add_resource(UserAPI, '/api/users/<string:user_id>', endpoint='user')
api.add_resource(ExceptionListAPI, '/api/users/<string:user_id>/point-exceptions')
api.add_resource(ExceptionAPI, '/api/users/<string:user_id>/point-exceptions/<int:index>')
api.add_resource(PermissionListAPI, '/api/users/<string:user_id>/permissions')
api.add_resource(PermissionAPI, '/api/users/<string:user_id>/permissions/<string:perm>')
api.add_resource(PointCategoryListAPI, '/api/point-categories')
api.add_resource(PointCategoryAPI, '/api/point-categories/<string:name>')
api.add_resource(EventListAPI, '/api/events')
api.add_resource(EventAPI, '/api/events/<string:event>')
api.add_resource(PointRecordAPI, '/api/point-records')
api.add_resource(UserPointsAPI, '/api/users/<string:user_id>/points')
# *************************************************************************** #
# ADMIN #
# *************************************************************************** #
@app.route("/admin/updateschema")
def updateschema():
# NOTE: Sometimes there can be issues with the prerendering done by the
# chrome address bar. In that case, you might see duplicate GET requests.
# Be very aware of this when updating schema or going to endpoints that
# could potentially destroy user data.
deferred.defer(run_update_schema)
return 'Schema migration successfully initiated.'
if __name__ == "__main__":
    logging.getLogger().setLevel(logging.DEBUG)
|
gpl-3.0
| 1,609,514,002,009,681,000
| 35.695833
| 97
| 0.622459
| false
| 4.006824
| false
| false
| false
|
evanunderscore/pygnurl
|
pygnurl/callback_mananger.py
|
1
|
1302
|
"""Callback management utilities"""
import collections
from ctypes import * # pylint: disable=wildcard-import,unused-wildcard-import
import logging
class CallbackManager(object):
"""Manager for ctypes DLL hooks"""
def __init__(self, dll):
self.dll = dll
self.hooks = collections.defaultdict(dict)
self.logger = logging.getLogger(__name__)
def install(self, name, func):
"""
Install a callback function ensuring a reference is kept.
:param name: name of function to install
:param func: callback function to install
"""
self.logger.debug('installing callback for %s in %s', name, self.dll)
self._install(name, func)
def uninstall(self, name):
"""
Remove an installed callback function.
:param name: name of function to uninstall
"""
self.logger.debug('uninstalling callback for %s in %s', name, self.dll)
self._install(name)
def _install(self, name, func=None):
"""Install or remove a callback function"""
# install the callback function
# pylint: disable=no-member
c_void_p.in_dll(self.dll, name).value = cast(func, c_void_p).value
# store the function so it doesn't get GC'd
self.hooks[name] = func
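# Illustrative usage sketch (not part of the original module); the library name,
# symbol name and callback signature below are hypothetical assumptions:
#   from ctypes import CDLL, CFUNCTYPE, c_int
#   dll = CDLL('libreadline.so.6')
#   manager = CallbackManager(dll)
#   HOOK = CFUNCTYPE(c_int)
#   hook = HOOK(lambda: 0)                    # reference kept alive by the manager
#   manager.install('rl_startup_hook', hook)
#   ...
#   manager.uninstall('rl_startup_hook')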
|
gpl-2.0
| 4,814,608,374,765,069,000
| 34.189189
| 79
| 0.6298
| false
| 4.2
| false
| false
| false
|
eHealthAfrica/LMIS
|
LMIS/inventory/api/serializers.py
|
1
|
2227
|
"""
Serializer for Inventory App related API end-points
"""
#import LMIS project modules
from core.api.serializers import BaseModelSerializer
from inventory.models import (Inventory, InventoryLine, ConsumptionRecord, ConsumptionRecordLine, IncomingShipment,
IncomingShipmentLine, OutgoingShipment, OutgoingShipmentLine)
class InventoryLineSerializer(BaseModelSerializer):
"""
Inventory Line serializer for Inventory records
"""
class Meta:
model = InventoryLine
class InventorySerializer(BaseModelSerializer):
"""
Inventory Model serializer
"""
class Meta:
model = Inventory
class ConsumptionRecordSerializer(BaseModelSerializer):
"""
Consumption Record Serializer used by the API endpoint to serialize Consumption records
"""
class Meta:
model = ConsumptionRecord
class ConsumptionRecordLineSerializer(BaseModelSerializer):
"""
ConsumptionRecordLine Serializer used by the API end-point to serialize ConsumptionRecordLine records
"""
class Meta:
model = ConsumptionRecordLine
class IncomingShipmentSerializer(BaseModelSerializer):
"""
IncomingShipmentSerializer used by the API end-point
"""
class Meta:
model = IncomingShipment
fields = ('supplier', 'stock_entry_type', 'input_warehouse', 'other', 'other_source', 'is_deleted',
'incoming_shipment_lines',)
class IncomingShipmentLineSerializer(BaseModelSerializer):
"""
IncomingShipmentSerializer used by the API end-point
"""
class Meta:
model = IncomingShipmentLine
class OutgoingShipmentSerializer(BaseModelSerializer):
"""
OutgoingShipmentSerializer is used by the API end-point to serialize OutgoingShipment records
"""
class Meta:
model = OutgoingShipment
fields = ('recipient', 'output_warehouse', 'status', 'is_deleted', 'outgoing_shipment_lines')
class OutgoingShipmentLineSerializer(BaseModelSerializer):
"""
OutgoingShipmentLineSerializer is used by the API end-points to serialize OutgoingShipmentLine records
"""
class Meta:
model = OutgoingShipmentLine
|
gpl-2.0
| -7,145,018,752,463,819,000
| 28.315789
| 115
| 0.705433
| false
| 4.668763
| false
| false
| false
|
hoto17296/flask-minitwit
|
server/views.py
|
1
|
3272
|
from flask import request, session, url_for, redirect, render_template, abort, g, flash
from . import app
from .lib import Auth, AuthError, User, Timeline
@app.before_request
def before_request():
g.auth = Auth(session, app.config.get('SECRET_KEY'))
@app.route('/')
def timeline():
if not g.auth.authorized():
return redirect(url_for('public_timeline'))
return render_template('timeline.html', timeline=Timeline.following(g.auth.user))
@app.route('/public')
def public_timeline():
return render_template('timeline.html', timeline=Timeline.public())
@app.route('/<name>')
def user_timeline(name):
user = User.find_by('name', name)
if user is None:
abort(404)
following = False
if g.auth.authorized():
following = g.auth.user.is_following(user)
return render_template('timeline.html', timeline=Timeline.user(user), following=following)
@app.route('/<name>/follow')
def follow_user(name):
if not g.auth.authorized():
abort(401)
user = User.find_by('name', name)
if user is None:
abort(404)
g.auth.user.follow(user)
flash('You are now following "%s"' % name)
return redirect(url_for('user_timeline', name=name))
@app.route('/<name>/unfollow')
def unfollow_user(name):
if not g.auth.authorized():
abort(401)
user = User.find_by('name', name)
if user is None:
abort(404)
g.auth.user.unfollow(user)
flash('You are no longer following "%s"' % name)
return redirect(url_for('user_timeline', name=name))
@app.route('/add_message', methods=['POST'])
def add_message():
if not g.auth.authorized():
abort(401)
if request.form['text']:
g.auth.user.post_message(request.form['text'])
flash('Your message was recorded')
return redirect(url_for('timeline'))
@app.route('/login', methods=['GET', 'POST'])
def login():
if g.auth.authorized():
return redirect(url_for('timeline'))
error = None
if request.method == 'POST':
try:
g.auth.login(request.form['name'], request.form['password'])
flash('You were logged in')
return redirect(url_for('timeline'))
except AuthError as err:
error = str(err)
return render_template('login.html', error=error)
@app.route('/register', methods=['GET', 'POST'])
def register():
if g.auth.authorized():
return redirect(url_for('timeline'))
error = None
if request.method == 'POST':
try:
if request.form['password'] != request.form['password2']:
raise AuthError('The two passwords do not match')
g.auth.register({
'name': request.form['name'],
'email': request.form['email'],
'password': request.form['password'],
})
flash('You were successfully registered')
g.auth.login(request.form['name'], request.form['password'])
return redirect(url_for('timeline'))
except AuthError as err:
error = str(err)
return render_template('register.html', error=error)
@app.route('/logout')
def logout():
flash('You were logged out')
g.auth.logout()
return redirect(url_for('public_timeline'))
|
bsd-3-clause
| -7,375,406,376,846,156,000
| 29.018349
| 94
| 0.61522
| false
| 3.743707
| false
| false
| false
|
snare/voltron
|
voltron/styles.py
|
1
|
2382
|
from pygments.style import Style
from pygments.token import Token, Comment, Name, Keyword, Generic, Number, Operator, String, Punctuation, Error
BASE03 = '#002b36'
BASE02 = '#073642'
BASE01 = '#586e75'
BASE00 = '#657b83'
BASE0 = '#839496'
BASE1 = '#93a1a1'
BASE2 = '#eee8d5'
BASE3 = '#fdf6e3'
YELLOW = '#b58900'
ORANGE = '#cb4b16'
RED = '#dc322f'
MAGENTA = '#d33682'
VIOLET = '#6c71c4'
BLUE = '#268bd2'
CYAN = '#2aa198'
GREEN = '#859900'
class VolarizedStyle(Style):
background_color = BASE03
styles = {
Keyword: GREEN,
Keyword.Constant: ORANGE,
Keyword.Declaration: BASE1,
Keyword.Namespace: ORANGE,
# Keyword.Pseudo
Keyword.Reserved: BLUE,
Keyword.Type: VIOLET,
Name: BASE1,
Name.Attribute: BASE1,
Name.Builtin: YELLOW,
Name.Builtin.Pseudo: YELLOW,
Name.Class: BLUE,
Name.Constant: ORANGE,
Name.Decorator: BLUE,
Name.Entity: ORANGE,
Name.Exception: YELLOW,
Name.Function: BLUE,
Name.Label: BASE01,
# Name.Namespace
# Name.Other
Name.Tag: BLUE,
Name.Variable: BLUE,
# Name.Variable.Class
# Name.Variable.Global
# Name.Variable.Instance
# Literal
# Literal.Date
String: BASE1,
String.Backtick: BASE01,
String.Char: BASE1,
String.Doc: CYAN,
# String.Double
String.Escape: RED,
String.Heredoc: CYAN,
# String.Interpol
# String.Other
String.Regex: RED,
# String.Single
# String.Symbol
Number: CYAN,
# Number.Float
# Number.Hex
# Number.Integer
# Number.Integer.Long
# Number.Oct
Operator: GREEN,
Operator.Word: GREEN,
Punctuation: BASE00,
Comment: BASE00,
# Comment.Multiline
Comment.Preproc: GREEN,
# Comment.Single
Comment.Special: GREEN,
# Generic
Generic.Deleted: CYAN,
Generic.Emph: 'italic',
Generic.Error: RED,
Generic.Heading: ORANGE,
Generic.Inserted: GREEN,
# Generic.Output
Generic.Prompt: RED,
Generic.Strong: 'bold',
Generic.Subheading: ORANGE,
# Generic.Traceback
Token: BASE1,
Token.Other: ORANGE,
Error: RED
}
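# Illustrative usage sketch (not part of the original module): a Pygments style
# class such as the one above can be passed to any formatter, e.g.
#   from pygments import highlight
#   from pygments.lexers import PythonLexer
#   from pygments.formatters import Terminal256Formatter
#   print(highlight('x = 42', PythonLexer(), Terminal256Formatter(style=VolarizedStyle)))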
|
mit
| -600,035,029,460,470,800
| 22.82
| 111
| 0.569689
| false
| 3.598187
| false
| false
| false
|
rrafiringa/is210-week-04-synthesizing
|
task_01.py
|
1
|
1415
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Temperature conversion """
import decimal
decimal.getcontext().prec = 5
ABSOLUTE_DIFFERENCE = decimal.Decimal('273.15')
def fahrenheit_to_kelvin(degrees):
""" Convert temperature from Farenheit to Kelvin units.
Args:
degrees (float): Farenheit temperature units to convert.
Returns:
Decimal: Temperature unit in Kelvin.
Examples:
>>> task_01.fahrenheit_to_kelvin(212)
Decimal('373.15')
"""
kelvin = celsius_to_kelvin(fahrenheit_to_celsius(degrees))
return kelvin
def celsius_to_kelvin(degrees):
""" Convert temperature from Celsius to Kelvin units.
Args:
degrees (float): Celsius units to convert.
Returns:
Decimal: Temperature unit in Kelvin.
Examples:
>>> task_01.celsius_to_kelvin(100)
Decimal('373.15')
"""
kelvin = decimal.Decimal(degrees) + ABSOLUTE_DIFFERENCE
return kelvin
def fahrenheit_to_celsius(degrees):
""" Convert temperature from Farenheit to Celsius units.
Args:
degrees (float): Farenheit value to convert to Celsius
Returns:
Decimal: Temperature unit in Celsius.
Examples:
>>> task_01.fahrenheit_to_celsius(212)
Decimal('100')
"""
celsius = decimal.Decimal(5) * \
decimal.Decimal(float(degrees) - 32) / decimal.Decimal(9)
return celsius
|
mpl-2.0
| 3,742,161,318,189,985,000
| 21.109375
| 65
| 0.642403
| false
| 3.223235
| false
| false
| false
|
grbot/agd
|
laura/add-ANC-to-vcf.py
|
1
|
3691
|
#!/usr/bin/env python
# Author: Jeffrey M Kidd
# 2 September 2011
# add-ANC-to-vcf.py
# adds ancestral annotation based on Ensembl, taken from a genomedata archive
# you'll have to do your own filtering based on qual etc.
import sys
import os
import genomedata
import math
from genomedata import Genome
from optparse import OptionParser
USAGE = """
add-ANC-to-vcf.py --in <vcf file to process> --out <new VCF file name> -g <in/out is gzipped>
--genomedata <path to genomedata archive with ancestral states ('anc' track)>
Adds ancestral state to SNPs in a VCF, based on values in the genomedata archive (in the 'anc' track).
Use -g if input VCF is gzipped, output file will also be gzipped.
Note: current version assumes all variants in VCF are SNPs.
"""
parser = OptionParser(USAGE)
parser.add_option('--in',dest='inVCF', help = 'input VCF file')
parser.add_option('--out',dest='outVCF', help = 'output VCF file')
parser.add_option('-g',dest='isGzip', action='store_true', default = False, help = 'output VCF file')
parser.add_option('--genomedata',dest='genomedata', help = 'genomedata archive with ancestral states')
(options, args) = parser.parse_args()
if options.inVCF is None:
parser.error('input VCF not given')
if options.outVCF is None:
parser.error('output VCF not given')
if options.genomedata is None:
parser.error('genomedata archive not given')
###############################################################################
# try to open up the genomedata archive
try:
genome = Genome(options.genomedata)
except:
print "ERROR!! Couldn't open the genomedata archive: " + options.genomedata + "\n"
sys.exit(1)
#setup file open/close with or without gzip
if options.isGzip is True:
try:
gc = 'gunzip -c ' + options.inVCF
inFile = os.popen(gc, 'r')
except:
print "ERROR!! Couldn't open the file" + options.inVCF + " (with gzip)\n"
sys.exit(1)
try:
gc = 'gzip > ' + options.outVCF
outFile = os.popen(gc, 'w')
except:
print "ERROR!! Couldn't open the output file" + options.outVCF + " (with gzip)\n"
sys.exit(1)
else:
inFile = open(options.inVCF,'r')
outFile = open(options.outVCF,'w')
# read through VCF file up to the chrom line, we will then add additional info fields
line = inFile.readline()
while line.split('\t')[0] != '#CHROM':
outFile.write(line)
line = inFile.readline()
# at this point, line is the 'header' line of the VCF. Output header for the ANC info line
ancInfoLine = '##INFO=<ID=ANC,Number=1,Type=Character,Description="ancestral state from ensemble">\n'
outFile.write(ancInfoLine)
outFile.write(line)
# rest of the VCF file should now just be the variants
# Set current chrom as something that isn't a chrom
currentChrom = 'notAChrom'
while True:
line = inFile.readline()
if line == "":
break
line = line.rstrip()
line = line.split('\t')
if line[0] != currentChrom:
chrom = genome[line[0]]
currentChrom = line[0]
pos = int(line[1]) - 1 #switch to zero based indexing
score = chrom[pos,'anc']
    # check to see if there is an ancestral state for the position, if not output line and continue
    # We should probably check to see if the variant is not a SNP, as the ancestral state isn't well defined
# for non-SNP variants
if math.isnan(score):
anc = '.'
else:
anc = chr(score)
if line[7] == '.':
line[7] = 'ANC=%s' % (anc)
else :
rsField = ';ANC=%s' % (anc)
line[7] += rsField
line = '\t'.join(line)
line = line + '\n'
outFile.write(line)
genome.close()
inFile.close()
outFile.close()
|
mit
| 3,805,306,246,244,782,000
| 27.175573
| 102
| 0.636684
| false
| 3.328224
| false
| false
| false
|
Hillshum/gPodder-tagging
|
src/gpodder/gtkui/desktop/episodeselector.py
|
1
|
18139
|
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2010 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import gtk
import pango
from xml.sax import saxutils
import gpodder
_ = gpodder.gettext
N_ = gpodder.ngettext
from gpodder import util
from gpodder.liblogger import log
from gpodder.gtkui.interface.common import BuilderWidget
class gPodderEpisodeSelector(BuilderWidget):
"""Episode selection dialog
Optional keyword arguments that modify the behaviour of this dialog:
- callback: Function that takes 1 parameter which is a list of
the selected episodes (or empty list when none selected)
- remove_callback: Function that takes 1 parameter which is a list
of episodes that should be "removed" (see below)
(default is None, which means remove not possible)
- remove_action: Label for the "remove" action (default is "Remove")
- remove_finished: Callback after all remove callbacks have finished
(default is None, also depends on remove_callback)
It will get a list of episode URLs that have been
removed, so the main UI can update those
- episodes: List of episodes that are presented for selection
- selected: (optional) List of boolean variables that define the
default checked state for the given episodes
- selected_default: (optional) The default boolean value for the
checked state if no other value is set
(default is False)
    - columns: List of (name, sort_name, sort_type, caption) tuples for the
columns, the name is the attribute name of the episode to be
read from each episode object. The sort name is the
attribute name of the episode to be used to sort this column.
If the sort_name is None it will use the attribute name for
sorting. The sort type is the type of the sort column.
The caption attribute is the text that appear as column caption
(default is [('title_markup', None, None, 'Episode'),])
- title: (optional) The title of the window + heading
- instructions: (optional) A one-line text describing what the
user should select / what the selection is for
- stock_ok_button: (optional) Will replace the "OK" button with
another GTK+ stock item to be used for the
affirmative button of the dialog (e.g. can
be gtk.STOCK_DELETE when the episodes to be
selected will be deleted after closing the
dialog)
- selection_buttons: (optional) A dictionary with labels as
keys and callbacks as values; for each
key a button will be generated, and when
the button is clicked, the callback will
be called for each episode and the return
value of the callback (True or False) will
be the new selected state of the episode
- size_attribute: (optional) The name of an attribute of the
supplied episode objects that can be used to
calculate the size of an episode; set this to
None if no total size calculation should be
done (in cases where total size is useless)
(default is 'length')
- tooltip_attribute: (optional) The name of an attribute of
the supplied episode objects that holds
the text for the tooltips when hovering
over an episode (default is 'description')
"""
finger_friendly_widgets = ['btnCancel', 'btnOK', 'btnCheckAll', 'btnCheckNone', 'treeviewEpisodes']
COLUMN_INDEX = 0
COLUMN_TOOLTIP = 1
COLUMN_TOGGLE = 2
COLUMN_ADDITIONAL = 3
def new( self):
self._config.connect_gtk_window(self.gPodderEpisodeSelector, 'episode_selector', True)
if not hasattr( self, 'callback'):
self.callback = None
if not hasattr(self, 'remove_callback'):
self.remove_callback = None
if not hasattr(self, 'remove_action'):
self.remove_action = _('Remove')
if not hasattr(self, 'remove_finished'):
self.remove_finished = None
if not hasattr( self, 'episodes'):
self.episodes = []
if not hasattr( self, 'size_attribute'):
self.size_attribute = 'length'
if not hasattr(self, 'tooltip_attribute'):
self.tooltip_attribute = 'description'
if not hasattr( self, 'selection_buttons'):
self.selection_buttons = {}
if not hasattr( self, 'selected_default'):
self.selected_default = False
if not hasattr( self, 'selected'):
self.selected = [self.selected_default]*len(self.episodes)
if len(self.selected) < len(self.episodes):
self.selected += [self.selected_default]*(len(self.episodes)-len(self.selected))
if not hasattr( self, 'columns'):
self.columns = (('title_markup', None, None, _('Episode')),)
if hasattr( self, 'title'):
self.gPodderEpisodeSelector.set_title( self.title)
self.labelHeading.set_markup( '<b><big>%s</big></b>' % saxutils.escape( self.title))
if hasattr( self, 'instructions'):
self.labelInstructions.set_text( self.instructions)
self.labelInstructions.show_all()
if hasattr(self, 'stock_ok_button'):
if self.stock_ok_button == 'gpodder-download':
self.btnOK.set_image(gtk.image_new_from_stock(gtk.STOCK_GO_DOWN, gtk.ICON_SIZE_BUTTON))
self.btnOK.set_label(_('Download'))
else:
self.btnOK.set_label(self.stock_ok_button)
self.btnOK.set_use_stock(True)
# check/uncheck column
toggle_cell = gtk.CellRendererToggle()
toggle_cell.connect( 'toggled', self.toggle_cell_handler)
toggle_column = gtk.TreeViewColumn('', toggle_cell, active=self.COLUMN_TOGGLE)
toggle_column.set_clickable(True)
self.treeviewEpisodes.append_column(toggle_column)
next_column = self.COLUMN_ADDITIONAL
for name, sort_name, sort_type, caption in self.columns:
renderer = gtk.CellRendererText()
if next_column < self.COLUMN_ADDITIONAL + 1:
renderer.set_property('ellipsize', pango.ELLIPSIZE_END)
column = gtk.TreeViewColumn(caption, renderer, markup=next_column)
column.set_clickable(False)
column.set_resizable( True)
# Only set "expand" on the first column
if next_column < self.COLUMN_ADDITIONAL + 1:
column.set_expand(True)
if sort_name is not None:
column.set_sort_column_id(next_column+1)
else:
column.set_sort_column_id(next_column)
self.treeviewEpisodes.append_column( column)
next_column += 1
if sort_name is not None:
# add the sort column
column = gtk.TreeViewColumn()
column.set_clickable(False)
column.set_visible(False)
self.treeviewEpisodes.append_column( column)
next_column += 1
column_types = [ int, str, bool ]
# add string column type plus sort column type if it exists
for name, sort_name, sort_type, caption in self.columns:
column_types.append(str)
if sort_name is not None:
column_types.append(sort_type)
self.model = gtk.ListStore( *column_types)
tooltip = None
for index, episode in enumerate( self.episodes):
if self.tooltip_attribute is not None:
try:
tooltip = getattr(episode, self.tooltip_attribute)
except:
log('Episode object %s does not have tooltip attribute: "%s"', episode, self.tooltip_attribute, sender=self)
tooltip = None
row = [ index, tooltip, self.selected[index] ]
for name, sort_name, sort_type, caption in self.columns:
if not hasattr(episode, name):
log('Warning: Missing attribute "%s"', name, sender=self)
row.append(None)
else:
row.append(getattr( episode, name))
if sort_name is not None:
if not hasattr(episode, sort_name):
log('Warning: Missing attribute "%s"', sort_name, sender=self)
row.append(None)
else:
row.append(getattr( episode, sort_name))
self.model.append( row)
if self.remove_callback is not None:
self.btnRemoveAction.show()
self.btnRemoveAction.set_label(self.remove_action)
# connect to tooltip signals
if self.tooltip_attribute is not None:
try:
self.treeviewEpisodes.set_property('has-tooltip', True)
self.treeviewEpisodes.connect('query-tooltip', self.treeview_episodes_query_tooltip)
except:
log('I cannot set has-tooltip/query-tooltip (need at least PyGTK 2.12)', sender=self)
self.last_tooltip_episode = None
self.episode_list_can_tooltip = True
self.treeviewEpisodes.connect('button-press-event', self.treeview_episodes_button_pressed)
self.treeviewEpisodes.set_rules_hint( True)
self.treeviewEpisodes.set_model( self.model)
self.treeviewEpisodes.columns_autosize()
# Focus the toggle column for Tab-focusing (bug 503)
path, column = self.treeviewEpisodes.get_cursor()
if path is not None:
self.treeviewEpisodes.set_cursor(path, toggle_column)
self.calculate_total_size()
def treeview_episodes_query_tooltip(self, treeview, x, y, keyboard_tooltip, tooltip):
# With get_bin_window, we get the window that contains the rows without
# the header. The Y coordinate of this window will be the height of the
# treeview header. This is the amount we have to subtract from the
# event's Y coordinate to get the coordinate to pass to get_path_at_pos
(x_bin, y_bin) = treeview.get_bin_window().get_position()
        x -= x_bin
        y -= y_bin
(path, column, rx, ry) = treeview.get_path_at_pos(x, y) or (None,)*4
if not self.episode_list_can_tooltip or column != treeview.get_columns()[1]:
self.last_tooltip_episode = None
return False
if path is not None:
model = treeview.get_model()
iter = model.get_iter(path)
index = model.get_value(iter, self.COLUMN_INDEX)
description = model.get_value(iter, self.COLUMN_TOOLTIP)
if self.last_tooltip_episode is not None and self.last_tooltip_episode != index:
self.last_tooltip_episode = None
return False
self.last_tooltip_episode = index
description = util.remove_html_tags(description)
if description is not None:
if len(description) > 400:
description = description[:398]+'[...]'
tooltip.set_text(description)
return True
else:
return False
self.last_tooltip_episode = None
return False
def treeview_episodes_button_pressed(self, treeview, event):
if event.button == 3:
menu = gtk.Menu()
if len(self.selection_buttons):
for label in self.selection_buttons:
item = gtk.MenuItem(label)
item.connect('activate', self.custom_selection_button_clicked, label)
menu.append(item)
menu.append(gtk.SeparatorMenuItem())
item = gtk.MenuItem(_('Select all'))
item.connect('activate', self.on_btnCheckAll_clicked)
menu.append(item)
item = gtk.MenuItem(_('Select none'))
item.connect('activate', self.on_btnCheckNone_clicked)
menu.append(item)
menu.show_all()
# Disable tooltips while we are showing the menu, so
# the tooltip will not appear over the menu
self.episode_list_can_tooltip = False
menu.connect('deactivate', lambda menushell: self.episode_list_allow_tooltips())
menu.popup(None, None, None, event.button, event.time)
return True
def episode_list_allow_tooltips(self):
self.episode_list_can_tooltip = True
def calculate_total_size( self):
if self.size_attribute is not None:
(total_size, count) = (0, 0)
for episode in self.get_selected_episodes():
try:
total_size += int(getattr( episode, self.size_attribute))
count += 1
except:
log( 'Cannot get size for %s', episode.title, sender = self)
text = []
if count == 0:
text.append(_('Nothing selected'))
text.append(N_('%d episode', '%d episodes', count) % count)
if total_size > 0:
text.append(_('size: %s') % util.format_filesize(total_size))
self.labelTotalSize.set_text(', '.join(text))
self.btnOK.set_sensitive(count>0)
self.btnRemoveAction.set_sensitive(count>0)
if count > 0:
self.btnCancel.set_label(gtk.STOCK_CANCEL)
else:
self.btnCancel.set_label(gtk.STOCK_CLOSE)
else:
self.btnOK.set_sensitive(False)
self.btnRemoveAction.set_sensitive(False)
for index, row in enumerate(self.model):
if self.model.get_value(row.iter, self.COLUMN_TOGGLE) == True:
self.btnOK.set_sensitive(True)
self.btnRemoveAction.set_sensitive(True)
break
self.labelTotalSize.set_text('')
def toggle_cell_handler( self, cell, path):
model = self.treeviewEpisodes.get_model()
model[path][self.COLUMN_TOGGLE] = not model[path][self.COLUMN_TOGGLE]
self.calculate_total_size()
def custom_selection_button_clicked(self, button, label):
callback = self.selection_buttons[label]
for index, row in enumerate( self.model):
new_value = callback( self.episodes[index])
self.model.set_value( row.iter, self.COLUMN_TOGGLE, new_value)
self.calculate_total_size()
def on_btnCheckAll_clicked( self, widget):
for row in self.model:
self.model.set_value( row.iter, self.COLUMN_TOGGLE, True)
self.calculate_total_size()
def on_btnCheckNone_clicked( self, widget):
for row in self.model:
self.model.set_value( row.iter, self.COLUMN_TOGGLE, False)
self.calculate_total_size()
def on_remove_action_activate(self, widget):
episodes = self.get_selected_episodes(remove_episodes=True)
urls = []
for episode in episodes:
urls.append(episode.url)
self.remove_callback(episode)
if self.remove_finished is not None:
self.remove_finished(urls)
self.calculate_total_size()
# Close the window when there are no episodes left
model = self.treeviewEpisodes.get_model()
if model.get_iter_first() is None:
self.on_btnCancel_clicked(None)
def on_row_activated(self, treeview, path, view_column):
model = treeview.get_model()
iter = model.get_iter(path)
value = model.get_value(iter, self.COLUMN_TOGGLE)
model.set_value(iter, self.COLUMN_TOGGLE, not value)
def get_selected_episodes( self, remove_episodes=False):
selected_episodes = []
for index, row in enumerate( self.model):
if self.model.get_value( row.iter, self.COLUMN_TOGGLE) == True:
selected_episodes.append( self.episodes[self.model.get_value( row.iter, self.COLUMN_INDEX)])
if remove_episodes:
for episode in selected_episodes:
index = self.episodes.index(episode)
iter = self.model.get_iter_first()
while iter is not None:
if self.model.get_value(iter, self.COLUMN_INDEX) == index:
self.model.remove(iter)
break
iter = self.model.iter_next(iter)
return selected_episodes
def on_btnOK_clicked( self, widget):
self.gPodderEpisodeSelector.destroy()
if self.callback is not None:
self.callback( self.get_selected_episodes())
def on_btnCancel_clicked( self, widget):
self.gPodderEpisodeSelector.destroy()
if self.callback is not None:
self.callback([])
|
gpl-3.0
| -1,909,551,925,635,607,300
| 42.085511
| 128
| 0.587739
| false
| 4.327052
| false
| false
| false
|
DerThorsten/nifty
|
scripts/new_plcm.py
|
1
|
19851
|
import nifty
import numpy
import nifty.segmentation
import nifty.graph.rag
import nifty.graph.agglo
import vigra
import matplotlib.pyplot as plt
from random import shuffle
import sys
import skimage.segmentation
#import fastfilters
numpy.random.seed(32)
Objective = nifty.graph.opt.lifted_multicut.PixelWiseLmcObjective2D
class PlmcObjective2D(nifty.graph.opt.lifted_multicut.PixelWiseLmcObjective2D):
def __init__(self,raw, affinities, weights, offsets):
self.raw = numpy.require(raw,dtype='float32')
self.affinities = affinities
self.weights = weights
self.offsets = offsets
super(PlmcObjective2D, self).__init__(weights, offsets)
def proposals_from_raw(self):
proposals = []
for sigma in (1.0, 3.0, 5.0):
raw = self.raw
            hmap = vigra.filters.hessianOfGaussianEigenvalues(raw, sigma)[:,:,0]
seg,nseg = vigra.analysis.watersheds(1.0*hmap)
proposals.append(seg)
#plt.imshow(nifty.segmentation.markBoundaries(raw/255.0, seg, color=(1,0,0)))
#plt.show()
return proposals
def proposal_from_raw_agglo(self):
proposals = []
for sigma in (1.0, 3.0, 5.0):
grow_map = vigra.filters.hessianOfGaussianEigenvalues(self.raw, sigma)[:,:,0]
overseg,nseg = vigra.analysis.watersheds(grow_map.astype('float32'))
rag = nifty.graph.rag.gridRag(overseg)
edge_features, node_features = nifty.graph.rag.accumulateMeanAndLength(
rag, grow_map, [512,512],0)
meanEdgeStrength = edge_features[:,0]
edgeSizes = edge_features[:,1]
nodeSizes = node_features[:,1]
for size_reg in (0.1,0.2,0.4,0.8):
# cluster-policy
nnodes = rag.numberOfNodes//300
nnodes = min(nnodes, 1000)
clusterPolicy = nifty.graph.agglo.edgeWeightedClusterPolicy(
graph=rag, edgeIndicators=meanEdgeStrength,
edgeSizes=edgeSizes, nodeSizes=nodeSizes,
numberOfNodesStop=nnodes, sizeRegularizer=size_reg)
# run agglomerative clustering
agglomerativeClustering = nifty.graph.agglo.agglomerativeClustering(clusterPolicy)
agglomerativeClustering.run()
nodeSeg = agglomerativeClustering.result()
# convert graph segmentation
# to pixel segmentation
seg = nifty.graph.rag.projectScalarNodeDataToPixels(rag, nodeSeg)
#plt.imshow(nifty.segmentation.segmentOverlay(self.raw, seg, showBoundaries=False))
#plt.show()
proposals.append(seg)
return proposals
def proposal_from_local_agglo(self, hmap):
proposals = []
hmap0 = vigra.filters.gaussianSmoothing(hmap, 0.1)
for sigma in (1.0, 3.0, 5.0):
hmap1 = vigra.filters.gaussianSmoothing(hmap, sigma)
grow_map = hmap0 + 0.05*hmap1
overseg,nseg = vigra.analysis.watersheds(grow_map.astype('float32'))
rag = nifty.graph.rag.gridRag(overseg)
edge_features, node_features = nifty.graph.rag.accumulateMeanAndLength(
rag, hmap, [512,512],0)
meanEdgeStrength = edge_features[:,0]
edgeSizes = edge_features[:,1]
nodeSizes = node_features[:,1]
for size_reg in (0.1,0.2,0.4,0.8):
# cluster-policy
clusterPolicy = nifty.graph.agglo.edgeWeightedClusterPolicy(
graph=rag, edgeIndicators=meanEdgeStrength,
edgeSizes=edgeSizes, nodeSizes=nodeSizes,
numberOfNodesStop=rag.numberOfNodes//10, sizeRegularizer=size_reg)
# run agglomerative clustering
agglomerativeClustering = nifty.graph.agglo.agglomerativeClustering(clusterPolicy)
agglomerativeClustering.run()
nodeSeg = agglomerativeClustering.result()
# convert graph segmentation
# to pixel segmentation
seg = nifty.graph.rag.projectScalarNodeDataToPixels(rag, nodeSeg)
#plt.imshow(nifty.segmentation.segmentOverlay(self.raw, seg, showBoundaries=False))
#plt.show()
proposals.append(seg)
return proposals
def downsample_by_two(self):
def impl(raw, weights, affinities, offsets):
shape = weights.shape[0:2]
new_shape = [s//2 for s in shape]
new_raw = vigra.sampling.resize(raw.astype('float32'), new_shape)
n_offsets = offsets.shape[0]
new_offsets = offsets.astype('float')/2.0
new_weight_dict = dict()
new_affinity_dict = dict()
def f(o):
if(o>0.0 and o<1.0):
return 1
elif(o<0.0 and o>-1.0):
return -1
else:
return int(round(o))
for i_offset in range(n_offsets):
weights_channel = weights[:,:,i_offset]
affinity_channel = affinities[:,:,i_offset]
new_weights_channel = vigra.sampling.resize(weights_channel.astype('float32'), new_shape)
new_affinity_channel = vigra.sampling.resize(affinity_channel.astype('float32'), new_shape)
offset = offsets[i_offset,:]
nx,ny = new_offsets[i_offset,:]
nx,ny = f(nx), f(ny)
if (nx,ny) in new_weight_dict:
new_weight_dict[(nx,ny)] += new_weights_channel
new_affinity_dict[(nx,ny)] += new_affinity_channel
else:
new_weight_dict[(nx,ny)] = new_weights_channel
new_affinity_dict[(nx,ny)] = new_affinity_channel
print(offset,(nx,ny))
new_offsets = [ ]
new_weights = [ ]
new_affinities = [ ]
for key in new_weight_dict.keys():
new_offsets.append(key)
new_weights.append(new_weight_dict[key])
new_affinities.append(new_affinity_dict[key])
new_weights = numpy.array(new_weights)
new_affinities = numpy.array(new_affinities)
new_offsets = numpy.array(new_offsets)
return new_raw, numpy.rollaxis(new_weights,0,3), numpy.rollaxis(new_affinities,0,3), new_offsets#numpy.swapaxes(new_offsets,0,1)
new_raw, new_weights,new_affinities, new_offsets = impl(raw=self.raw,weights=self.weights,
affinities=self.affinities, offsets=self.offsets)
return PlmcObjective2D(raw=new_raw, affinities=new_affinities, weights=new_weights, offsets=new_offsets)
def local_affinities_to_pixel(affinities, offsets):
shape = affinities.shape[0:2]
offset_dict = dict()
for i in range(offsets.shape[0]):
x,y = offsets[i,:]
key = int(x),int(y)
offset_dict[key] = i
local_edges = [
(-1, 0),
( 1, 0),
( 0,-1),
( 0, 1)
]
acc = numpy.zeros(shape)
for local_edge in local_edges:
#print("find",local_edge)
if local_edge in offset_dict:
acc += affinities[:,:, offset_dict[local_edge]]
else:
o_local_edge = tuple([-1*e for e in local_edge])
#print("missing",local_edge)
if o_local_edge in offset_dict:
#print(" using: ",o_local_edge)
o_channel = affinities[:,:, offset_dict[o_local_edge]]
padded_o_channel = numpy.pad(o_channel, 1, mode='reflect')
if local_edge == (0,1):
acc += padded_o_channel[1:shape[0]+1, 2:shape[1]+2]
elif local_edge == (1,0):
acc += padded_o_channel[2:shape[0]+2, 1:shape[1]+1]
elif local_edge == (0,-1):
acc += padded_o_channel[1:shape[0]+1, 0:shape[1]]
                elif local_edge == (-1,0):
acc += padded_o_channel[0:shape[0], 1:shape[1]+1]
else:
raise RuntimeError("todo")
return acc
def make_pixel_wise(affinities, offsets):
shape = affinities.shape[0:2]
big_shape = tuple([2*s for s in shape])
padding_size = int(numpy.abs(offsets).max())*2
acc = numpy.zeros(shape)
for i in range(offsets.shape[0]):
print(i)
affinity_channel = affinities[:, :, i]
affinity_channel = vigra.sampling.resize(affinity_channel, big_shape)
padded_affinity_channel = numpy.pad(affinity_channel, padding_size, mode='reflect')
sx = padding_size - offsets[i,0]
sy = padding_size - offsets[i,1]
p_affinity = padded_affinity_channel[sx: sx+big_shape[0], sy: sy+big_shape[0]]
sigma = 0.3*numpy.sum(offsets[i,:]**2)**0.5
print("sigma",sigma)
p_affinity = vigra.filters.gaussianSmoothing(p_affinity, sigma)
acc += numpy.array(vigra.sampling.resize(p_affinity, shape))
return acc
def solve_single_scale(objective, best_l=None):
shape = objective.shape
class Fuse(object):
def __init__(self,objective, best_l=None):
self.objective = objective
self.best_l = best_l
self.best_e = None
if self.best_l is not None:
self.best_e = objective.evaluate(best_l)
G = nifty.graph.UndirectedGraph
CCObj = G.LiftedMulticutObjective
greedySolverFactory = CCObj.liftedMulticutGreedyAdditiveFactory()
klSolverFactory = CCObj.liftedMulticutKernighanLinFactory()
            solverFactory = CCObj.chainedSolversFactory([greedySolverFactory, klSolverFactory])
self.fm = nifty.graph.opt.lifted_multicut.PixelWiseLmcConnetedComponentsFusion2D(
objective=self.objective,
solver_factory=solverFactory)
def fuse_with(self, labels):
labels = numpy.squeeze(labels)
labels = numpy.require(labels, requirements=['C'])
if labels.ndim == 2:
if self.best_l is None:
self.best_l = labels
else:
#print("fuuuuu")
self.best_l = self.fm.fuse(
labels,
numpy.require(self.best_l,requirements=['C'])
)
else:
labels = numpy.concatenate([self.best_l[:,:,None], labels],axis=2)
self.best_l = self.fm.fuse(labels)
self.best_e = objective.evaluate(self.best_l)
print(self.best_e)
fuse_inf = Fuse(objective=objective, best_l=best_l)
local = local_affinities_to_pixel(objective.affinities, objective.offsets)
def seeded_watersheds(sigma):
#print("thesigma",sigma)
hmap1 = vigra.filters.gaussianSmoothing(local, 0.2)
hmap2 = vigra.filters.gaussianSmoothing(local, sigma)
hmap1 += 0.03*hmap2
#print(nifty.segmentation.seededWatersheds)
seg = nifty.segmentation.seededWatersheds(hmap1, method='edge_weighted', acc='interpixel')
return seg
def refine_watershed(labels,r, sigma):
hmap1 = vigra.filters.gaussianSmoothing(local, 0.2)
hmap2 = vigra.filters.gaussianSmoothing(local, sigma)
hmap1 += 0.03*hmap2
zeros = numpy.zeros_like(labels)
boundaries = skimage.segmentation.mark_boundaries(zeros, labels.astype('uint32'))[:,:,0]*255
#print(boundaries.min(),boundaries.max())
boundaries = vigra.filters.discDilation(boundaries.astype('uint8'),r).squeeze()
new_seeds = labels + 1
where_b = numpy.where(boundaries==1)
new_seeds[boundaries==255] = 0
seg,nseg = vigra.analysis.watersheds(hmap1.astype('float32'), seeds=new_seeds.astype('uint32'))
seg = nifty.segmentation.connectedComponents(seg)
return seg
def refiner(labels,r):
grid = numpy.arange(labels.size) + labels.max() + 1
grid = grid.reshape(labels.shape)
zeros = numpy.zeros_like(labels)
boundaries = skimage.segmentation.mark_boundaries(zeros, labels.astype('uint32'))[:,:,0]*255
#print(boundaries.min(),boundaries.max())
boundaries = vigra.filters.discDilation(boundaries.astype('uint8'),r).squeeze()
new_seeds = labels.copy()
where_mask = boundaries==255
new_seeds[where_mask] = grid[where_mask]
return new_seeds
proposals = []
proposals += objective.proposals_from_raw()
proposals += objective.proposal_from_local_agglo(local)
proposals += objective.proposal_from_raw_agglo()
proposals += [seeded_watersheds(s) for s in (1.0, 2.0, 3.0)]
#shuffle(proposals)
print("fuabsf")
for proposal in proposals:
print("fuse with prop")
fuse_inf.fuse_with(proposal)
if False:
print("refine watershed")
for r in (1,2,3,4,5):
for s in (1.0, 2.0, 3.0,5.0):
p = refine_watershed(fuse_inf.best_l,r=r,sigma=s)
fuse_inf.fuse_with(p)
else:
for r in (1,2,3,4):
while(True):
print("buja",r)
best_e = float(fuse_inf.best_e)
fuse_inf.fuse_with(refiner(fuse_inf.best_l, r=2))
if fuse_inf.best_e >= best_e:
break
#sys.exit()
if True:
for ps in (1,2,3,4):
print("multi shiftey", ps)
# shift
for i in range(10):
print("Si",i)
proposals = []
best_e = float(fuse_inf.best_e)
padded = numpy.pad(fuse_inf.best_l+1, ps+1, mode='constant', constant_values=0)
for x in range(-ps,ps+1):
for y in range(-ps,ps+1):
labels = padded[
ps + x : ps + x + shape[0],
ps + y : ps + y + shape[1]
]
#labels = nifty.segmentation.connectedComponents(prop)
proposals.append(labels[:,:,None])
if len(proposals) >= 6:
proposals = numpy.concatenate(proposals, axis=2)
fuse_inf.fuse_with(proposals)
proposals = []
if len(proposals) >= 1:
proposals = numpy.concatenate(proposals, axis=2)
fuse_inf.fuse_with(proposals)
if(fuse_inf.best_e >= best_e):
break
print("shiftey done ")
else:
print("shiftey")
# shift
ps = 2
for i in range(10):
print("Si",i)
proposals = []
best_e = float(fuse_inf.best_e)
padded = numpy.pad(fuse_inf.best_l+1, ps+1, mode='constant', constant_values=0)
for x in range(-ps,ps):
for y in range(-ps,ps):
labels = padded[
ps + x : ps + x + shape[0],
ps + y : ps + y + shape[1]
]
#labels = nifty.segmentation.connectedComponents(prop)
proposals.append(labels)
shuffle(proposals)
for labels in proposals:
fuse_inf.fuse_with(labels)
if(fuse_inf.best_e >= best_e):
break
print("shiftey done ")
return fuse_inf.best_l
def solve_pyramid(objective, best_l=None):
G = nifty.graph.UndirectedGraph
CCObj = G.LiftedMulticutObjective
solverFactory = CCObj.liftedMulticutGreedyAdditiveFactory()
fm = nifty.graph.opt.lifted_multicut.PixelWiseLmcConnetedComponentsFusion2D(objective=objective, solver_factory=solverFactory)
shape = objective.shape
best_e = None
if best_l is not None:
best_e = objective.evaluate(best_l)
# make a pyramid
current = objective
pyramid = [current]
#while(current.shape[0]!=64):
# print("jay")
# current = current.downsample_by_two()
# pyramid.append(current)
#pyramid = reversed(pyramid)
old_res = None
for obj in pyramid:
init = None
if old_res is not None:
print(old_res.shape)
print('\n\n\n\n')
init = vigra.sampling.resize(old_res.astype('float32'), obj.shape ,0).astype('int')
old_res = solve_single_scale(obj, init)
res = old_res
return res
def affinities_to_weights(affinities, offsets, beta=0.5):
eps = 0.00001
affinities = numpy.clip(affinities, eps, 1.0-eps)
weights = numpy.log((1.0-affinities)/(affinities)) + numpy.log((1.0-beta)/(beta))
return weights
def affinities_to_better_weights(affinities, offsets, beta=0.5):
weights = affinities.copy()
eps = 0.00001
affinities = numpy.clip(affinities, eps, 1.0-eps)
weights = numpy.log((1.0-affinities)/(affinities)) + numpy.log((1.0-beta)/(beta))
# long range
weights[:,:,:] = -1.0*(affinities[:,:,:]-0.5)
# local weighs
weights[:,:,0] = 1.0 - affinities[:,:,0]
weights[:,:,1] = 1.0 - affinities[:,:,1]
weights *= numpy.sum(offsets**2,1)**0.5
return weights
def affinities_lmc(raw, affinities, offsets, beta=0.5):
# convert affinities to weights
weights = affinities_to_better_weights(affinities=affinities, offsets=offsets, beta=0.5)
#w = numpy.sum(offsets**2,axis=1)
#weights *= w
#weights[:,:,0] = 0
#weights[:,:,1] = 0
objective = PlmcObjective2D(raw=raw, affinities=affinities, weights=weights, offsets=offsets)
return solve_pyramid(objective)
if __name__ == "__main__":
    # load affinities and raw
path_affinities = "/home/tbeier/nice_p/isbi_test_default.h5"
#path_affinities = "/home/tbeier/nice_probs/isbi_test_default.h5"
offsets = numpy.array([
[-1,0],[0,-1],
[-9,0],[0,-9],[-9,-9],[9,-9],
[-9,-4],[-4,-9],[4,-9],[9,-4],
[-27,0],[0,-27],[-27,-27],[27,-27]
])
import h5py
f5_affinities = h5py.File(path_affinities)
affinities = f5_affinities['data']
z = 8
# get once slice
affinities = numpy.rollaxis(affinities[:,z,:,:],0,3)
affinities = numpy.require(affinities, requirements=['C'])
import skimage.io
#raw_path = "/home/tbeier/src/nifty/src/python/examples/multicut/NaturePaperDataUpl/ISBI2012/raw_test.tif"
raw_path = '/home/tbeier/src/nifty/mysandbox/NaturePaperDataUpl/ISBI2012/raw_test.tif'
raw = skimage.io.imread(raw_path)
raw = raw[z,:,:]
#raw = raw[200:64+200, 200:64+200]
#affinities = affinities[200:64+200, 200:64+200,:]
#t = 0.2
#affinities[affinities >= t ] = 1
#affinities[affinities < t ] = 0
print(raw.shape, affinities.shape)
if False:
import matplotlib.pyplot as plt
for x in range(offsets.shape[0]):
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax1.imshow(raw)
ax2 = fig.add_subplot(2,1,2)
ax2.imshow(affinities[:,:,x])
plt.show()
sys.exit()
res = affinities_lmc(raw=raw, affinities=affinities, offsets=offsets, beta=0.5)
plt.imshow(nifty.segmentation.segmentOverlay(raw, res, showBoundaries=False))
plt.show()
plt.imshow(nifty.segmentation.markBoundaries(raw, res, color=(1,0,0)))
plt.show()
|
mit
| 114,205,025,194,031,790
| 27.480631
| 141
| 0.557705
| false
| 3.47897
| false
| false
| false
|
simonmonk/make_action
|
python/experiments/mixing_colors.py
|
2
|
1562
|
from Tkinter import *
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM) # (1)
GPIO.setup(18, GPIO.OUT)
GPIO.setup(23, GPIO.OUT)
GPIO.setup(24, GPIO.OUT)
pwmRed = GPIO.PWM(18, 500) # (2)
pwmRed.start(100)
pwmGreen = GPIO.PWM(23, 500)
pwmGreen.start(100)
pwmBlue = GPIO.PWM(24, 500)
pwmBlue.start(100)
class App:
def __init__(self, master): #(3)
frame = Frame(master) #(4)
frame.pack()
Label(frame, text='Red').grid(row=0, column=0) # (5)
Label(frame, text='Green').grid(row=1, column=0)
Label(frame, text='Blue').grid(row=2, column=0)
scaleRed = Scale(frame, from_=0, to=100, # (6)
orient=HORIZONTAL, command=self.updateRed)
scaleRed.grid(row=0, column=1)
scaleGreen = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateGreen)
scaleGreen.grid(row=1, column=1)
scaleBlue = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateBlue)
scaleBlue.grid(row=2, column=1)
def updateRed(self, duty): # (7)
# change the led brightness to match the slider
pwmRed.ChangeDutyCycle(float(duty))
def updateGreen(self, duty):
pwmGreen.ChangeDutyCycle(float(duty))
def updateBlue(self, duty):
pwmBlue.ChangeDutyCycle(float(duty))
root = Tk() # (8)
root.wm_title('RGB LED Control')
app = App(root)
root.geometry("200x150+0+0")
try:
root.mainloop()
finally:
print("Cleaning up")
GPIO.cleanup()
|
mit
| -5,904,124,153,999,706,000
| 25.05
| 60
| 0.608195
| false
| 3.021277
| false
| false
| false
|
jaracil/nxpy
|
pynexus/pynexus.py
|
1
|
17276
|
# -*- coding: utf-8 -*-
##############################################################################
#
# pynexus, a Python library for easy playing with Nexus
# Copyright (C) 2016 by the pynexus team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import atexit
from .jsocket import JSocketDecoder
import json
import multiprocessing
try:
from queue import Queue
except ImportError:
from Queue import Queue
from . import net
import select
import threading
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
import time
from .version import __version__
# Constants
ErrParse = -32700
ErrInvalidRequest = -32600
ErrMethodNotFound = -32601
ErrInvalidParams = -32602
ErrInternal = -32603
ErrTimeout = -32000
ErrCancel = -32001
ErrInvalidTask = -32002
ErrInvalidPipe = -32003
ErrInvalidUser = -32004
ErrUserExists = -32005
ErrPermissionDenied = -32010
ErrTtlExpired = -32011
ErrUnknownError = -32098
ErrNotSupported = -32099
ErrConnClosed = -32007
ErrStr = {
ErrParse: "Parse error",
ErrInvalidRequest: "Invalid request",
ErrMethodNotFound: "Method not found",
ErrInvalidParams: "Invalid params",
ErrInternal: "Internal error",
ErrTimeout: "Timeout",
ErrCancel: "Cancel",
ErrInvalidTask: "Invalid task",
ErrInvalidPipe: "Invalid pipe",
ErrInvalidUser: "Invalid user",
ErrUserExists: "User already exists",
ErrPermissionDenied: "Permission denied",
ErrTtlExpired: "TTL expired",
ErrUnknownError: "Unknown error",
ErrNotSupported: "Not supported",
ErrConnClosed: "Connection is closed",
}
class NexusConn(object):
def pushRequest(self, request):
self.requests[1].send(request)
return None
def pullRequest(self):
return self.requests[0].recv(), None
def registerChannel(self, task_id, channel):
with self.resTableLock:
self.resTable[task_id] = channel
def getChannel(self, task_id):
res = None
with self.resTableLock:
res = self.resTable.get(task_id)
return res
def unregisterChannel(self, task_id):
with self.resTableLock:
if task_id in self.resTable:
del self.resTable[task_id]
def cancelChannels(self):
with self.resTableLock:
for channel in self.resTable.values():
channel.put({u'jsonrpc': u'2.0', u'id': None, u'error': {u'code': ErrConnClosed, u'message': ErrStr[ErrConnClosed]}})
def getTimeToNextPing(self):
now = time.time()
return self.lastRead + self.keepAlive - now
def resetTimeToNextPing(self):
self.lastRead = time.time()
def mainWorker(self, pipe):
try:
while True:
delay = self.getTimeToNextPing()
ready = select.select([pipe[0]], [], [], delay)
if ready[0] and ready[0][0] == pipe[0]:
break
else:
delay = self.getTimeToNextPing()
if delay <= 0:
error = self.ping(self.keepAlive)
if error:
raise Exception("Error in ping", error)
finally:
self.cancel()
def sendWorker(self, pipe):
try:
while True:
ready = select.select([self.requests[0], pipe[0]], [], [])
if ready[0]:
if ready[0][0] == pipe[0]:
break
else:
request, error = self.pullRequest()
if error:
break
request['jsonrpc'] = '2.0'
with self.connLock:
self.conn.send(json.dumps(request).encode())
finally:
self.cancel()
def recvWorker(self, pipe):
try:
decoder = JSocketDecoder(self.conn)
while True:
ready = select.select([decoder, pipe[0]], [], [])
if ready[0]:
if ready[0][0] == pipe[0]:
break
else:
message = decoder.getObject()
self.resetTimeToNextPing()
if message:
channel = self.getChannel(message['id'])
if channel:
channel.put(message)
finally:
self.cancel()
def newId(self, taskId=None):
new_id = taskId
if not new_id:
self.lastTaskId += 1
new_id = self.lastTaskId
new_channel = Queue()
self.registerChannel(new_id, new_channel)
return new_id, new_channel
def delId(self, task_id):
self.unregisterChannel(task_id)
def __init__(self, conn, keepAlive=60):
self.conn = conn
self.connLock = threading.Lock()
self.requests = multiprocessing.Pipe(False)
self.keepAlive = keepAlive
self.resTable = {}
self.resTableLock = threading.Lock()
self.lastTaskId = 0
self.workers = []
self.lastRead = time.time()
self._stopping = False
self._stoppingLock = threading.Lock()
self.startWorker(self.sendWorker)
self.startWorker(self.recvWorker)
self.startWorker(self.mainWorker)
atexit.register(self.cancel)
def startWorker(self, target):
pipe = multiprocessing.Pipe(False)
worker = threading.Thread(target=target, args=(pipe,))
worker.daemon = True
worker.start()
self.workers.append((worker, pipe))
def cancel(self):
with self._stoppingLock:
if self._stopping:
return False
self._stopping = True
# Cancel pull requests
self.cancelChannels()
# Stop workers
for worker, pipe in self.workers:
if worker != threading.current_thread():
pipe[1].send("exit")
worker.join()
self.workers = []
return True
def executeNoWait(self, method, params, taskId=None):
with self._stoppingLock:
if self._stopping:
return 0, None, {u'code': ErrConnClosed, u'message': ErrStr[ErrConnClosed]}
task_id, channel = self.newId(taskId=taskId)
req = {
'id': task_id,
'method': method,
'params': params,
}
err = self.pushRequest(req)
if err:
self.delId(task_id)
return 0, None, err
return task_id, channel, None
def execute(self, method, params, taskId=None):
task_id, channel, err = self.executeNoWait(method, params, taskId=taskId)
if err:
return None, err
res = channel.get()
self.delId(task_id)
if 'error' in res:
return None, res['error']
else:
return res['result'], None
def ping(self, timeout):
task_id, channel, err = self.executeNoWait('sys.ping', None)
if err:
return err
try:
channel.get(True, timeout)
self.delId(task_id)
return None
except Exception as e:
self.delId(task_id)
return e
def login(self, username, password):
return self.execute('sys.login', {'user': username, 'pass': password})
def taskPush(self, method, params, timeout=0, priority=0, ttl=0, detach=False):
message = {
'method': method,
'params': params,
}
if priority != 0:
message['prio'] = priority
if ttl != 0:
message['ttl'] = ttl
if detach:
message['detach'] = True
if timeout > 0:
message['timeout'] = timeout
return self.execute('task.push', message)
def taskPushCh(self, method, params, timeout=0, priority=0, ttl=0, detach=False):
resQueue = Queue()
errQueue = Queue()
def callTaskPush():
res, err = self.taskPush(method, params, timeout=timeout, priority=priority, ttl=ttl, detach=detach)
if err:
errQueue.put(err)
else:
resQueue.put(res)
threading.Thread(target=callTaskPush).start()
return resQueue, errQueue
def taskPull(self, prefix, timeout=0, taskId=None):
message = {'prefix': prefix}
if timeout > 0:
message['timeout'] = timeout
res, err = self.execute('task.pull', message, taskId=taskId)
if err:
return None, err
task = Task(
self,
res['taskid'],
res['path'],
res['method'],
res['params'],
res['tags'],
res['prio'],
res['detach'],
res['user']
)
return task, None
def cancelPull(self, taskId):
return self.execute('task.cancel', {'id': taskId})
def taskPullCh(self, prefix, timeout=0):
resQueue = Queue()
errQueue = Queue()
def callTaskPull():
task, err = self.taskPull(prefix, timeout=timeout)
if err:
errQueue.put(err)
else:
                resQueue.put(task)
threading.Thread(target=callTaskPull).start()
return resQueue, errQueue
def userCreate(self, username, password):
return self.execute('user.create', {'user': username, 'pass': password})
def userDelete(self, username):
return self.execute('user.delete', {'user': username})
def userSetTags(self, username, prefix, tags):
return self.execute('user.setTags', {'user': username, 'prefix': prefix, 'tags': tags})
def userDelTags(self, username, prefix, tags):
return self.execute('user.delTags', {'user': username, 'prefix': prefix, 'tags': tags})
def userSetPass(self, username, password):
return self.execute('user.setPass', {'user': username, 'pass': password})
def pipeOpen(self, pipeId):
return Pipe(self, pipeId), None
def pipeCreate(self, length = -1):
par = {}
if length > 0:
par["len"] = length
res, err = self.execute("pipe.create", par)
if err:
return None, err
return self.pipeOpen(res["pipeid"])
def topicSubscribe(self, pipe, topic):
return self.execute('topic.sub', {'pipeid': pipe.pipeId, 'topic': topic})
def topicUnsubscribe(self, pipe, topic):
return self.execute('topic.unsub', {'pipeid': pipe.pipeId, 'topic': topic})
def topicPublish(self, topic, message):
return self.execute('topic.pub', {'topic': topic, 'msg': message})
def lock(self, name):
res, err = self.execute('sync.lock', {'lock': name})
if err:
return None, err
else:
return bool(res['ok']), None
def unlock(self, name):
res, err = self.execute('sync.unlock', {'lock': name})
if err:
return None, err
else:
return bool(res['ok']), None
def _getNexusVersion(self):
res, err = self.execute("sys.version", None)
if err == None and isinstance(res, dict) and "version" in res and isinstance(res["version"], str):
return res["version"]
return "0.0.0"
class Client(NexusConn):
def __init__(self, url, keepAlive=60):
nexusURL = urlparse(url)
self.hostname = nexusURL.hostname
self.port = nexusURL.port
self.scheme = nexusURL.scheme
self.username = nexusURL.username
self.password = nexusURL.password
self.is_logged = False
self.login_error = None
self.connid = None
self.nexus_version = "0.0.0"
self.is_version_compatible = False
self._closing = False
self._closingLock = threading.Lock()
self.socket = net.connect(self.hostname, self.port, self.scheme)
super(Client, self).__init__(self.socket, keepAlive=keepAlive)
self.nexusConn = self # for backward compatibility
err = self.ping(20)
if err != None:
raise Exception(err)
if self.username != None and self.password != None:
self.login()
self.nexus_version = self._getNexusVersion()
self.is_version_compatible = self.nexus_version.split(".")[0] == __version__.split(".")[0]
atexit.register(self.close)
def login(self):
res, err = super(Client, self).login(self.username, self.password)
if err:
self.is_logged = False
self.login_error = err
self.connid = None
else:
self.is_logged = True
self.login_error = None
self.connid = res['connid']
def close(self):
with self._closingLock:
if self._closing:
return False
self._closing = True
self.cancel()
if self.socket:
self.socket.close()
self.socket = None
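# Hedged usage sketch (added for illustration; not part of the original module).
# It shows one possible caller/worker split on top of the Client defined above.
# The URL, method name and prefix below are placeholders, not values required by
# the library.
def _example_worker(url='tcp://user:password@localhost:1717'):
    nc = Client(url)
    task, err = nc.taskPull('demo', timeout=30)
    if task is not None:
        # Echo the received parameters back as the task result
        task.sendResult({'echo': task.params})
    nc.close()
def _example_caller(url='tcp://user:password@localhost:1717'):
    nc = Client(url)
    res, err = nc.taskPush('demo.echo', {'msg': 'hello'}, timeout=30)
    nc.close()
    return res, err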
class Task(object):
def __init__(self, nexusConn, taskId, path, method, params, tags, priority, detach, user):
self.nexusConn = nexusConn
self.taskId = taskId
self.path = path
self.method = method
self.params = params
self.tags = tags
self.priority = priority
self.detach = detach
self.user = user
def sendResult(self, result):
params = {
'taskid': self.taskId,
'result': result,
}
return self.nexusConn.execute('task.result', params)
def sendError(self, code, message, data):
if code < 0:
if code in ErrStr:
if message != "":
message = "%s:[%s]" % (ErrStr[code], message)
else:
message = ErrStr[code]
params = {
'taskid': self.taskId,
'code': code,
'message': message,
'data': data,
}
return self.nexusConn.execute('task.error', params)
def reject(self):
"""
Reject the task. Task is returned to Nexus tasks queue.
"""
params = {
'taskid': self.taskId,
}
return self.nexusConn.execute('task.reject', params)
def accept(self):
"""
Accept a detached task.
"""
return self.sendResult(None)
class Pipe(object):
def __init__(self, nexusConn, pipeId):
self.nexusConn = nexusConn
self.pipeId = pipeId
def close(self):
return self.nexusConn.execute("pipe.close", {"pipeid": self.pipeId})
def write(self, msg):
return self.nexusConn.execute("pipe.write", {"pipeid": self.pipeId, "msg": msg})
def read(self, mx, timeout=0):
par = {"pipeid": self.pipeId, "max": mx, "timeout": timeout}
res, err = self.nexusConn.execute("pipe.read", par)
if err:
return None, err
try:
msgres = []
for msg in res["msgs"]:
msgres.append(Msg(msg["count"], msg["msg"]))
except:
return None, {u'code': ErrInternal, u'message': ErrStr[ErrInternal]}
return PipeData(msgres, res["waiting"], res["drops"]), None
def listen(self, channel=None):
if channel is None:
channel = Queue()
def pipeReader():
try:
while True:
data, err = self.read(100000)
if err:
break
for message in data.msgs:
channel.put(message)
except:
pass
threading.Thread(target=pipeReader).start()
return channel
def id(self):
return self.pipeId
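# Hedged usage sketch (added for illustration; not part of the original module):
# pub/sub over a pipe using the helpers defined on NexusConn above. `nc` is
# assumed to be an already connected and logged-in NexusConn/Client instance,
# and 'demo.topic' is a placeholder topic name.
def _example_pipe_usage(nc):
    pipe, err = nc.pipeCreate()
    if err is not None:
        return err
    nc.topicSubscribe(pipe, 'demo.topic')
    nc.topicPublish('demo.topic', {'hello': 'world'})
    # Read up to 10 queued messages, waiting at most 5 seconds
    data, err = pipe.read(10, timeout=5)
    if err is None:
        for message in data.msgs:
            print(message.msg)
    pipe.close()
    return None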
class Msg(object):
def __init__(self, count, msg):
self.count = count
self.msg = msg
class PipeData(object):
def __init__(self, msgs, waiting, drops):
self.msgs = msgs
self.waiting = waiting
self.drops = drops
class PipeOpts(object):
def __init__(self, length):
self.length = length
|
lgpl-3.0
| 8,857,020,541,984,786,000
| 29.685613
| 133
| 0.539998
| false
| 4.142926
| false
| false
| false
|
kwiecien/ppl
|
ppl/scripts/face_paths.py
|
1
|
2370
|
#!/usr/bin/env python
import datetime
import rospy
from geometry_msgs.msg import PointStamped
from people_msgs.msg import People
from tf import ExtrapolationException, LookupException, TransformListener
FILE = file
RECORDED_PEOPLE = dict()
def listener():
global transform_listener
transform_listener = TransformListener()
rospy.Subscriber(
"/face_people",
People,
callbackPplPaths,
queue_size=1)
rospy.spin()
def callbackPplPaths(people_msg):
writeToFile(people_msg)
def createFile():
global FILE
time = datetime.datetime.now()
name = "/home/krzysztof/catkin_ws/src/ppl/paths/" + \
"face_" + '{:%Y-%m-%d-%H-%M-%S}'.format(time) + ".dat"
FILE = open(name, 'w')
def writeToFile(people_msg):
if len(people_msg.people) == 0:
return
writeTime(people_msg.header.stamp)
writeTime(countMeasuredTime(people_msg.header.stamp))
updatePeoplePositions(people_msg)
writePeoplePositions()
def writeTime(time):
FILE.write(str(time))
FILE.write('\t')
def countMeasuredTime(timestamp):
time = timestamp.to_sec()
time = round(time, 2)
return time
def updatePeoplePositions(people_msg):
global transform_listener
for person in RECORDED_PEOPLE:
RECORDED_PEOPLE[person] = ['"1/0"', '"1/0"']
for person in people_msg.people:
point = PointStamped()
point.header = people_msg.header
point.point = person.position
try:
base_link_point = transform_listener.transformPoint("base_link", point)
if person.name not in RECORDED_PEOPLE:
RECORDED_PEOPLE[person.name] = []
RECORDED_PEOPLE[person.name] = [base_link_point.point.x, base_link_point.point.y]
except (LookupException, ExtrapolationException):
continue
def writePeoplePositions():
i = 1
for person in RECORDED_PEOPLE:
writePosition(RECORDED_PEOPLE[person], i)
i += 1
FILE.write('\n')
print "------------------------------------"
def writePosition(position, i):
x = position[0]
y = position[1]
print "Person", i, "[x, y]", x, y
FILE.write(str(y))
FILE.write('\t')
FILE.write(str(x))
FILE.write('\t')
if __name__ == '__main__':
rospy.init_node('face_paths', anonymous=False)
createFile()
listener()
|
gpl-3.0
| 6,371,509,571,875,838,000
| 23.6875
| 93
| 0.62616
| false
| 3.410072
| false
| false
| false
|
CICIC/gestioCIimporter
|
cleanCSVdata.py
|
1
|
2732
|
# coding: utf8
import re
import logging
def cleanDate(date):
"Clean date format from yyyy[/]mm[/]dd"
date = date.split(' ')[0]
if date != '':
try:
query = r'([0-9]|0[1-9]|[12][0-9]|3[01])/([0-9]|0[1-9]|1[012])/((19|20)[0-9][0-9]|1[0-9])'
date = re.match(query, date).group(0)
date = date.split('/')
if len(date[2])==2:
date[2] = '20' + date[2]
date = date[2] + '-' + date[1] + '-' + date[0]
except AttributeError:
date = None
else:
date = None
return date
def minicleanDate(date):
return date.split(' ')[0]
def cleanPhone(phone):
"Clean phone date, only spain numbers"
phone = phone.replace(' ', '')
phone = phone.replace('.', '')
phone = phone.replace('-', '')
phone = phone.replace('+34', '')
if re.match(r"0034", phone):
phone = phone[4:]
phone = phone[0:9]
if not re.match(r"[0-9]{9}", phone) and len(phone) > 9:
phone = None
return phone
def cleanPostalcode(postalcode):
if re.match(r"[0-9]{4}", postalcode) and len(postalcode) == 4:
postalcode = '0' + postalcode
if (not re.match(r"[0-9]{5}", postalcode)) or len(postalcode) != 5:
postalcode = None
return postalcode
def cleanCOOPnumber(coopnumber):
coopnumber = coopnumber.replace(' ','')
if re.match(r"COOP[0-9]{4}",coopnumber):
coopnumber = coopnumber[0:8]
else:
coopnumber = ''
return coopnumber
def cleanIDcard(idcard):
idcard = idcard.replace('-','')
idcard = idcard.replace('.','')
idcard = idcard.replace(' ','')
if (not re.match(r"[a-zA-Z][0-9]{8}",idcard) or
not re.match(r"[0-9]{8}[a-zA-Z]",idcard)) and len(idcard) != 9:
idcard = ''
return idcard
def cleanFloat(num):
"Convert 0.000,00 -> 0000.00"
num = num.replace('.','')
num = num.replace(',','.')
if num == '':
num = 0
try:
num = float(num)
except ValueError:
print "Not a float:", num
num = 0.0
return num
def cleanInteger(num):
"In this case only remove the value if it's not an integer"
if num == '':
num = 0
try:
num = int(num)
except ValueError:
print "Not an integer:", num
num=0
return num
def cleanCooperative(coop):
if coop == 'x':
coop = 'X'
if coop == 'i':
coop = 'I'
if coop != 'X' and coop != 'I':
coop = None
return coop
def cleanEmail(email):
"Return a valid email"
em = re.search("(<)?([\w\-_.]+@[\w\-_.]+(?:\.\w+)+)(?(1)>)", email)
if em:
email = em.group(0)
else:
email = ''
return email
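# Hedged examples (added for illustration): expected round-trips for the cleaning
# helpers above, assuming the input formats described in their docstrings.
def _example_cleaning():
    assert cleanDate('31/12/15 00:00') == '2015-12-31'
    assert cleanPhone('+34 612-345-678') == '612345678'
    assert cleanPostalcode('8001') == '08001'
    assert cleanFloat('1.234,56') == 1234.56
    assert cleanCooperative('x') == 'X'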
|
agpl-3.0
| -2,201,793,729,169,544,200
| 22.756522
| 102
| 0.514275
| false
| 3.104545
| false
| false
| false
|
DLunin/bayescraft
|
graphmodels/generators.py
|
1
|
4075
|
import networkx as nx
import numpy as np
import pandas as pd
import scipy as sp
import scipy.stats as stats
from itertools import *
import pytest
from bayescraft.graphmodels.factors import (TableCPD, MultivariateGaussianDistribution,
ParametricFunctionCPD, LinearGaussianDistribution)
from bayescraft.graphmodels import DGM
import bayescraft.stats as bstats
def names_to_str(g):
result = nx.Graph()
    result.add_nodes_from(map(str, g.nodes()))
result.add_edges_from(map(lambda x: (str(x[0]), str(x[1])), g.edges()))
return result
class AcyclicDiGraphGen:
@staticmethod
def diamond(n_var):
G = nx.DiGraph()
G.add_nodes_from(range(n_var))
G.add_edges_from(zip(repeat(0), range(1, n_var-1)))
G.add_edges_from(zip(range(1, n_var-1), repeat(n_var-1)))
G.add_edge(0, n_var-1)
return G
@staticmethod
def star(n_var):
G = nx.DiGraph()
G.add_nodes_from(range(n_var))
G.add_edges_from([(i, 0) for i in range(1, n_var)])
return G
@staticmethod
def random_gnr(n_var, p=0.2):
return nx.gnr_graph(n_var, p)
@staticmethod
def random_erdos_renyi(n_var, p=0.2):
while True:
G = nx.erdos_renyi_graph(n_var, p, directed=True)
if not nx.is_directed_acyclic_graph(G):
continue
return G
class GraphGen:
@staticmethod
def diamond(n_var):
G = nx.Graph()
G.add_nodes_from(range(n_var))
G.add_edges_from(zip(repeat(0), range(1, n_var-1)))
G.add_edges_from(zip(range(1, n_var-1), repeat(n_var-1)))
G.add_edge(0, n_var-1)
return G
@staticmethod
def star(n_var):
return nx.star_graph(n_var)
@staticmethod
def random_erdos_renyi(n_var, p=0.2):
return nx.erdos_renyi_graph(n_var, p)
class DiscreteModelGenDGM:
@staticmethod
def dirichlet(G, alpha=1):
cpd = {}
for node in nx.topological_sort(G):
m = G.in_degree(node) + 1
dim = tuple([2] * m)
table = stats.dirichlet(alpha=tuple([alpha] * (2 ** m))).rvs()[0]
table = table.reshape(dim)
cpd[node] = TableCPD(table, [node], list(G.predecessors(node)))
return cpd
class ContinuousModelGenDGM:
@staticmethod
def gaussian(G):
cpd = {}
for node in nx.topological_sort(G):
m = G.in_degree(node) + 1
cov = np.random.rand(m, m)
cov = np.dot(cov, cov.T)
d = MultivariateGaussianDistribution(np.zeros(m), cov)
cpd[node] = ParametricFunctionCPD(d, [node] + list(G.predecessors(node)))
return cpd
@staticmethod
def linear_gaussian(G, a_0=1, b_0=1):
cpd = {}
for node in nx.topological_sort(G):
m = G.in_degree(node) + 1
nig = bstats.normal_inverse_gamma(w_0=np.zeros(m), V_0=np.eye(m), a_0=a_0, b_0=b_0)
sample = nig.rvs()
variance = sample[-1]
w = sample[1:-1]
w0 = sample[0]
cpd[node] = ParametricFunctionCPD(LinearGaussianDistribution(w0, w, variance),
[node], list(G.predecessors(node)))
return cpd
def dag_pack():
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.diamond(n_var)
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.star(n_var)
for p in [0.1, 0.2, 0.3, 0.4, 0.5, 0.9]:
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.random_gnr(n_var, p)
for p in [0.1, 0.2, 0.3, 0.4, 0.5, 0.9]:
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.random_erdos_renyi(n_var, p)
def dgm_pack():
for dag in dag_pack():
dgm = DGM.from_graph(dag)
dgm.cpd = DiscreteModelGenDGM.dirichlet(dag.copy())
dgm.model = { node : TableCPD for node in dgm.nodes() }
yield dgm
dgm = DGM.from_graph(dag)
dgm.cpd = ContinuousModelGenDGM.linear_gaussian(dgm)
#yield dgm
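# Hedged usage sketch (added for illustration; not part of the original module):
# drawing one random DAG and a Dirichlet-parameterised CPD set from the
# generators above. The node count and edge probability are arbitrary choices.
def _example_generation():
    dag = AcyclicDiGraphGen.random_erdos_renyi(10, p=0.3)
    cpds = DiscreteModelGenDGM.dirichlet(dag)
    # One conditional distribution is produced per node of the DAG
    assert set(cpds) == set(dag.nodes())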
|
mit
| 4,763,116,395,961,582,000
| 31.094488
| 95
| 0.567853
| false
| 3.031994
| false
| false
| false
|
miikama/telegram-bot
|
bot2.py
|
1
|
4609
|
import urllib
import urllib2
import pprint
import json
import datetime
import time
import logging
from calendar_bot import CalendarClient
'''Returns 'TANAAN!!' if today is a paapaiva, a date string for the next
upcoming paapaiva otherwise, or None if there is no paapaiva in the next 10 days.
'''
def is_paapaiva(client):
#the events from raati15 calendar for the next 10 days
events = []
events = client.get_calendar_events(10)
#print(events)
#events is like [('2016-09-11T12:30:00+03:00', u'test event')]
if events:
#removing aakkoset
        ascii_events = [(x[0], x[1].replace(u'\xe4', u'a').replace(u'\xc4', u'A').encode('ascii', 'xmlcharrefreplace')) for x in events]
#filtering only paapaivat
only_paapaivas = [x for x in ascii_events if 'paa' in x[1].lower() and 'paiva' in x[1].lower() ]
#print(only_paapaivas)
for paiva in only_paapaivas:
#date parsing
stripped_date = paiva[0][0:10]
calendar_date = datetime.datetime.strptime(stripped_date, '%Y-%m-%d')
#if today is paapaiva
now = datetime.datetime.utcnow()
today = now - datetime.timedelta(minutes=now.minute, hours=now.hour, seconds=now.second, microseconds=now.microsecond)
#print(calendar_date)
#print(today)
if calendar_date == today:
return "TANAAN!!"
else:
return "{}".format(stripped_date)
return None
else:
return None
from telegram.error import (TelegramError, Unauthorized, BadRequest, TimedOut, ChatMigrated, NetworkError)
from telegram.ext import CommandHandler
from telegram.ext import Updater
tanaan_photo_address = 'AgADBAADBeY1G5sdZAeZOQAB_xifyPymVaAZAARU0-rzUc8xq5I8AAIC' # 'http://i.imgur.com/2k3j2NA.jpg'
fugee_rooriin_address ='AgADBAADKeI1G1caZAeDNH-tzcHDX8VYoBkABKVGDyIMeSxuQz0AAgI' #'http://i.imgur.com/ykFysmr.jpg'
ei_tanaan_address = 'AgADBAADLNM1GxUdZAfdLhEdfQINz65boBkABN7nsRV8UWIQwSAAAgI' #'http://i.imgur.com/nxkzkpW.jpg'
calendar_id = '2a668f5qv3pmvn251mviqlc6vk@group.calendar.google.com' #id for raati 15 calendar
calendar_client = CalendarClient(calendar_id)
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',level=logging.INFO)
botid = '268119392:AAErkOPlFBVJIG7Yc_L2m-IzRA0f67tz7qg'
test_botid = '301043923:AAE0VP2x_wWV70s-Yvz3N4_InhG0ShIGhyA'
updater = Updater(token=botid)
dispatcher = updater.dispatcher
#starting
def start(bot, update):
bot.sendMessage(chat_id=update.message.chat_id, text="I'm a bot, please talk to me!")
def stop(bot, update):
updater.stop()
updater.idle()
#paapaiva
def paapaiva(bot, update):
paapaiva = is_paapaiva(calendar_client)
if paapaiva:
bot.sendMessage(chat_id=update.message.chat_id, text=("Seuraava PAAPAIVA on:\n" + paapaiva) )
if paapaiva == "TANAAN!!":
bot.sendPhoto(chat_id=update.message.chat_id, photo=tanaan_photo_address)
else:
bot.send_message(chat_id=update.message.chat_id, text=("Seuraava PAAPAIVA on:\n" + "Ei PAAPAIVAA seuraavaan 10 paivaan :(") )
#fugee
def fugee(bot, update):
msg = bot.sendPhoto(chat_id=update.message.chat_id, photo=fugee_rooriin_address)
#ei
def ei(bot, update):
msg = bot.sendPhoto(chat_id=update.message.chat_id, photo=ei_tanaan_address)
#pprint.pprint("sent photo id: " + msg.photo[0].file_id)
#error handling
def error_callback(bot, update, error):
try:
raise error
except Unauthorized:
print("unauthorized") # remove update.message.chat_id from conversation list
except BadRequest:
print("Badrequest") # handle malformed requests - read more below!
except TimedOut:
print("TimedOut") # handle slow connection problems
except NetworkError:
print("netwrokError") # handle other connection problems
except ChatMigrated as e:
print("chatmigrated") # the chat_id of a group has changed, use e.new_chat_id instead
except TelegramError:
print("telegramerror") # handle all other telegram related errors
start_handler = CommandHandler('start', start)
stop_handler = CommandHandler('stop', stop)
paapaiva_handler = CommandHandler('paapaiva', paapaiva)
fugee_handler = CommandHandler('fugee', fugee)
ei_handler = CommandHandler('ei', ei)
dispatcher.add_handler(start_handler) #handler '/start'
dispatcher.add_handler(stop_handler)
dispatcher.add_handler(paapaiva_handler) #handle '/paapaiva'
dispatcher.add_handler(fugee_handler)
dispatcher.add_handler(ei_handler)
dispatcher.add_error_handler(error_callback) #error handler
updater.start_polling(poll_interval = 2.0, clean = True)
#curl -s -X POST "https://api.telegram.org/bot268119392:AAErkOPlFBVJIG7Yc_L2m-IzRA0f67tz7qg/sendPhoto" -F chat_id=89456514 -F photo="http://i.imgur.com/2k3j2NA.jpg"
|
mit
| 1,012,354,969,156,449,700
| 34.453846
| 164
| 0.73248
| false
| 2.66879
| false
| false
| false
|
mfkaptan/biyos-app
|
biyos/biyos.py
|
1
|
13470
|
import cookielib
import urllib
import urllib2
import sys
import biyosui
import base64 as b64
import re
from docx import Document
from docx.shared import Inches
from openpyxl import load_workbook
from PyQt4 import QtGui
from bs4 import BeautifulSoup
from math import ceil
# No, Blok, Daire
kiraci = [[7710, "A", 6]]
DOGALGAZ_BIRIM = 12.5
SICAKSU_BIRIM = 6.25
class BiyosApp(QtGui.QMainWindow, biyosui.Ui_MainWindow):
def __init__(self, parent=None):
super(BiyosApp, self).__init__(parent)
self.setupUi(self)
self.dogalgaz_birim_in.setValue(DOGALGAZ_BIRIM)
self.su_birim_in.setValue(SICAKSU_BIRIM)
self.kalori_hesap_button.clicked.connect(self.kalori_hesap)
self.sayac_veri_button.clicked.connect(self.sayac_verileri)
self.apartman_aidat_button.clicked.connect(self.apartman_aidat)
self.tum_borclar_button.clicked.connect(self.tum_borclar)
self.tek_borc_button.clicked.connect(self.tek_borc)
self.login()
def login(self):
with open('login/log.in', 'r') as f:
self.email = b64.decodestring(f.readline().strip())
self.password = b64.decodestring(f.readline().strip())
self.cj = cookielib.CookieJar()
self.opener = urllib2.build_opener(
urllib2.HTTPRedirectHandler(),
urllib2.HTTPHandler(debuglevel=0),
urllib2.HTTPSHandler(debuglevel=0),
urllib2.HTTPCookieProcessor(self.cj)
)
self.opener.addheaders = [('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; '
'Windows NT 5.2; .NET CLR 1.1.4322)'))]
# need this twice - once to set cookies, once to log in...
self._login()
self._login()
self.giris_button.setStyleSheet('QPushButton {background-color: #00FF00; color: black;}')
self.giris_button.setText(self.email + ' adresi ile giris yapildi!')
def _login(self):
"""
Handle login. This should populate our cookie jar.
"""
login_data = urllib.urlencode({
'email': self.email,
'password': self.password,
})
response = self.opener.open("https://app.biyos.net/login.php", login_data)
def sayac_verileri(self):
self.dogalgaz_birim = float(self.dogalgaz_birim_in.value())
self.su_birim = float(self.su_birim_in.value())
su = self.get_page('https://app.biyos.net/385/yonetim/sayaclar/sicaksu')
self.su_toplam = self.get_sayac_toplam(su)
self.su_toplam_disp.setText(str(self.su_toplam))
kalori = self.get_page('https://app.biyos.net/385/yonetim/sayaclar/kalorimetre')
self.kalori_toplam = self.get_sayac_toplam(kalori)
self.kalori_toplam_disp.setText(str(self.kalori_toplam))
self.kalori_ortalama = self.kalori_toplam / 48.0
self.kalori_ortalama_disp.setText(str("%.2f" % self.kalori_ortalama))
self.sayac_veri_button.setStyleSheet('QPushButton {background-color: #00FF00; color: black;}')
self.sayac_veri_button.setText('Veriler gosteriliyor')
def kalori_hesap(self):
self.sayac_verileri()
self.dogalgaz_birim = float(self.dogalgaz_birim_in.value())
self.su_birim = float(self.su_birim_in.value())
fatura = float(self.fatura_in.value())
if fatura == 0:
self.kalori_hesap_button.setStyleSheet('QPushButton {background-color: #FF0000; color: black;}')
self.kalori_hesap_button.setText('Fatura girip tekrar deneyin!')
return
su_fark = (self.dogalgaz_birim - self.su_birim) * self.su_toplam
son_fiyat = fatura - su_fark
self.son_fiyat_disp.setText(str("%.2f" % son_fiyat))
ortak_gider = (son_fiyat * 3.) / 480.
aidat = 200. - ortak_gider
self.ortak_gider_disp.setText(str("%.2f" % ortak_gider))
self.aidat_disp.setText(str("%.2f" % aidat))
self.kalori_hesap_button.setStyleSheet('QPushButton {background-color: #00FF00; color: black;}')
self.kalori_hesap_button.setText('Hesaplandi!')
def _get_rows(self, html, attr=None):
if attr is None:
attr = "table"
table = html.find('table', attrs={'class': attr})
body = table.find('tbody')
rows = body.find_all('tr')
return rows
def get_sayac_toplam(self, html):
rows = self._get_rows(html)
total = 0
for r in rows:
cols = r.find_all('td')
total += int(cols[-1].text)
return total
def get_page(self, url):
try:
resp = self.opener.open(url)
return BeautifulSoup(resp.read(), "lxml")
except Exception as e:
raise e
return
def apartman_aidat(self):
self.sayac_verileri()
dogalgaz_link = self.paylasim_link_in.value()
if dogalgaz_link != 0:
url = 'https://app.biyos.net/385/raporlar/paylasimlar/' + str(dogalgaz_link)
else:
url = None
su_rows = []
kalori_rows = []
title = ""
try:
su = self.get_page('https://app.biyos.net/385/yonetim/sayaclar/sicaksu')
su_rows = self._get_rows(su)
if url is None:
title = "2016 - "
else:
kalori = self.get_page(url)
section = kalori.body.find('section', attrs={'class': 'rapor'})
title = section.find('h4', attrs={'class': 'pull-left'}).get_text()
yil = title.split('-')[0].strip()
ay = title.split('-')[1].strip().split(' ')[0].strip()
title = yil + ' - ' + ay
kalori_rows = self._get_rows(kalori)
except Exception as e:
print e
self.apartman_aidat_button.setStyleSheet('QPushButton {background-color: #FF0000; color: white;}')
self.apartman_aidat_button.setText('Yazdirma basarisiz, linki kontrol edin!')
return
try:
self.wb = load_workbook('aidat/template/aidat.xlsx')
ws = self.wb.active
ws.title = title
ws['C1'] = ws['C29'] = title
self._set_xlsx(ws, su_rows, kalori_rows)
self.wb.save(filename='aidat/' + title + ' ISIMLI Aidat.xlsx')
self._remove_names(ws)
self.wb.save(filename='aidat/' + title + ' ISIMSIZ Aidat.xlsx')
except Exception as e:
print e
self.apartman_aidat_button.setStyleSheet('QPushButton {background-color: #FF0000; color: white;}')
self.apartman_aidat_button.setText('Yazdirma basarisiz!')
else:
self.apartman_aidat_button.setStyleSheet('QPushButton {background-color: #00FF00; color: black;}')
self.apartman_aidat_button.setText(title + ' Yazdirildi!')
def _remove_names(self, ws):
for i in range(4, 28):
ws.cell(row=i, column=2).value = 'NO LU Daire'
ws.cell(row=i+28, column=2).value = 'NO LU Daire'
def _set_xlsx(self, ws, su, kalori):
for i in range(48):
r = i + 4
if i >= 24:
r += 4
col = su[i].find_all('td')
ws.cell(row=r, column=2).value = col[2].text
ws.cell(row=r, column=3).value = int(col[5].text)
ws.cell(row=r, column=4).value = su_tl = self.dogalgaz_birim * int(col[5].text)
if len(kalori) == 0:
ws.cell(row=r, column=5).value = 0
ws.cell(row=r, column=6).value = d70 = 0
ws.cell(row=r, column=7).value = d30 = 0
else:
col = kalori[i].find_all('td')
ws.cell(row=r, column=5).value = float(col[6].text.replace(',', '.'))
ws.cell(row=r, column=6).value = d70 = float(col[8].text.replace(',', '.'))
ws.cell(row=r, column=7).value = d30 = float(col[7].text.replace(',', '.'))
aidat = 200. - d30
ws.cell(row=r, column=8).value = aidat
total = su_tl + d70 + d30 + aidat
ws.cell(row=r, column=9).value = ceil(total)
def _single_account(self, no, blok, daire):
html = self.get_page('https://app.biyos.net/385/hesaplar/' + str(no))
hesap = html.body.find('span', attrs={'style': 'font-size:22px;'}).get_text()
head = self.document.add_heading(hesap, level=1)
head.style.paragraph_format.keep_together = True
head.style.paragraph_format.keep_with_next = True
head = self.document.add_heading(blok + " Blok / No: " + str(daire), level=2)
head.style.paragraph_format.keep_together = True
head.style.paragraph_format.keep_with_next = True
try:
data = html.body.find('div', attrs={'class': 'table-responsive'})
geciken = html.body.find('div', attrs={'class': 'detail-payment-item text-danger big-title'})
bakiye = html.body.find('div', attrs={'class': 'detail-payment-item text-warning big-title'})
self.create_table(data, geciken, bakiye)
except AttributeError:
return
def create_table(self, data, geciken, bakiye):
if bakiye:
rows = self._get_rows(data, attr='table table-detail')
tbl = self.document.add_table(rows=0, cols=3)
tbl.autofit = False
tbl.style.paragraph_format.keep_together = True
tbl.style.paragraph_format.keep_with_next = True
tbl.style.paragraph_format.widow_control = True
row_cells = tbl.add_row().cells
row_cells[0].text = "Son Odeme Tarihi"
row_cells[1].text = "Aciklama"
row_cells[2].text = "Tutar"
for r in rows:
row_cells = tbl.add_row().cells
cols = r.find_all('td')
i = 0
for c in cols:
if c.text:
row_cells[i].text = c.text
i += 1
non_decimal = re.compile(r'[^\d.,]+')
row_cells = tbl.add_row().cells
row_cells[1].text = "Toplam Borc"
row_cells[2].text = non_decimal.sub('', bakiye.get_text())
tbl.columns[0].width = Inches(1.5)
tbl.columns[1].width = Inches(50)
tbl.columns[2].width = Inches(0.5)
else:
self.document.add_heading("Odenmemis borcunuz bulunmamaktadir.", level=3)
self.document.add_heading("Gosterdiginiz hassasiyet icin tesekkur ederiz.", level=4)
def tek_borc(self):
blok = None
d = 0
if self.a_blok_in.isChecked():
d = 0
blok = "A"
elif self.b_blok_in.isChecked():
d = 24
blok = "B"
else:
self.tek_borc_button.setStyleSheet('QPushButton {background-color: #FF0000; color: white;}')
self.tek_borc_button.setText('Blok seciniz!')
return
daire = int(self.daire_no_in.value())
hesap = daire + 6148 + d
yazdir = [[hesap, blok, daire]]
for k in kiraci:
if k[1] == blok and k[2] == daire:
yazdir.append(k)
try:
self.document = Document()
for d in yazdir:
self._single_account(*d)
self.document.save('aidat/' + d[1] + '-' + str(d[2]) + ' borc.docx')
except Exception as e:
print e
self.tek_borc_button.setStyleSheet('QPushButton {background-color: #FF0000; color: white;}')
self.tek_borc_button.setText('Yazdirma basarisiz!')
else:
self.tek_borc_button.setStyleSheet('QPushButton {background-color: #00FF00; color: black;}')
self.tek_borc_button.setText('Basarili!\nBaska Yazdir')
def tum_borclar(self):
self.tum_borclar_button.setText('Yazdiriliyor, lutfen bekleyin...')
try:
self.document = Document()
bar = "".join(['_'] * 78)
daire = 1
blok = "A"
for i in range(6149, 6197):
print blok, daire
p = self.document.add_paragraph()
p.add_run(bar).bold = True
p.style.paragraph_format.keep_together = True
p.style.paragraph_format.keep_with_next = True
self._single_account(i, blok, daire)
daire += 1
if daire == 25:
daire = 1
blok = "B"
for k in kiraci:
p = self.document.add_paragraph()
p.style.paragraph_format.keep_together = True
p.style.paragraph_format.keep_with_next = True
p.add_run(bar).bold = True
self._single_account(*k)
self.document.save('aidat/Tum borclar.docx')
except Exception as e:
print e
self.tum_borclar_button.setStyleSheet('QPushButton {background-color: #FF0000; color: white;}')
self.tum_borclar_button.setText('Yazdirma basarisiz!')
else:
self.tum_borclar_button.setStyleSheet('QPushButton {background-color: #00FF00; color: black;}')
self.tum_borclar_button.setText('Yazdirma basarili!')
def main():
app = QtGui.QApplication(sys.argv)
biyos = BiyosApp()
biyos.show()
app.exec_()
if __name__ == '__main__':
main()
|
mit
| 9,147,751,380,979,631,000
| 36.313019
| 110
| 0.558129
| false
| 3.288574
| false
| false
| false
|
GoogleCloudPlatform/PerfKitBenchmarker
|
tests/providers/aws/aws_vpc_endpoint_test.py
|
1
|
4111
|
# Copyright 2019 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for perfkitbenchmarker.providers.aws.aws_vpc_endpoint."""
import unittest
from absl import flags
import mock
from perfkitbenchmarker.providers.aws import aws_vpc_endpoint
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
SERVICE_NAME = 's3'
REGION = 'us-west-1'
FULL_SERVICE_NAME = 'com.amazonaws.{}.s3'.format(REGION)
VPC_ID = 'vpc-1234'
ENDPOINT_ID = 'vpce-1234'
ROUTE_TABLE_ID = 'rtb-1234'
CREATE_RES = {'VpcEndpoint': {'VpcEndpointId': ENDPOINT_ID}}
DELETE_RES = {'Unsuccessful': []}
QUERY_ENDPOINTS_CMD = [
'describe-vpc-endpoints', '--filters',
'Name=service-name,Values={}'.format(FULL_SERVICE_NAME),
'Name=vpc-id,Values={}'.format(VPC_ID), '--query',
'VpcEndpoints[].VpcEndpointId'
]
DESCRIBE_ROUTES_CMD = [
'describe-route-tables', '--filters',
'Name=vpc-id,Values={}'.format(VPC_ID), '--query',
'RouteTables[].RouteTableId'
]
CREATE_ENDPOINT_CMD = [
'create-vpc-endpoint', '--vpc-endpoint-type', 'Gateway', '--vpc-id', VPC_ID,
'--service-name', FULL_SERVICE_NAME, '--route-table-ids', ROUTE_TABLE_ID
]
DELETE_ENDPOINT_CMD = [
'delete-vpc-endpoints', '--vpc-endpoint-ids', ENDPOINT_ID
]
class AwsVpcS3EndpointTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(AwsVpcS3EndpointTest, self).setUp()
self.mock_vpc = mock.Mock()
self.mock_vpc.region = REGION
self.mock_run_cmd = self.enter_context(
mock.patch.object(aws_vpc_endpoint.AwsVpcS3Endpoint, '_RunCommand'))
def _InitEndpoint(self, vpc_id):
self.mock_vpc.id = vpc_id
return aws_vpc_endpoint.CreateEndpointService(SERVICE_NAME, self.mock_vpc)
def testEndPointIdNoVpc(self):
# initialize with no VPC means no immediate lookups done
endpoint = self._InitEndpoint(None)
self.assertIsNone(endpoint.id)
endpoint._RunCommand.assert_not_called()
def testEndPointIdHasVpc(self):
# initialize with a VPC does an immediate call to find existing endpoints
endpoint = self._InitEndpoint(VPC_ID)
self.assertIsNone(endpoint.id, 'Endpoint id always None on initialization')
self.mock_run_cmd.reset_mock()
self.mock_run_cmd.side_effect = [[ENDPOINT_ID]]
self.assertEqual(ENDPOINT_ID, endpoint.endpoint_id)
endpoint._RunCommand.assert_called_with(QUERY_ENDPOINTS_CMD)
def testCreate(self):
# shows that a call to .Create() will get the routing table info followed
# by the create-vpc-endpoint call
endpoint = self._InitEndpoint(VPC_ID)
self.mock_run_cmd.reset_mock()
self.mock_run_cmd.side_effect = [
[], # query for endpoint id
[ROUTE_TABLE_ID], # query for route tables
CREATE_RES, # _Create()
[ENDPOINT_ID], # _Exists()
]
endpoint.Create()
calls = endpoint._RunCommand.call_args_list
self.assertEqual(mock.call(QUERY_ENDPOINTS_CMD), calls[0])
self.assertEqual(mock.call(DESCRIBE_ROUTES_CMD), calls[1])
self.assertEqual(mock.call(CREATE_ENDPOINT_CMD), calls[2])
self.assertEqual(mock.call(QUERY_ENDPOINTS_CMD), calls[3])
self.assertEqual(ENDPOINT_ID, endpoint.id)
def testDelete(self):
endpoint = self._InitEndpoint(VPC_ID)
self.mock_run_cmd.reset_mock()
endpoint.id = ENDPOINT_ID
self.mock_run_cmd.side_effect = [DELETE_RES, []]
endpoint.Delete()
calls = endpoint._RunCommand.call_args_list
self.assertEqual(mock.call(DELETE_ENDPOINT_CMD), calls[0])
self.assertEqual(mock.call(QUERY_ENDPOINTS_CMD), calls[1])
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| -2,071,237,302,103,893,000
| 35.705357
| 80
| 0.704208
| false
| 3.344996
| true
| false
| false
|
mvaled/sentry
|
src/sentry/integrations/gitlab/issues.py
|
2
|
6466
|
from __future__ import absolute_import
import re
from django.core.urlresolvers import reverse
from sentry.integrations.exceptions import ApiError, IntegrationError, ApiUnauthorized
from sentry.integrations.issues import IssueBasicMixin
from sentry.utils.http import absolute_uri
ISSUE_EXTERNAL_KEY_FORMAT = re.compile(r".+:(.+)#(.+)")
class GitlabIssueBasic(IssueBasicMixin):
def make_external_key(self, data):
return u"{}:{}".format(self.model.metadata["domain_name"], data["key"])
def get_issue_url(self, key):
match = ISSUE_EXTERNAL_KEY_FORMAT.match(key)
project, issue_id = match.group(1), match.group(2)
return u"{}/{}/issues/{}".format(self.model.metadata["base_url"], project, issue_id)
def get_persisted_default_config_fields(self):
return ["project"]
def get_projects_and_default(self, group, **kwargs):
params = kwargs.get("params", {})
defaults = self.get_project_defaults(group.project_id)
kwargs["repo"] = params.get("project", defaults.get("project"))
        # In GitLab, repositories are called "projects"
default_project, project_choices = self.get_repository_choices(group, **kwargs)
return default_project, project_choices
def create_default_repo_choice(self, default_repo):
client = self.get_client()
try:
# default_repo should be the project_id
project = client.get_project(default_repo)
except (ApiError, ApiUnauthorized):
return ("", "")
return (project["id"], project["name_with_namespace"])
def get_create_issue_config(self, group, **kwargs):
default_project, project_choices = self.get_projects_and_default(group, **kwargs)
kwargs["link_referrer"] = "gitlab_integration"
fields = super(GitlabIssueBasic, self).get_create_issue_config(group, **kwargs)
org = group.organization
autocomplete_url = reverse(
"sentry-extensions-gitlab-search", args=[org.slug, self.model.id]
)
return [
{
"name": "project",
"label": "GitLab Project",
"type": "select",
"url": autocomplete_url,
"choices": project_choices,
"defaultValue": default_project,
"required": True,
}
] + fields
def create_issue(self, data, **kwargs):
client = self.get_client()
project_id = data.get("project")
if not project_id:
raise IntegrationError("project kwarg must be provided")
try:
issue = client.create_issue(
project=project_id,
data={"title": data["title"], "description": data["description"]},
)
project = client.get_project(project_id)
except ApiError as e:
raise IntegrationError(self.message_from_error(e))
project_and_issue_iid = "%s#%s" % (project["path_with_namespace"], issue["iid"])
return {
"key": project_and_issue_iid,
"title": issue["title"],
"description": issue["description"],
"url": issue["web_url"],
"project": project_id,
"metadata": {"display_name": project_and_issue_iid},
}
def after_link_issue(self, external_issue, **kwargs):
data = kwargs["data"]
project_id, issue_id = data.get("externalIssue", "").split("#")
if not (project_id and issue_id):
raise IntegrationError("Project and Issue id must be provided")
client = self.get_client()
comment = data.get("comment")
if not comment:
return
try:
client.create_issue_comment(
project_id=project_id, issue_id=issue_id, data={"body": comment}
)
except ApiError as e:
raise IntegrationError(self.message_from_error(e))
def get_link_issue_config(self, group, **kwargs):
default_project, project_choices = self.get_projects_and_default(group, **kwargs)
org = group.organization
autocomplete_url = reverse(
"sentry-extensions-gitlab-search", args=[org.slug, self.model.id]
)
return [
{
"name": "project",
"label": "GitLab Project",
"type": "select",
"default": default_project,
"choices": project_choices,
"url": autocomplete_url,
"updatesForm": True,
"required": True,
},
{
"name": "externalIssue",
"label": "Issue",
"default": "",
"type": "select",
"url": autocomplete_url,
"required": True,
},
{
"name": "comment",
"label": "Comment",
"default": u"Sentry issue: [{issue_id}]({url})".format(
url=absolute_uri(
group.get_absolute_url(params={"referrer": "gitlab_integration"})
),
issue_id=group.qualified_short_id,
),
"type": "textarea",
"required": False,
"help": ("Leave blank if you don't want to " "add a comment to the GitLab issue."),
},
]
def get_issue(self, issue_id, **kwargs):
project_id, issue_num = issue_id.split("#")
client = self.get_client()
if not project_id:
raise IntegrationError("project must be provided")
if not issue_num:
raise IntegrationError("issue must be provided")
try:
issue = client.get_issue(project_id, issue_num)
project = client.get_project(project_id)
except ApiError as e:
raise IntegrationError(self.message_from_error(e))
project_and_issue_iid = "%s#%s" % (project["path_with_namespace"], issue["iid"])
return {
"key": project_and_issue_iid,
"title": issue["title"],
"description": issue["description"],
"url": issue["web_url"],
"project": project_id,
"metadata": {"display_name": project_and_issue_iid},
}
def get_issue_display_name(self, external_issue):
return external_issue.metadata["display_name"]
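# Hedged illustration (added): the external key format parsed by
# ISSUE_EXTERNAL_KEY_FORMAT above. The domain and project path are placeholders.
def _example_external_key():
    match = ISSUE_EXTERNAL_KEY_FORMAT.match("gitlab.example.com:group/project#42")
    assert match.group(1) == "group/project"
    assert match.group(2) == "42"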
|
bsd-3-clause
| 5,595,595,440,710,333,000
| 35.122905
| 99
| 0.548871
| false
| 4.185113
| false
| false
| false
|
Alshain-Oy/Cloudsnake-Application-Server
|
clients/htpasswdFS.py
|
1
|
5036
|
#!/usr/bin/env python
# Cloudsnake Application server
# Licensed under Apache License, see license.txt
# Author: Markus Gronholm <markus@alshain.fi> Alshain Oy
import fuse
import time, sys
import stat, os, errno
import libCloudSnakeClient as SnakeClient
fuse.fuse_python_api = (0, 2)
class ObjectStat(fuse.Stat):
def __init__( self ):
self.st_mode = stat.S_IFDIR | 0755
self.st_ino = 0
self.st_dev = 0
self.st_nlink = 2
self.st_uid = 0
self.st_gid = 0
self.st_size = 4096
self.st_atime = int( time.time() )
self.st_mtime = int( time.time() )
self.st_ctime = int( time.time() )
class testFS( fuse.Fuse ):
def __init__(self, *args, **kw):
fuse.Fuse.__init__(self, *args, **kw)
print 'Init complete.'
self.files = []
#self.files.append( 'htpasswd_id' )
self.client = None
def attach_cloudsnake( self, client ):
self.client = client
self.snake = SnakeClient.CloudSnakeMapper( self.client )
self.files = self.snake.apache_get_groups()
print "self.files:", self.files
self.content = {}
def getattr(self, path):
"""
- st_mode (protection bits)
- st_ino (inode number)
- st_dev (device)
- st_nlink (number of hard links)
- st_uid (user ID of owner)
- st_gid (group ID of owner)
- st_size (size of file, in bytes)
- st_atime (time of most recent access)
- st_mtime (time of most recent content modification)
- st_ctime (platform dependent; time of most recent metadata change on Unix,
or the time of creation on Windows).
"""
print '*** getattr', path
#depth = getDepth(path) # depth of path, zero-based from root
#pathparts = getParts(path) # the actual parts of the path
#return -errno.ENOSYS
self.files = self.snake.apache_get_groups()
print "self.files:", self.files
st = ObjectStat()
parts = path.split( '/' )
if len( parts ) > 1:
fn = parts[ 1 ]
else:
fn = ''
if fn == '':
print "returing stats"
st.st_nlink += len( self.files )
return st
elif fn not in self.files:
print "No such file! (%s)"%fn
return -errno.ENOENT
else:
print "Returning stats.."
st.st_mode = stat.S_IFREG | 0755
self.content[ fn ] = self.snake.apache_get_content( fn )
st.st_size = len( self.content[ fn ] )
return st
# def getdir(self, path):
# """
# return: [[('file1', 0), ('file2', 0), ... ]]
# """
# self.files = self.snake.apache_get_groups()
#
# print '*** getdir', path
# #return -errno.ENOSYS
# return [[ (x, 0) for x in self.files ]]
def readdir(self, path, offset):
print "*** readdir"
dirents = [ '.', '..' ]
self.files = self.snake.apache_get_groups()
print "self.files:", self.files
if path == '/':
dirents.extend( self.files )
for r in dirents:
yield fuse.Direntry( str( r ))
def chmod( self, path, mode ):
print '*** chmod', path, oct(mode)
#return -errno.ENOSYS
return 0
def chown( self, path, uid, gid ):
print '*** chown', path, uid, gid
#return -errno.ENOSYS
return 0
def fsync( self, path, isFsyncFile ):
print '*** fsync', path, isFsyncFile
return -errno.ENOSYS
def link( self, targetPath, linkPath ):
print '*** link', targetPath, linkPath
return -errno.ENOSYS
def mkdir( self, path, mode ):
print '*** mkdir', path, oct(mode)
return -errno.ENOSYS
def mknod( self, path, mode, dev ):
print '*** mknod', path, oct(mode), dev
return -errno.ENOSYS
def open( self, path, flags ):
print '*** open', path, flags
#return -errno.ENOSYS
return 0
def read( self, path, length, offset ):
print '*** read', path, length, offset
#return -errno.ENOSYS
parts = path.split( '/' )
fn = parts[ 1 ]
self.content[ fn ] = self.snake.apache_get_content( fn )
#return self.content[ fn ][ offset : offset + length ]
out = self.content[ fn ][ offset : offset + length ]
print "out:", out
return str( out )
def readlink( self, path ):
print '*** readlink', path
return -errno.ENOSYS
def release( self, path, flags ):
print '*** release', path, flags
#return -errno.ENOSYS
return 0
def rename( self, oldPath, newPath ):
print '*** rename', oldPath, newPath
return -errno.ENOSYS
def rmdir( self, path ):
print '*** rmdir', path
return -errno.ENOSYS
def statfs( self ):
print '*** statfs'
return -errno.ENOSYS
def symlink( self, targetPath, linkPath ):
print '*** symlink', targetPath, linkPath
return -errno.ENOSYS
def truncate( self, path, size ):
print '*** truncate', path, size
return -errno.ENOSYS
def unlink( self, path ):
print '*** unlink', path
return -errno.ENOSYS
def utime( self, path, times ):
print '*** utime', path, times
return -errno.ENOSYS
def write( self, path, buf, offset ):
print '*** write', path, buf, offset
return -errno.ENOSYS
if __name__ == '__main__':
client = SnakeClient.CloudSnakeClient( 'http://localhost:8500', 'main' )
fs = testFS()
fs.attach_cloudsnake( client )
fs.flags = 0
	fs.multithreaded = 0
fs.parse()
fs.main()
#print fs.main.__doc__
|
apache-2.0
| 8,844,387,426,208,073,000
| 20.991266
| 78
| 0.626291
| false
| 2.804009
| false
| false
| false
|
ebu/ebu-tt-live-toolkit
|
ebu_tt_live/node/producer.py
|
1
|
3589
|
import logging
from .base import AbstractProducerNode
from datetime import timedelta
from ebu_tt_live.bindings import div_type, br_type, p_type, style_type, styling, layout, region_type, span_type
from ebu_tt_live.bindings._ebuttdt import LimitedClockTimingType
from ebu_tt_live.documents.ebutt3 import EBUTT3Document
from ebu_tt_live.errors import EndOfData
from ebu_tt_live.strings import END_OF_DATA, DOC_PRODUCED
document_logger = logging.getLogger('document_logger')
class SimpleProducer(AbstractProducerNode):
_document_sequence = None
_input_blocks = None
_reference_clock = None
_provides = EBUTT3Document
def __init__(self, node_id, producer_carriage, document_sequence, input_blocks):
super(SimpleProducer, self).__init__(node_id=node_id, producer_carriage=producer_carriage)
self._document_sequence = document_sequence
self._input_blocks = input_blocks
self._reference_clock = document_sequence.reference_clock
@property
def reference_clock(self):
return self._reference_clock
@property
def document_sequence(self):
return self._document_sequence
@staticmethod
def _interleave_line_breaks(items, style=None):
end_list = []
for item in items:
end_list.append(
span_type(
item,
style=style,
_strict_keywords=False
)
)
end_list.append(br_type())
# We don't require the last linebreak so remove it.
end_list.pop()
return end_list
def _create_fragment(self, lines, style=None):
return div_type(
p_type(
*self._interleave_line_breaks(lines, style=style),
id='ID{:03d}'.format(1),
_strict_keywords=False
),
region='bottomRegion'
)
def process_document(self, document=None, **kwargs):
activation_time = self._reference_clock.get_time() + timedelta(seconds=1)
if self._input_blocks:
try:
lines = self._input_blocks.next()
except StopIteration:
raise EndOfData(END_OF_DATA)
else:
lines = [LimitedClockTimingType(activation_time)]
document = self._document_sequence.new_document()
# Add default style
document.binding.head.styling = styling(
style_type(
id='defaultStyle1',
backgroundColor="rgb(0, 0, 0)",
color="rgb(255, 255, 255)",
linePadding="0.5c",
fontFamily="sansSerif"
)
)
document.binding.head.layout = layout(
region_type(
id='bottomRegion',
origin='14.375% 60%',
extent='71.25% 24%',
displayAlign='after',
writingMode='lrtb',
overflow="visible"
)
)
document.add_div(
self._create_fragment(
lines,
'defaultStyle1'
),
)
document.set_dur(LimitedClockTimingType(timedelta(seconds=1)))
document.set_begin(LimitedClockTimingType(activation_time))
document.validate()
document_logger.info(
DOC_PRODUCED.format(
sequence_identifier=document.sequence_identifier,
sequence_number=document.sequence_number
)
)
self.producer_carriage.emit_data(document, **kwargs)
|
bsd-3-clause
| -5,551,885,586,373,914,000
| 30.761062
| 111
| 0.575091
| false
| 4.227326
| false
| false
| false
|
googleinterns/where-is-my-watch
|
GpsDataAnalyzer/visualizer/visualizer.py
|
1
|
6027
|
"""
Usage: visualizer.py
Visualize the classified data as histogram and line graph with min/max/mean/std/availability information
"""
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import numpy as np
import pandas as pd
import os
from datetime import datetime
from datetime import timedelta
from datetime import timezone
"""
Deviation distribution zone
"""
HIGH_CONFIDENCE_THRESHOLD = 5
LOW_CONFIDENCE_THRESHOLD = 10
class Visualizer:
"""
Classify the deviation of distance and visualize the deviations of distance/speed/altitude
"""
def __init__(self):
current_directory = os.path.dirname(__file__)
current_time = datetime.strftime(datetime.now(), "%Y-%m-%dT%H%M%S")
self.output_file_folder = os.path.join(current_directory, current_time)
os.mkdir(self.output_file_folder)
def get_min_deviation(self, data):
"""
Get the min value of deviation
Args:
data: the deviation data
"""
return min(deviation for deviation in data)
def get_max_deviation(self, data):
"""
Get the max value of deviation
Args:
data: the deviation data
"""
return max(deviation for deviation in data)
def classify_deviation(self, deviation_dataframe):
"""
Classify the deviation of distance according to its absolute value, and mark the data confidence (1, 2, 3).
Higher score means higher confidence and accuracy.
Args:
deviation_dataframe: a dataframe containing time and deviation of distance/speed/altitude
Returns:
A dataframe after distance deviation classified with confidence
"""
deviation_list = deviation_dataframe["Deviations"]
confidence = []
for deviation in deviation_list:
abs_deviation = abs(deviation)
if abs_deviation <= HIGH_CONFIDENCE_THRESHOLD:
confidence.append(3)
elif abs_deviation <= LOW_CONFIDENCE_THRESHOLD:
confidence.append(2)
else:
confidence.append(1)
deviation_dataframe["Confidence"] = confidence
return deviation_dataframe
def draw_hist_graph(self, data, x_label, y_label, title, availability):
"""
Draw the histogram graph and save it as a png file
Args:
data: data on y axis
x_label: label for x axis
y_label: label for y axis
title: title for the graph
availability: percentile of captured datapoints
"""
# Plot the data
fig = plt.figure(figsize=(20,10))
hist_label = "Availability: {}%".format(availability)
plt.hist(data, align='mid', bins=[0.5,1.5,2.5,3.5], rwidth=0.8, label=hist_label, orientation="horizontal", color='cornflowerblue')
# Set the title and labels
plt.legend(loc="upper left")
plt.xlabel(x_label, fontsize=10)
plt.ylabel(y_label, fontsize=10)
plt.title(title, fontsize=12)
plt.yticks(range(0,5))
# Save the graph as a png picture
my_file = "{}_Deviation_Confidence_{}.png".format(title, datetime.strftime(datetime.now(), "%Y-%m-%dT%H%M%S"))
fig.savefig(os.path.join(self.output_file_folder, my_file))
def draw_line_graph(self, x_data, x_label, y_data, y_label, title):
"""
Draw the line graph and save it as a png file
Args:
x_data: data on x axis
x_label: label for x axis
y_data: data on y axis
y_label: label for y axis
title: title for the graph
"""
        # Get the mean deviation, standard deviation, and min/max deviation
abs_mean_deviation = round(np.mean(y_data),3)
std_deviation = round(np.std(y_data),3)
min_deviation = round(self.get_min_deviation(y_data), 3)
max_deviation = round(self.get_max_deviation(y_data), 3)
# Get the time duration
time_duration = x_data[len(x_data)-1] - x_data[0]
# Set the line_label and x_label
line_label = "Mean: {}\nSTD: {}\nMin: {}\nMax: {}".format(abs_mean_deviation, std_deviation, min_deviation, max_deviation)
x_label += str(time_duration)
# Plot the data
fig = plt.figure(figsize=(20,10))
ax = plt.subplot()
ax.plot(x_data, y_data, color='cornflowerblue', label= line_label)
# Format the time on x axis '%H:%M:%S'
ax.xaxis.set_major_formatter(dates.DateFormatter('%H:%M:%S'))
# Set the title and labels
plt.legend(loc="upper left")
plt.title(title +" Deviation", fontsize = 12)
plt.xlabel(x_label, fontsize = 10)
plt.ylabel(y_label, fontsize = 10)
# Save the graph as a png picture
my_file = "{}_Deviation_{}.png".format(title, datetime.strftime(datetime.now(), "%Y-%m-%dT%H%M%S"))
fig.savefig(os.path.join(self.output_file_folder, my_file))
def draw_lines_graph(self, x_data, x_label, y1_data, y2_data, y_label, title, label_1, label_2):
# Get the time duration
time_duration = x_data[len(x_data)-1] - x_data[0]
x_label += str(time_duration)
# Plot the data
fig = plt.figure(figsize=(20,10))
ax = plt.subplot()
ax.plot(x_data, y1_data, color='cornflowerblue', label=label_1)
ax.plot(x_data, y2_data, color='forestgreen', label=label_2)
# Format the time on x axis '%H:%M:%S'
ax.xaxis.set_major_formatter(dates.DateFormatter('%H:%M:%S'))
# Set the title and labels
plt.legend(loc="upper left")
plt.title(title, fontsize = 12)
plt.xlabel(x_label, fontsize = 10)
plt.ylabel(y_label, fontsize = 10)
# Save the graph as a png picture
my_file = "{}_Deviation_{}.png".format(title, datetime.strftime(datetime.now(), "%Y-%m-%dT%H%M%S"))
fig.savefig(os.path.join(self.output_file_folder, my_file))
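# Hedged illustration (added, not part of the original module): classifying a tiny
# deviation series with the 5/10 metre thresholds defined at the top of this file.
# Note that constructing Visualizer() creates a timestamped output folder as a side effect.
def _example_classification():
    df = pd.DataFrame({"Deviations": [1.2, -7.5, 20.0]})
    classified = Visualizer().classify_deviation(df)
    assert classified["Confidence"].tolist() == [3, 2, 1]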
|
apache-2.0
| 8,937,705,627,102,015,000
| 34.040698
| 139
| 0.60959
| false
| 3.681735
| false
| false
| false
|
mskala/birdie
|
birdieapp/gui/statusicon.py
|
1
|
2075
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013-2014 Ivo Nunes/Vasco Nunes
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
from birdieapp.signalobject import SignalObject
class StatusIcon(SignalObject):
def __init__(self):
super(StatusIcon, self).init_signals()
self.statusicon = Gtk.StatusIcon()
self.statusicon.set_from_icon_name("birdie")
self.statusicon.connect("popup-menu", self.right_click_event)
self.statusicon.connect("activate", self.trayicon_activate)
def right_click_event(self, icon, button, tm):
menu = Gtk.Menu()
new_tweet = Gtk.MenuItem()
new_tweet.set_label(_("New Tweet"))
new_tweet.connect("activate", self.on_new_tweet)
menu.append(new_tweet)
quit_item = Gtk.MenuItem()
quit_item.set_label(_("Quit"))
quit_item.connect("activate", self.on_exit)
menu.append(quit_item)
menu.show_all()
menu.popup(None, None,
lambda w, x: self.statusicon.position_menu(
menu, self.statusicon),
self.statusicon, 3, tm)
    def trayicon_activate(self, widget, data=None):
"""Toggle status icon"""
self.emit_signal("toggle-window-visibility")
def on_new_tweet(self, widget):
self.emit_signal_with_arg("new-tweet-compose", None)
def on_exit(self, widget):
self.emit_signal_with_args("on-exit", (None, None, None))
|
gpl-3.0
| 2,877,588,231,206,584,300
| 32.467742
| 70
| 0.659759
| false
| 3.646749
| false
| false
| false
|
Didacti/elixir
|
tests/test_dict.py
|
1
|
5070
|
"""
test the deep-set functionality
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import six
from elixir import *
def setup():
metadata.bind = 'sqlite://'
global Table1, Table2, Table3
class Table1(Entity):
t1id = Field(Integer, primary_key=True)
name = Field(String(30))
tbl2s = OneToMany('Table2')
tbl3 = OneToOne('Table3')
class Table2(Entity):
t2id = Field(Integer, primary_key=True)
name = Field(String(30))
tbl1 = ManyToOne(Table1)
class Table3(Entity):
t3id = Field(Integer, primary_key=True)
name = Field(String(30))
tbl1 = ManyToOne(Table1)
setup_all()
def teardown():
cleanup_all()
class TestDeepSet(object):
def setup(self):
create_all()
def teardown(self):
session.close()
drop_all()
def test_set_attr(self):
t1 = Table1()
t1.from_dict(dict(name='test1'))
assert t1.name == 'test1'
def test_nonset_attr(self):
t1 = Table1(name='test2')
t1.from_dict({})
assert t1.name == 'test2'
def test_set_rel(self):
t1 = Table1()
t1.from_dict(dict(tbl3={'name': 'bob'}))
assert t1.tbl3.name == 'bob'
def test_remove_rel(self):
t1 = Table1()
t1.tbl3 = Table3()
t1.from_dict(dict(tbl3=None))
assert t1.tbl3 is None
def test_update_rel(self):
t1 = Table1()
t1.tbl3 = Table3(name='fred')
t1.from_dict(dict(tbl3={'name': 'bob'}))
assert t1.tbl3.name == 'bob'
def test_extend_list(self):
t1 = Table1()
t1.from_dict(dict(tbl2s=[{'name': 'test3'}]))
assert len(t1.tbl2s) == 1
assert t1.tbl2s[0].name == 'test3'
def test_truncate_list(self):
t1 = Table1()
t2 = Table2()
t1.tbl2s.append(t2)
session.commit()
t1.from_dict(dict(tbl2s=[]))
assert len(t1.tbl2s) == 0
def test_update_list_item(self):
t1 = Table1()
t2 = Table2()
t1.tbl2s.append(t2)
session.commit()
t1.from_dict(dict(tbl2s=[{'t2id': t2.t2id, 'name': 'test4'}]))
assert len(t1.tbl2s) == 1
assert t1.tbl2s[0].name == 'test4'
def test_invalid_update(self):
t1 = Table1()
t2 = Table2()
t1.tbl2s.append(t2)
session.commit()
try:
t1.from_dict(dict(tbl2s=[{'t2id': t2.t2id+1}]))
assert False
except:
pass
def test_to(self):
t1 = Table1(t1id=50, name='test1')
assert t1.to_dict() == {'t1id': 50, 'name': 'test1'}
def test_to_deep_m2o(self):
t1 = Table1(t1id=1, name='test1')
t2 = Table2(t2id=1, name='test2', tbl1=t1)
session.flush()
assert t2.to_dict(deep={'tbl1': {}}) == \
{'t2id': 1, 'name': 'test2', 'tbl1_t1id': 1,
'tbl1': {'name': 'test1'}}
def test_to_deep_m2o_none(self):
t2 = Table2(t2id=1, name='test2')
session.flush()
assert t2.to_dict(deep={'tbl1': {}}) == \
{'t2id': 1, 'name': 'test2', 'tbl1_t1id': None, 'tbl1': None}
def test_to_deep_o2m_empty(self):
t1 = Table1(t1id=51, name='test2')
assert t1.to_dict(deep={'tbl2s': {}}) == \
{'t1id': 51, 'name': 'test2', 'tbl2s': []}
def test_to_deep_o2m(self):
t1 = Table1(t1id=52, name='test3')
t2 = Table2(t2id=50, name='test4')
t1.tbl2s.append(t2)
session.commit()
assert t1.to_dict(deep={'tbl2s':{}}) == \
{'t1id': 52,
'name': 'test3',
'tbl2s': [{'t2id': 50, 'name': 'test4'}]}
def test_to_deep_o2o(self):
t1 = Table1(t1id=53, name='test2')
t1.tbl3 = Table3(t3id=50, name='wobble')
session.commit()
assert t1.to_dict(deep={'tbl3': {}}) == \
{'t1id': 53,
'name': 'test2',
'tbl3': {'t3id': 50, 'name': 'wobble'}}
def test_to_deep_nested(self):
t3 = Table3(t3id=1, name='test3')
t1 = Table1(t1id=1, name='test1', tbl3=t3)
t2 = Table2(t2id=1, name='test2', tbl1=t1)
session.flush()
assert t2.to_dict(deep={'tbl1': {'tbl3': {}}}) == \
{'t2id': 1,
'name': 'test2',
'tbl1_t1id': 1,
'tbl1': {'name': 'test1',
'tbl3': {'t3id': 1,
'name': 'test3'}}}
class TestSetOnAliasedColumn(object):
def setup(self):
metadata.bind = 'sqlite://'
session.close()
def teardown(self):
cleanup_all(True)
def test_set_on_aliased_column(self):
class A(Entity):
name = Field(String(60), colname='strName')
setup_all(True)
a = A()
a.set(name='Aye')
assert a.name == 'Aye'
session.commit()
session.close()
|
mit
| -3,058,275,147,593,896,400
| 26.857143
| 76
| 0.506312
| false
| 3.003555
| true
| false
| false
|
Jarvie8176/wows-noob-warning
|
API/makeshift/config.py
|
1
|
1407
|
player_search = 'http://worldofwarships.com/en/community/accounts/search/?search=%s'
player_search_key_search_page = 'js-search-results'
player_search_key_player_page = 'og:url'
player_search_key_error_page = 'Error'
player_search_id_pattern_search_page = '(?<=accounts/).*(?=-)'
player_search_id_pattern_player_page = '(?<=accounts/).*(?=-)'
player_stat_page = 'http://worldofwarships.com/en/community/accounts/tab/pvp/overview/%s/'
player_stat_tab_class = 'account-main-stats-mobile'
player_stat_key_battle_fought = 'battle_fought'
player_stat_key_win_rate = 'win_rate'
player_stat_key_average_exp = 'avg_exp'
player_stat_key_average_dmg = 'avg_dmg'
player_stat_key_kd_ratio = 'kd_ratio'
player_stat_battlefought = {
'key' : '_battles',
'value' : '_number'}
player_stat_winrate = {
'key' : '_winrate',
'value' : '_number'}
player_stat_avgexp = {
'key' : '_rating',
'value' : '_number'}
player_stat_avgdmg = {
'key' : '_kd',
'value' : '_number'}
player_stat_kdratio = {
'key' : '_damage',
'value' : '_number'}
player_stat_regex_pattern = '(?<=>).*(?=<)'
|
mit
| 8,899,890,509,648,553,000
| 40.382353
| 90
| 0.50462
| false
| 3.431707
| false
| false
| false
|
cameronbwhite/PyOLP
|
PyOLP/api_objects.py
|
1
|
12957
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright (C) 2013, Cameron White
#
# PyGithub is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>.
from . import api_exceptions
from .requester import Requester
import datetime
class _NotSetType:
def __repr__(self):
return "NotSet"
value = None
NotSet = _NotSetType()
class _ValuedAttribute:
def __init__(self, value):
self.value = value
class _BadAttribute:
def __init__(self, value, expectedType, exception=None):
self.__value = value
self.__expectedType = expectedType
self.__exception = exception
@property
def value(self):
raise api_exceptions.BadAttributeException(self.__value, self.__expectedType)
class ApiObject(object):
def __init__(self, requester, headers, attributes):
self._requester = requester
self._initAttributes() # virtual
self._storeAndUseAttributes(headers, attributes)
def _storeAndUseAttributes(self, headers, attributes):
# Make sure headers are assigned before calling _useAttributes
# (Some derived classes will use headers in _useAttributes)
self._headers = headers
self._rawData = attributes
self._useAttributes(attributes) # virtual
@staticmethod
def __makeSimpleAttribute(value, type):
if value is None or isinstance(value, type):
return _ValuedAttribute(value)
else:
return _BadAttribute(value, type)
@staticmethod
def _makeStringAttribute(value):
return ApiObject.__makeSimpleAttribute(value, (str, unicode))
@staticmethod
def _makeIntAttribute(value):
return ApiObject.__makeSimpleAttribute(value, (int, long))
@staticmethod
def _makeBoolAttribute(value):
return ApiObject.__makeSimpleAttribute(value, bool)
@staticmethod
def _makeFloatAttribute(value):
try:
value = float(value)
except ValueError:
pass
return ApiObject.__makeSimpleAttribute(value, float)
@staticmethod
def _makeDatetimeAttribute(value):
try:
d = datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")
except ValueError:
d = datetime.datetime.strptime(value, "%Y-%m-%d")
return ApiObject.__makeSimpleAttribute(d, datetime.datetime)
@property
def raw_data(self):
"""
:type: dict
"""
return self._rawData
@property
def raw_headers(self):
"""
:type: dict
"""
return self._headers
def update(self):
status, responseHeaders, output = self._requester.requestJson(
self._resource_uri.value # virtual
)
headers, data = self._requester._Requester__check(status, responseHeaders, output)
self._storeAndUseAttributes(headers, data)
class Price(ApiObject):
@property
def amount(self):
"""
:type: float
"""
return self._amount.value
@property
def created_at(self):
"""
:type: datetime
"""
return self._created_at.value
@property
def effective_date(self):
"""
:type: datetime
"""
return self._effective_date.value
@property
def id(self):
"""
:type: string
"""
return self._id.value
@property
def modified_at(self):
"""
:type: datetime
"""
return self._modified_at.value
@property
def product(self):
"""
:type: related
"""
return self._product.value
@property
def resource_uri(self):
"""
:type: string
"""
return self._resource_uri.value
def get_product(self):
headers, data = self._requester.requestJsonAndCheck(
self.product
)
return Product(self._requester, headers, data)
def _initAttributes(self):
self._amount = NotSet
self._created_at = NotSet
self._effective_date = NotSet
self._id = NotSet
self._modified_at = NotSet
self._product = NotSet
self._resource_uri = NotSet
def _useAttributes(self, attributes):
if "amount" in attributes:
self._amount = self._makeFloatAttribute(attributes["amount"])
if "created_at" in attributes:
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "effective_date" in attributes:
self._effective_date = self._makeDatetimeAttribute(attributes["effective_date"])
if "id" in attributes:
self._id = self._makeStringAttribute(attributes["id"])
if "modified_at" in attributes:
self._modified_at = self._makeDatetimeAttribute(attributes["modified_at"])
if "product" in attributes:
self._product = self._makeStringAttribute(attributes["product"])
if "resource_uri" in attributes:
self._resource_uri = self._makeStringAttribute(attributes["resource_uri"])
class Product(ApiObject):
@property
def age(self):
"""
:type: float
"""
return self._age.value
@property
def bottles_per_case(self):
"""
:type: int
"""
return self._bottles_per_case.value
@property
def code(self):
"""
:type: string
"""
return self._code.value
@property
def created_at(self):
"""
:type: datetime
"""
return self._created_at.value
@property
def description(self):
"""
:type: string
"""
return self._description.value
@property
def id(self):
"""
:type: string
"""
return self._id.value
@property
def modified_at(self):
"""
:type: datetime
"""
return self._modified_at.value
@property
def on_sale(self):
"""
:type: bool
"""
return self._on_sale.value
@property
def proof(self):
"""
:type: float
"""
return self._proof.value
@property
def resource_uri(self):
"""
:type: string
"""
return self._resource_uri.value
@property
def size(self):
"""
:type: string
"""
return self._size.value
@property
def slug(self):
"""
:type: string
"""
return self._slug.value
@property
def status(self):
"""
:type: string
"""
return self._status.value
@property
def title(self):
"""
:type: string
"""
return self._title.value
def get_price(self):
headers, data = self._requester.requestJsonAndCheck(
'/api/v1/price/' + str(self.id) + '/'
)
return Price(self._requester, headers, data)
def _initAttributes(self):
self._age = NotSet
self._bottles_per_case = NotSet
self._code = NotSet
self._created_at = NotSet
self._description = NotSet
self._id = NotSet
self._modified_at = NotSet
self._on_sale = NotSet
self._proof = NotSet
self._resource_uri = NotSet
self._size = NotSet
self._slug = NotSet
self._status = NotSet
self._title = NotSet
def _useAttributes(self, attributes):
if "age" in attributes:
self._age = self._makeFloatAttribute(attributes["age"])
if "bottles_per_case" in attributes:
self._bottles_per_case = self._makeIntAttribute(attributes["bottles_per_case"])
if "code" in attributes:
self._code = self._makeStringAttribute(attributes["code"])
if "created_at" in attributes:
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "description" in attributes:
self._description = self._makeStringAttribute(attributes["description"])
if "id" in attributes:
self._id = self._makeStringAttribute(attributes["id"])
if "modified_at" in attributes:
self._modified_at = self._makeDatetimeAttribute(attributes["modified_at"])
if "on_sale" in attributes:
self._on_sale = self._makeBoolAttribute(attributes["on_sale"])
if "proof" in attributes:
self._proof = self._makeFloatAttribute(attributes["proof"])
if "resource_uri" in attributes:
self._resource_uri = self._makeStringAttribute(attributes["resource_uri"])
if "size" in attributes:
self._size = self._makeStringAttribute(attributes["size"])
if "slug" in attributes:
self._slug = self._makeStringAttribute(attributes["slug"])
if "status" in attributes:
self._status = self._makeStringAttribute(attributes["status"])
if "title" in attributes:
self._title = self._makeStringAttribute(attributes["title"])
class Store(ApiObject):
@property
def address(self):
"""
:type: string
"""
return self._address.value
@property
def address_raw(self):
"""
:type: string
"""
return self._address_raw.value
@property
def county(self):
"""
:type: string
"""
return self._county.value
@property
def hours_raw(self):
"""
:type: string
"""
return self._hours_raw.value
@property
def id(self):
"""
:type: string
"""
return self._id.value
@property
def key(self):
"""
:type: int
"""
return self._key.value
@property
def latitude(self):
"""
:type: float
"""
return self._latitude.value
@property
def longitude(self):
"""
:type: float
"""
return self._longitude.value
@property
def name(self):
"""
:type: string
"""
return self._name.value
@property
def phone(self):
"""
:type: string
"""
return self._phone.value
@property
def resource_uri(self):
"""
:type: string
"""
return self._resource_uri.value
def _initAttributes(self):
self._address = NotSet
self._address_raw = NotSet
self._county = NotSet
self._hours_raw = NotSet
self._id = NotSet
self._key = NotSet
self._latitude = NotSet
self._longitude = NotSet
self._name = NotSet
self._phone = NotSet
self._resource_uri = NotSet
def _useAttributes(self, attributes):
if "address" in attributes:
self._address = self._makeStringAttribute(attributes["address"])
if "address_raw" in attributes:
self._address_raw = self._makeStringAttribute(attributes["address_raw"])
if "county" in attributes:
self._county = self._makeStringAttribute(attributes["county"])
if "hours_raw" in attributes:
self._hours_raw = self._makeStringAttribute(attributes["hours_raw"])
if "id" in attributes:
self._id = self._makeStringAttribute(attributes["id"])
if "key" in attributes:
self._key = self._makeIntAttribute(attributes["key"])
if "latitude" in attributes:
self._latitude = self._makeFloatAttribute(attributes["latitude"])
if "longitude" in attributes:
self._longitude = self._makeFloatAttribute(attributes["longitude"])
if "name" in attributes:
self._name = self._makeStringAttribute(attributes["name"])
if "phone" in attributes:
self._phone = self._makeStringAttribute(attributes["phone"])
if "resource_uri" in attributes:
self._resource_uri = self._makeStringAttribute(attributes["resource_uri"])
|
gpl-3.0
| -8,386,608,160,705,220,000
| 26.864516
| 92
| 0.561704
| false
| 4.44037
| false
| false
| false
|
schleichdi2/OPENNFR-6.1-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/selftest/archiver.py
|
1
|
1776
|
from oeqa.selftest.base import oeSelfTest
from oeqa.utils.commands import bitbake, get_bb_vars
from oeqa.utils.decorators import testcase
import glob
import os
import shutil
class Archiver(oeSelfTest):
@testcase(1345)
def test_archiver_allows_to_filter_on_recipe_name(self):
"""
Summary: The archiver should offer the possibility to filter on the recipe. (#6929)
Expected: 1. Included recipe (busybox) should be included
2. Excluded recipe (zlib) should be excluded
Product: oe-core
Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
include_recipe = 'busybox'
exclude_recipe = 'zlib'
features = 'INHERIT += "archiver"\n'
features += 'ARCHIVER_MODE[src] = "original"\n'
features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % include_recipe
features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % exclude_recipe
self.write_config(features)
bitbake('-c clean %s %s' % (include_recipe, exclude_recipe))
bitbake("%s %s" % (include_recipe, exclude_recipe))
bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS'])
src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
# Check that include_recipe was included
included_present = len(glob.glob(src_path + '/%s-*' % include_recipe))
self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
# Check that exclude_recipe was excluded
excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
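        # Illustrative sketch (not part of the original test): the `features`
        # string written above via self.write_config() is equivalent to adding
        # the following fragment to local.conf (recipe names as used here):
        #
        #   INHERIT += "archiver"
        #   ARCHIVER_MODE[src] = "original"
        #   COPYLEFT_PN_INCLUDE = "busybox"
        #   COPYLEFT_PN_EXCLUDE = "zlib"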
|
gpl-2.0
| -3,082,833,724,617,833,500
| 40.302326
| 95
| 0.63964
| false
| 3.530815
| true
| false
| false
|
phil-el/phetools
|
hocr/hocr.py
|
1
|
9939
|
# -*- coding: utf-8 -*-
#
# @file hocr.py
#
# @remark Copyright 2014 Philippe Elie
# @remark Read the file COPYING
#
# @author Philippe Elie
import sys
import os
from common import utils
import hashlib
from ocr import pdf_to_djvu
from ocr import ocr_djvu
from ocr import djvu_text_to_hocr
from ocr import ocr
from common import db
import re
def lang_to_site(lang):
sites = {
'nb' : 'no',
}
return sites.get(lang, lang)
tmp_dir = os.path.expanduser('~/tmp/hocr/')
def get_tmp_dir(lang):
if type(lang) == type(u''):
lang = lang.encode('utf-8')
return tmp_dir + lang + '/'
def bookname_md5(book_name):
h = hashlib.md5()
h.update(book_name)
return h.hexdigest()
def cache_path(book_name, lang):
base_dir = os.path.expanduser('~/cache/hocr/') + '%s/%s/%s/'
h = bookname_md5(book_name + lang_to_site(lang))
return base_dir % (h[0:2], h[2:4], h[4:])
def read_sha1(path):
fd = open(path + 'sha1.sum')
sha1 = fd.read()
fd.close()
return sha1
def check_sha1(path, sha1):
if os.path.exists(path + 'sha1.sum'):
old_sha1 = read_sha1(path)
if old_sha1 == sha1:
return True
return False
def check_and_upload(url, filename, sha1):
if not os.path.exists(filename) or utils.sha1(filename) != sha1:
if not utils.copy_file_from_url(url, filename, sha1):
return False
return True
def db_sha1(domain, family, bookname):
conn = db.create_conn(domain = domain, family = family)
cursor = db.use_db(conn, domain, family)
q = 'SELECT img_sha1 FROM image WHERE img_name = %s'
cursor.execute(q, [bookname])
data = cursor.fetchall()
cursor.close()
conn.close()
return data[0][0] if len(data) else None
def get_sha1(lang, bookname):
if type(bookname) == type(u''):
bookname = bookname.encode('utf-8')
url = None
md5 = bookname_md5(bookname)
commons = False
sha1 = db_sha1(lang, 'wikisource', bookname)
if not sha1:
sha1 = db_sha1('commons', 'wiki', bookname)
commons = True
if sha1:
sha1 = "%040x" % int(sha1, 36)
# FIXME: don't hardcode this.
url = 'https://upload.wikimedia.org/wikisource/%s/' % lang
if commons:
url = 'https://upload.wikimedia.org/wikipedia/commons/'
url += md5[0] + '/' + md5[0:2] + '/' + bookname
return sha1, url
# check if data are up to date
#
# return:
# -1 if the File: doesn't exist
# -2 an exception occurred during file copy
# 0 data exist and are up to date
# 1 File: exists but data are outdated or missing
# if it returns 1, the file is uploaded if it didn't already exist.
def is_uptodate(lang, book):
path = cache_path(book, lang)
url = None
sha1, url = get_sha1(lang, book)
if not sha1:
return -1
if check_sha1(path, sha1):
return 0
if not os.path.exists(path):
os.makedirs(path)
    # This is racy: if two hocr processes try to create the same directory,
    # the directory may not exist when tested yet be created by the other
    # process before makedirs() is called, so protect it with a try/except.
temp_dir = get_tmp_dir(lang)
if not os.path.exists(temp_dir):
try:
os.makedirs(temp_dir)
except OSError, e:
import errno
if e.errno != errno.EEXIST:
raise
if not check_and_upload(url, temp_dir + book, sha1):
return -2
return 1
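# Illustrative sketch (not part of the original module): how a caller might
# interpret the return codes documented above. The lang/book values are
# made-up placeholders.
#
#   status = is_uptodate('en', 'Some_book.djvu')
#   if status == 0:
#       pass            # cached hOCR data is current
#   elif status > 0:
#       pass            # file uploaded; (re)build the data, e.g. via hocr(options)
#   else:
#       pass            # -1: File: page missing, -2: copy failed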
def write_sha1(out_dir, in_file):
sha1 = utils.sha1(in_file)
utils.write_sha1(sha1, out_dir + "sha1.sum")
def fast_hocr(book, lang):
print "fast_hocr"
path = cache_path(book, lang)
print "out_dir:", path
options = djvu_text_to_hocr.default_options()
options.compress = 'bzip2'
options.out_dir = path
options.silent = True
in_file = get_tmp_dir(lang) + book
if djvu_text_to_hocr.parse(options, in_file) == 0:
return True
return False
def slow_hocr(lang, book, in_file):
print "slow_hocr"
path = cache_path(book, lang)
print "out_dir:", path
options = ocr_djvu.default_options()
options.silent = True
options.compress = 'bzip2'
options.config = 'hocr'
options.num_thread = 1
options.lang = ocr.tesseract_languages.get(lang, 'eng')
options.out_dir = path
print "Using tesseract lang:", options.lang
ret = ocr_djvu.ocr_djvu(options, in_file)
# FIXME: should go in ocr_djvu.cleanup() but better if cleanup() can
# be triggered by some sort of ocr_djvu module unload
try:
os.rmdir(options.temp_tiff_dir)
except:
print >> sys.stderr, "unable to remove directory:", options.temp_tiff_dir
return ret
# is_uptodate() must be called first to ensure the file is uploaded.
def hocr(options):
path = cache_path(options.book, options.lang)
if os.path.exists(path + 'sha1.sum'):
os.remove(path + 'sha1.sum')
in_file = get_tmp_dir(options.lang) + options.book
done = False
if in_file.endswith('.pdf'):
# Don't do slow hocr for ws.ru as .pdf ==> slow_hocr, don't try to
# convert pdf to djvu for ru.ws
if options.lang != 'ru':
djvuname = pdf_to_djvu.pdf_to_djvu(in_file)
else:
djvuname = None
else:
djvuname = in_file
if options.lang != 'bn' and djvu_text_to_hocr.has_word_bbox(in_file):
done = fast_hocr(options.book, options.lang)
    # djvuname == None if pdf_to_djvu() fails to convert the file
if not done and djvuname and options.lang != 'ru':
done = slow_hocr(options.lang, options.book, djvuname)
# never fail for ws.ru, see above.
if done or options.lang == 'ru':
write_sha1(path, in_file)
if djvuname:
os.remove(djvuname)
if djvuname != in_file:
os.remove(in_file)
return done
def update_db(lang, bookname):
import hocr_request
db_hocr = hocr_request.DbHocr()
with db.connection(db_hocr):
path = cache_path(bookname, lang)
if os.path.exists(path + 'sha1.sum'):
sha1 = read_sha1(path)
db_hocr.add_update_row(bookname, lang, sha1)
else:
print >> sys.stderr, "Can't locate sha1.sum", path
def ret_val(error, text):
if error:
print >> sys.stderr, "Error: %d, %s" % (error, text)
return { 'error' : error, 'text' : text }
def get_hocr(lang, title):
    # FIXME: delete all 'no' ocr data and redo it with the 'nb' lang code.
if lang == 'nb':
lang = 'no'
if type(title) == type(u''):
title = title.encode('utf-8')
title = title.replace(' ', '_')
try:
if lang == 'bn':
title = unicode(title, 'utf-8')
page_nr = re.sub(u'^.*/([০-৯]+)$', '\\1', title)
book_name = re.sub(u'^(.*?)(/[০-৯]+)?$', '\\1', title)
book_name = book_name.encode('utf-8')
result = ord(page_nr[0]) - ord(u'০')
for ch in page_nr[1:]:
result *= 10
result += ord(ch) - ord(u'০')
page_nr = result
else:
page_nr = re.sub('^.*/([0-9]+)$', '\\1', title)
book_name = re.sub('^(.*?)(/[0-9]+)?$', '\\1', title)
page_nr = int(page_nr)
except:
return ret_val(1, "unable to extract page number from page: " + title)
path = cache_path(book_name, lang)
filename = path + 'page_%04d.hocr' % page_nr
    # We support data built with a different compression scheme than the one
    # actually generated by the server.
text = utils.uncompress_file(filename, [ 'bzip2', 'gzip', '' ])
if text == None:
        # not available: add a request to do this hocr so the data is built
        # lazily, but filter out unsupported file types here
if book_name.endswith('.djvu') or book_name.endswith('.pdf'):
import hocr_request
hocr_request.add_hocr_request(lang, book_name, True)
return ret_val(1, "unable to locate file %s for page %s lang %s" % (filename, book_name, lang))
# work-around https://code.google.com/p/tesseract-ocr/issues/detail?id=690&can=1&q=utf-8 a simple patch exists: https://code.google.com/p/tesseract-ocr/source/detail?r=736# but it's easier to do a double conversion to remove invalid utf8 rather than to maintain a patched version of tesseract.
text = unicode(text, 'utf-8', 'ignore')
text = text.encode('utf-8', 'ignore')
return ret_val(0, text)
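# Illustrative sketch (not part of the original module): get_hocr() returns a
# dict shaped by ret_val(), so a caller is expected to check 'error' before
# using 'text'. The lang/title values are made-up placeholders.
#
#   result = get_hocr('en', u'Page:Some_book.djvu/12')
#   if result['error']:
#       print >> sys.stderr, result['text']   # diagnostic message
#   else:
#       hocr_html = result['text']            # decompressed hOCR markup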
def default_options():
class Options:
pass
options = Options()
options.book = None
options.lang = None
return options
def main():
options = default_options()
for arg in sys.argv[1:]:
if arg == '-help':
pass
elif arg.startswith('-book:'):
options.book = arg[len('-book:'):]
options.book = options.book.replace(' ', '_')
elif arg.startswith('-lang:'):
options.lang = arg[len('-lang:'):]
else:
print >> sys.stderr, 'unknown option:', sys.argv
exit(1)
if not options.book or not options.lang:
print >> sys.stderr, 'missing option -lang: and/or -book:', sys.argv
exit(1)
ret = is_uptodate(options.lang, options.book)
if ret > 0:
if not hocr(options):
print >> sys.stderr, 'Error, hocr fail'
ret = 2
else:
update_db(options.lang, options.book)
ret = 0
elif ret < 0:
print >> sys.stderr, "Error, file doesn't exist:", ret
ret = 3 + abs(ret)
else:
update_db(options.lang, options.book)
return ret
if __name__ == '__main__':
cache_dir = 'hocr'
if not os.path.exists(os.path.expanduser('~/cache/' + cache_dir)):
os.mkdir(os.path.expanduser('~/cache/' + cache_dir))
try:
ret = main()
except:
utils.print_traceback()
exit(4)
exit(ret)
|
gpl-3.0
| 1,838,696,693,971,659,800
| 27.282051
| 297
| 0.586481
| false
| 3.185815
| false
| false
| false
|
dchaplinsky/garnahata.in.ua
|
garnahata_site/garnahata_site/urls.py
|
1
|
2269
|
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.contrib.sitemaps import GenericSitemap
from django.conf.urls import include, url
from wagtail.core import urls as wagtail_urls
from wagtail.admin import urls as wagtailadmin_urls
from garnahata_site.sitemaps import MainXML, NewsXML, StaticXML
from garnahata_site.feeds import LatestNewsFeed
from catalog import views as catalog_views
from catalog.models import Address
from cms_pages import views as cms_pages_views
urlpatterns = [
# url(r'^ajax/suggest$', catalog_views.suggest, name='suggest'),
url(r'^search/suggest$', catalog_views.SuggestView.as_view(), name="suggest"),
url(r'^a/(?P<slug>.+)$', catalog_views.address_details,
name='address_details'),
url(r'^tag/', include('cms_pages.urls')),
url(r'^latest$', catalog_views.latest_addresses,
name='latest_addresses'),
url(r'^by_city$', catalog_views.addresses_by_city,
name='addresses_by_city'),
url(r'^news$', cms_pages_views.news, name='news'),
url(r'^news/special$', cms_pages_views.news, name='special_news',
kwargs={'special': True}),
url(r'^search$', catalog_views.search, name='search'),
url(r'^sitemap\.xml$', sitemap, {
'sitemaps': {
'main': MainXML,
'adresses': GenericSitemap({
'queryset': Address.objects.all(),
'date_field': "date_added",
}),
'news': NewsXML,
'static': StaticXML,
}},
name='django.contrib.sitemaps.views.sitemap'),
url(r'^admin/fs/', include('fs.urls')),
url(r'^search_ownerships$', catalog_views.search,
name='search_ownerships', kwargs={"sources": ["ownerships"]}),
url(r'^search_addresses$', catalog_views.search,
name='search_addresses', kwargs={"sources": ["addresses"]}),
url(r'^feeds/news/$', LatestNewsFeed(), name="rss_feed"),
url(r'^tinymce/', include('tinymce.urls')),
url(r'^admin/', admin.site.urls),
url(r'^cms/', include(wagtailadmin_urls)),
url(r'', include(wagtail_urls)),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
mit
| -5,547,407,876,952,480,000
| 31.884058
| 82
| 0.651829
| false
| 3.69544
| false
| true
| false
|
aaltay/beam
|
sdks/python/apache_beam/typehints/typed_pipeline_test.py
|
1
|
19346
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the type-hint objects and decorators."""
# pytype: skip-file
from __future__ import absolute_import
import sys
import typing
import unittest
# patches unittest.TestCase to be python3 compatible
import future.tests.base # pylint: disable=unused-import
import apache_beam as beam
from apache_beam import pvalue
from apache_beam import typehints
from apache_beam.options.pipeline_options import OptionsContext
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.typehints import WithTypeHints
from apache_beam.typehints.decorators import get_signature
# These tests often construct a pipeline as value | PTransform to test side
# effects (e.g. errors).
# pylint: disable=expression-not-assigned
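# A minimal sketch of that idiom (hypothetical, not one of the tests below):
# the pipe expression is evaluated only for its side effect, here a type error.
#
#   with self.assertRaises(typehints.TypeCheckError):   # inside a TestCase
#       ['a', 'b', 'c'] | beam.Map(lambda x: x).with_input_types(int)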
class MainInputTest(unittest.TestCase):
def test_bad_main_input(self):
@typehints.with_input_types(str, int)
def repeat(s, times):
return s * times
with self.assertRaises(typehints.TypeCheckError):
[1, 2, 3] | beam.Map(repeat, 3)
def test_non_function(self):
result = ['a', 'bb', 'c'] | beam.Map(str.upper)
self.assertEqual(['A', 'BB', 'C'], sorted(result))
result = ['xa', 'bbx', 'xcx'] | beam.Map(str.strip, 'x')
self.assertEqual(['a', 'bb', 'c'], sorted(result))
result = ['1', '10', '100'] | beam.Map(int)
self.assertEqual([1, 10, 100], sorted(result))
result = ['1', '10', '100'] | beam.Map(int, 16)
self.assertEqual([1, 16, 256], sorted(result))
@unittest.skipIf(
sys.version_info.major >= 3 and sys.version_info < (3, 7, 0),
'Function signatures for builtins are not available in Python 3 before '
'version 3.7.')
def test_non_function_fails(self):
with self.assertRaises(typehints.TypeCheckError):
[1, 2, 3] | beam.Map(str.upper)
def test_loose_bounds(self):
@typehints.with_input_types(typing.Union[int, float])
@typehints.with_output_types(str)
def format_number(x):
return '%g' % x
result = [1, 2, 3] | beam.Map(format_number)
self.assertEqual(['1', '2', '3'], sorted(result))
def test_typed_dofn_class(self):
@typehints.with_input_types(int)
@typehints.with_output_types(str)
class MyDoFn(beam.DoFn):
def process(self, element):
return [str(element)]
result = [1, 2, 3] | beam.ParDo(MyDoFn())
self.assertEqual(['1', '2', '3'], sorted(result))
with self.assertRaisesRegex(typehints.TypeCheckError,
r'requires.*int.*got.*str'):
['a', 'b', 'c'] | beam.ParDo(MyDoFn())
with self.assertRaisesRegex(typehints.TypeCheckError,
r'requires.*int.*got.*str'):
[1, 2, 3] | (beam.ParDo(MyDoFn()) | 'again' >> beam.ParDo(MyDoFn()))
def test_typed_callable_iterable_output(self):
@typehints.with_input_types(int)
@typehints.with_output_types(typehints.Iterable[typehints.Iterable[str]])
def do_fn(element):
return [[str(element)] * 2]
result = [1, 2] | beam.ParDo(do_fn)
self.assertEqual([['1', '1'], ['2', '2']], sorted(result))
def test_typed_dofn_instance(self):
class MyDoFn(beam.DoFn):
def process(self, element):
return [str(element)]
my_do_fn = MyDoFn().with_input_types(int).with_output_types(str)
result = [1, 2, 3] | beam.ParDo(my_do_fn)
self.assertEqual(['1', '2', '3'], sorted(result))
with self.assertRaises(typehints.TypeCheckError):
['a', 'b', 'c'] | beam.ParDo(my_do_fn)
with self.assertRaises(typehints.TypeCheckError):
[1, 2, 3] | (beam.ParDo(my_do_fn) | 'again' >> beam.ParDo(my_do_fn))
def test_filter_type_hint(self):
@typehints.with_input_types(int)
def filter_fn(data):
return data % 2
self.assertEqual([1, 3], [1, 2, 3] | beam.Filter(filter_fn))
def test_partition(self):
with TestPipeline() as p:
even, odd = (p
| beam.Create([1, 2, 3])
| 'even_odd' >> beam.Partition(lambda e, _: e % 2, 2))
self.assertIsNotNone(even.element_type)
self.assertIsNotNone(odd.element_type)
res_even = (
even
| 'IdEven' >> beam.ParDo(lambda e: [e]).with_input_types(int))
res_odd = (
odd
| 'IdOdd' >> beam.ParDo(lambda e: [e]).with_input_types(int))
assert_that(res_even, equal_to([2]), label='even_check')
assert_that(res_odd, equal_to([1, 3]), label='odd_check')
def test_typed_dofn_multi_output(self):
class MyDoFn(beam.DoFn):
def process(self, element):
if element % 2:
yield beam.pvalue.TaggedOutput('odd', element)
else:
yield beam.pvalue.TaggedOutput('even', element)
with TestPipeline() as p:
res = (
p
| beam.Create([1, 2, 3])
| beam.ParDo(MyDoFn()).with_outputs('odd', 'even'))
self.assertIsNotNone(res[None].element_type)
self.assertIsNotNone(res['even'].element_type)
self.assertIsNotNone(res['odd'].element_type)
res_main = (
res[None]
| 'id_none' >> beam.ParDo(lambda e: [e]).with_input_types(int))
res_even = (
res['even']
| 'id_even' >> beam.ParDo(lambda e: [e]).with_input_types(int))
res_odd = (
res['odd']
| 'id_odd' >> beam.ParDo(lambda e: [e]).with_input_types(int))
assert_that(res_main, equal_to([]), label='none_check')
assert_that(res_even, equal_to([2]), label='even_check')
assert_that(res_odd, equal_to([1, 3]), label='odd_check')
with self.assertRaises(ValueError):
_ = res['undeclared tag']
def test_typed_dofn_multi_output_no_tags(self):
class MyDoFn(beam.DoFn):
def process(self, element):
if element % 2:
yield beam.pvalue.TaggedOutput('odd', element)
else:
yield beam.pvalue.TaggedOutput('even', element)
with TestPipeline() as p:
res = (p | beam.Create([1, 2, 3]) | beam.ParDo(MyDoFn()).with_outputs())
self.assertIsNotNone(res[None].element_type)
self.assertIsNotNone(res['even'].element_type)
self.assertIsNotNone(res['odd'].element_type)
res_main = (
res[None]
| 'id_none' >> beam.ParDo(lambda e: [e]).with_input_types(int))
res_even = (
res['even']
| 'id_even' >> beam.ParDo(lambda e: [e]).with_input_types(int))
res_odd = (
res['odd']
| 'id_odd' >> beam.ParDo(lambda e: [e]).with_input_types(int))
assert_that(res_main, equal_to([]), label='none_check')
assert_that(res_even, equal_to([2]), label='even_check')
assert_that(res_odd, equal_to([1, 3]), label='odd_check')
def test_typed_ptransform_fn_pre_hints(self):
# Test that type hints are propagated to the created PTransform.
# Decorator appears before type hints. This is the more common style.
@beam.ptransform_fn
@typehints.with_input_types(int)
def MyMap(pcoll):
return pcoll | beam.ParDo(lambda x: [x])
self.assertListEqual([1, 2, 3], [1, 2, 3] | MyMap())
with self.assertRaises(typehints.TypeCheckError):
_ = ['a'] | MyMap()
def test_typed_ptransform_fn_post_hints(self):
# Test that type hints are propagated to the created PTransform.
# Decorator appears after type hints. This style is required for Cython
# functions, since they don't accept assigning attributes to them.
@typehints.with_input_types(int)
@beam.ptransform_fn
def MyMap(pcoll):
return pcoll | beam.ParDo(lambda x: [x])
self.assertListEqual([1, 2, 3], [1, 2, 3] | MyMap())
with self.assertRaises(typehints.TypeCheckError):
_ = ['a'] | MyMap()
def test_typed_ptransform_fn_multi_input_types_pos(self):
@beam.ptransform_fn
@beam.typehints.with_input_types(str, int)
def multi_input(pcoll_tuple, additional_arg):
_, _ = pcoll_tuple
assert additional_arg == 'additional_arg'
with TestPipeline() as p:
pcoll1 = p | 'c1' >> beam.Create(['a'])
pcoll2 = p | 'c2' >> beam.Create([1])
_ = (pcoll1, pcoll2) | multi_input('additional_arg')
with self.assertRaises(typehints.TypeCheckError):
_ = (pcoll2, pcoll1) | 'fails' >> multi_input('additional_arg')
def test_typed_ptransform_fn_multi_input_types_kw(self):
@beam.ptransform_fn
@beam.typehints.with_input_types(strings=str, integers=int)
def multi_input(pcoll_dict, additional_arg):
_ = pcoll_dict['strings']
_ = pcoll_dict['integers']
assert additional_arg == 'additional_arg'
with TestPipeline() as p:
pcoll1 = p | 'c1' >> beam.Create(['a'])
pcoll2 = p | 'c2' >> beam.Create([1])
_ = {
'strings': pcoll1, 'integers': pcoll2
} | multi_input('additional_arg')
with self.assertRaises(typehints.TypeCheckError):
_ = {
'strings': pcoll2, 'integers': pcoll1
} | 'fails' >> multi_input('additional_arg')
class NativeTypesTest(unittest.TestCase):
def test_good_main_input(self):
@typehints.with_input_types(typing.Tuple[str, int])
def munge(s_i):
(s, i) = s_i
return (s + 's', i * 2)
result = [('apple', 5), ('pear', 3)] | beam.Map(munge)
self.assertEqual([('apples', 10), ('pears', 6)], sorted(result))
def test_bad_main_input(self):
@typehints.with_input_types(typing.Tuple[str, str])
def munge(s_i):
(s, i) = s_i
return (s + 's', i * 2)
with self.assertRaises(typehints.TypeCheckError):
[('apple', 5), ('pear', 3)] | beam.Map(munge)
def test_bad_main_output(self):
@typehints.with_input_types(typing.Tuple[int, int])
@typehints.with_output_types(typing.Tuple[str, str])
def munge(a_b):
(a, b) = a_b
return (str(a), str(b))
with self.assertRaises(typehints.TypeCheckError):
[(5, 4), (3, 2)] | beam.Map(munge) | 'Again' >> beam.Map(munge)
class SideInputTest(unittest.TestCase):
def _run_repeat_test(self, repeat):
self._run_repeat_test_good(repeat)
self._run_repeat_test_bad(repeat)
@OptionsContext(pipeline_type_check=True)
def _run_repeat_test_good(self, repeat):
# As a positional argument.
result = ['a', 'bb', 'c'] | beam.Map(repeat, 3)
self.assertEqual(['aaa', 'bbbbbb', 'ccc'], sorted(result))
# As a keyword argument.
result = ['a', 'bb', 'c'] | beam.Map(repeat, times=3)
self.assertEqual(['aaa', 'bbbbbb', 'ccc'], sorted(result))
def _run_repeat_test_bad(self, repeat):
# Various mismatches.
with self.assertRaises(typehints.TypeCheckError):
['a', 'bb', 'c'] | beam.Map(repeat, 'z')
with self.assertRaises(typehints.TypeCheckError):
['a', 'bb', 'c'] | beam.Map(repeat, times='z')
with self.assertRaises(typehints.TypeCheckError):
['a', 'bb', 'c'] | beam.Map(repeat, 3, 4)
if all(param.default == param.empty
for param in get_signature(repeat).parameters.values()):
with self.assertRaisesRegex(typehints.TypeCheckError,
r'(takes exactly|missing a required)'):
['a', 'bb', 'c'] | beam.Map(repeat)
def test_basic_side_input_hint(self):
@typehints.with_input_types(str, int)
def repeat(s, times):
return s * times
self._run_repeat_test(repeat)
def test_keyword_side_input_hint(self):
@typehints.with_input_types(str, times=int)
def repeat(s, times):
return s * times
self._run_repeat_test(repeat)
def test_default_typed_hint(self):
@typehints.with_input_types(str, int)
def repeat(s, times=3):
return s * times
self._run_repeat_test(repeat)
def test_default_untyped_hint(self):
@typehints.with_input_types(str)
def repeat(s, times=3):
return s * times
# No type checking on default arg.
self._run_repeat_test_good(repeat)
@OptionsContext(pipeline_type_check=True)
def test_varargs_side_input_hint(self):
@typehints.with_input_types(str, int)
def repeat(s, *times):
return s * times[0]
result = ['a', 'bb', 'c'] | beam.Map(repeat, 3)
self.assertEqual(['aaa', 'bbbbbb', 'ccc'], sorted(result))
if sys.version_info >= (3, ):
with self.assertRaisesRegex(
typehints.TypeCheckError,
r'requires Tuple\[int, ...\] but got Tuple\[str, ...\]'):
['a', 'bb', 'c'] | beam.Map(repeat, 'z')
def test_var_positional_only_side_input_hint(self):
# Test that a lambda that accepts only a VAR_POSITIONAL can accept
# side-inputs.
# TODO(BEAM-8247): There's a bug with trivial_inference inferring the output
# type when side-inputs are used (their type hints are not passed). Remove
# with_output_types(...) when this bug is fixed.
result = (['a', 'b', 'c']
| beam.Map(lambda *args: args, 5).with_input_types(
str, int).with_output_types(typehints.Tuple[str, int]))
self.assertEqual([('a', 5), ('b', 5), ('c', 5)], sorted(result))
if sys.version_info >= (3, ):
with self.assertRaisesRegex(
typehints.TypeCheckError,
r'requires Tuple\[Union\[int, str\], ...\] but got '
r'Tuple\[Union\[float, int\], ...\]'):
_ = [1.2] | beam.Map(lambda *_: 'a', 5).with_input_types(int, str)
def test_var_keyword_side_input_hint(self):
# Test that a lambda that accepts a VAR_KEYWORD can accept
# side-inputs.
result = (['a', 'b', 'c']
| beam.Map(lambda e, **kwargs:
(e, kwargs), kw=5).with_input_types(str, ignored=int))
self.assertEqual([('a', {
'kw': 5
}), ('b', {
'kw': 5
}), ('c', {
'kw': 5
})],
sorted(result))
if sys.version_info >= (3, ):
with self.assertRaisesRegex(
typehints.TypeCheckError,
r'requires Dict\[str, str\] but got Dict\[str, int\]'):
_ = (['a', 'b', 'c']
| beam.Map(lambda e, **_: 'a', kw=5).with_input_types(
str, ignored=str))
def test_deferred_side_inputs(self):
@typehints.with_input_types(str, int)
def repeat(s, times):
return s * times
with TestPipeline() as p:
main_input = p | beam.Create(['a', 'bb', 'c'])
side_input = p | 'side' >> beam.Create([3])
result = main_input | beam.Map(repeat, pvalue.AsSingleton(side_input))
assert_that(result, equal_to(['aaa', 'bbbbbb', 'ccc']))
bad_side_input = p | 'bad_side' >> beam.Create(['z'])
with self.assertRaises(typehints.TypeCheckError):
main_input | 'bis' >> beam.Map(repeat, pvalue.AsSingleton(bad_side_input))
def test_deferred_side_input_iterable(self):
@typehints.with_input_types(str, typing.Iterable[str])
def concat(glue, items):
return glue.join(sorted(items))
with TestPipeline() as p:
main_input = p | beam.Create(['a', 'bb', 'c'])
side_input = p | 'side' >> beam.Create(['x', 'y', 'z'])
result = main_input | beam.Map(concat, pvalue.AsIter(side_input))
assert_that(result, equal_to(['xayaz', 'xbbybbz', 'xcycz']))
bad_side_input = p | 'bad_side' >> beam.Create([1, 2, 3])
with self.assertRaises(typehints.TypeCheckError):
main_input | 'fail' >> beam.Map(concat, pvalue.AsIter(bad_side_input))
class CustomTransformTest(unittest.TestCase):
class CustomTransform(beam.PTransform):
def _extract_input_pvalues(self, pvalueish):
return pvalueish, (pvalueish['in0'], pvalueish['in1'])
def expand(self, pvalueish):
return {'out0': pvalueish['in0'], 'out1': pvalueish['in1']}
# TODO(robertwb): (typecheck) Make these the default?
def with_input_types(self, *args, **kwargs):
return WithTypeHints.with_input_types(self, *args, **kwargs)
def with_output_types(self, *args, **kwargs):
return WithTypeHints.with_output_types(self, *args, **kwargs)
test_input = {'in0': ['a', 'b', 'c'], 'in1': [1, 2, 3]}
def check_output(self, result):
self.assertEqual(['a', 'b', 'c'], sorted(result['out0']))
self.assertEqual([1, 2, 3], sorted(result['out1']))
def test_custom_transform(self):
self.check_output(self.test_input | self.CustomTransform())
def test_keyword_type_hints(self):
self.check_output(
self.test_input
| self.CustomTransform().with_input_types(in0=str, in1=int))
self.check_output(
self.test_input | self.CustomTransform().with_input_types(in0=str))
self.check_output(
self.test_input
| self.CustomTransform().with_output_types(out0=str, out1=int))
with self.assertRaises(typehints.TypeCheckError):
self.test_input | self.CustomTransform().with_input_types(in0=int)
with self.assertRaises(typehints.TypeCheckError):
self.test_input | self.CustomTransform().with_output_types(out0=int)
def test_flat_type_hint(self):
# Type hint is applied to both.
({
'in0': ['a', 'b', 'c'], 'in1': ['x', 'y', 'z']
}
| self.CustomTransform().with_input_types(str))
with self.assertRaises(typehints.TypeCheckError):
self.test_input | self.CustomTransform().with_input_types(str)
with self.assertRaises(typehints.TypeCheckError):
self.test_input | self.CustomTransform().with_input_types(int)
with self.assertRaises(typehints.TypeCheckError):
self.test_input | self.CustomTransform().with_output_types(int)
class AnnotationsTest(unittest.TestCase):
def test_pardo_wrapper_builtin_method(self):
th = beam.ParDo(str.strip).get_type_hints()
if sys.version_info < (3, 7):
self.assertEqual(th.input_types, ((str, ), {}))
else:
# Python 3.7+ has annotations for CPython builtins
# (_MethodDescriptorType).
self.assertEqual(th.input_types, ((str, typehints.Any), {}))
self.assertEqual(th.output_types, ((typehints.Any, ), {}))
def test_pardo_wrapper_builtin_type(self):
th = beam.ParDo(list).get_type_hints()
if sys.version_info < (3, 7):
self.assertEqual(
th.input_types,
((typehints.Any, typehints.decorators._ANY_VAR_POSITIONAL), {
'__unknown__keywords': typehints.decorators._ANY_VAR_KEYWORD
}))
else:
# Python 3.7+ supports signatures for builtins like 'list'.
self.assertEqual(th.input_types, ((typehints.Any, ), {}))
self.assertEqual(th.output_types, ((typehints.Any, ), {}))
def test_pardo_wrapper_builtin_func(self):
th = beam.ParDo(len).get_type_hints()
self.assertIsNone(th.input_types)
self.assertIsNone(th.output_types)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| -3,108,936,589,871,372,300
| 35.709677
| 80
| 0.621834
| false
| 3.283435
| true
| false
| false
|
kubeflow/katib
|
cmd/suggestion/nas/darts/v1beta1/main.py
|
1
|
1428
|
# Copyright 2021 The Kubeflow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from concurrent import futures
import time
from pkg.apis.manager.v1beta1.python import api_pb2_grpc
from pkg.apis.manager.health.python import health_pb2_grpc
from pkg.suggestion.v1beta1.nas.darts.service import DartsService
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
DEFAULT_PORT = "0.0.0.0:6789"
def serve():
print("Darts Suggestion Service")
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
service = DartsService()
api_pb2_grpc.add_SuggestionServicer_to_server(service, server)
health_pb2_grpc.add_HealthServicer_to_server(service, server)
server.add_insecure_port(DEFAULT_PORT)
print("Listening...")
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == "__main__":
serve()
|
apache-2.0
| 558,172,976,097,228,500
| 31.454545
| 74
| 0.727591
| false
| 3.552239
| false
| false
| false
|
arunkgupta/gramps
|
gramps/plugins/lib/maps/placeselection.py
|
1
|
9938
|
# -*- python -*-
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011-2012 Serge Noiraud
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
from gramps.gen.ggettext import sgettext as _
import re
from gi.repository import GObject
import math
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
_LOG = logging.getLogger("maps.placeselection")
#-------------------------------------------------------------------------
#
# GTK/Gnome modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps Modules
#
#-------------------------------------------------------------------------
from gramps.gen.errors import WindowActiveError
from gramps.gui.managedwindow import ManagedWindow
from osmGps import OsmGps
#-------------------------------------------------------------------------
#
# Functions and variables
#
#-------------------------------------------------------------------------
PLACE_REGEXP = re.compile('<span background="green">(.*)</span>')
PLACE_STRING = '<span background="green">%s</span>'
def match(self, lat, lon, radius):
"""
coordinates matching.
"""
rds = float(radius)
self.places = []
# place
for entry in self.place_list:
        if math.hypot(lat - float(entry[3]),
                      lon - float(entry[4])) <= rds:
# Do we already have this place ? avoid duplicates
self.get_location(entry[9])
if not [self.country, self.state, self.county] in self.places:
self.places.append([self.country, self.state, self.county])
return self.places
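# Illustrative sketch (not part of the original module): match() returns a list
# of unique [country, state, county] triples for the places found within
# `radius` degrees of (lat, lon), e.g. something like:
#
#   [[u'France', u'Normandie', u'Calvados'],
#    [u'France', u'Normandie', u'Manche']]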
#-------------------------------------------------------------------------
#
# PlaceSelection
#
#-------------------------------------------------------------------------
class PlaceSelection(ManagedWindow, OsmGps):
"""
We show a selection box for possible places in a region of the map.
    We can select the diameter of the region, which is a circle.
    Depending on this region, we show the possible choices.
    We select the value we need, which opens the EditPlace box.
"""
def __init__(self, uistate, dbstate, maps, layer, places, lat, lon,
function, oldvalue=None):
"""
Place Selection initialization
"""
try:
ManagedWindow.__init__(self, uistate, [],
PlaceSelection)
except WindowActiveError:
return
self.uistate = uistate
self.dbstate = dbstate
self.lat = lat
self.lon = lon
self.osm = maps
self.country = None
self.state = None
self.county = None
self.radius = 1.0
self.circle = None
self.oldvalue = oldvalue
self.place_list = places
self.function = function
self.selection_layer = layer
self.layer = layer
alignment = Gtk.Alignment.new(0, 1, 0, 0)
self.set_window(
Gtk.Dialog(_('Place Selection in a region'),
buttons=(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)),
None, _('Place Selection in a region'), None)
label = Gtk.Label(label=_('Choose the radius of the selection.\n'
'On the map you should see a circle or an'
' oval depending on the latitude.'))
alignment.add(label)
self.window.vbox.pack_start(alignment, False, True, 0)
adj = Gtk.Adjustment(1.0, 0.1, 3.0, 0.1, 0, 0)
# default value is 1.0, minimum is 0.1 and max is 3.0
slider = Gtk.Scale(orientation=Gtk.Orientation.HORIZONTAL,
adjustment=adj)
slider.set_digits(1)
slider.set_value_pos(Gtk.PositionType.BOTTOM)
slider.connect('value-changed', self.slider_change, self.lat, self.lon)
self.window.vbox.pack_start(slider, False, True, 0)
self.vadjust = Gtk.Adjustment(page_size=15)
self.scroll = Gtk.ScrolledWindow(self.vadjust)
self.scroll.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
self.scroll.set_shadow_type(Gtk.ShadowType.IN)
self.plist = Gtk.ListStore(str, str, str)
self.choices = Gtk.TreeView(self.plist)
self.scroll.add(self.choices)
self.renderer = Gtk.CellRendererText()
self.tvcol1 = Gtk.TreeViewColumn(_('Country'), self.renderer, markup=0)
self.tvcol2 = Gtk.TreeViewColumn(_('State'), self.renderer, markup=1)
self.tvcol3 = Gtk.TreeViewColumn(_('County'), self.renderer, markup=2)
self.tvcol1.set_sort_column_id(0)
self.tvcol2.set_sort_column_id(1)
self.tvcol3.set_sort_column_id(2)
self.choices.append_column(self.tvcol1)
self.choices.append_column(self.tvcol2)
self.choices.append_column(self.tvcol3)
self.window.vbox.pack_start(self.scroll, True, True, 0)
self.label2 = Gtk.Label()
self.label2.set_markup('<span background="green" foreground="black"'
'>%s</span>' %
_('The green values in the row correspond '
'to the current place values.'))
alignment = Gtk.Alignment.new(0, 1, 0, 0)
alignment.add(self.label2)
self.window.vbox.pack_start(alignment, False, True, 0)
self.window.set_default_size(400, 300)
self.choices.connect('row-activated', self.selection, function)
self.window.connect('response', self.close)
self.window.show_all()
self.show()
self.label2.hide()
self.slider_change(None, lat, lon)
def close(self, *obj):
"""
Close the selection place editor
"""
self.hide_the_region()
ManagedWindow.close(self, *obj)
def slider_change(self, obj, lat, lon):
"""
Display on the map a circle in which we select all the places inside this region.
"""
self.radius = obj.get_value() if obj else 1.0
self.show_the_region(self.radius)
match(self, lat, lon, self.radius)
self.plist.clear()
if self.oldvalue != None:
# The old values are always in the first row.
# In this case, we change the color of the row.
# display the associated message
self.label2.show()
field1, field2, field3 = self.oldvalue
self.plist.append((PLACE_STRING % field1,
PLACE_STRING % field2,
PLACE_STRING % field3)
)
for place in self.places:
self.plist.append(place)
        # here, we could add values from geographic name services ...
# if we found no place, we must create a default place.
self.plist.append((_("New place with empty fields"), "", "..."))
def hide_the_region(self):
"""
Hide the layer which contains the circle
"""
layer = self.get_selection_layer()
if layer:
self.remove_layer(layer)
def show_the_region(self, rds):
"""
Show a circle in which we select the places.
"""
# circle (rds)
self.hide_the_region()
self.selection_layer = self.add_selection_layer()
self.selection_layer.add_circle(rds, self.lat, self.lon)
def get_location(self, place):
"""
get location values
"""
place = self.dbstate.db.get_place_from_gramps_id(place)
loc = place.get_main_location()
data = loc.get_text_data_list()
# new background or font color on gtk fields ?
self.country = data[6]
self.state = data[5]
self.county = data[4]
return(self.country, self.state, self.county)
def selection(self, obj, index, column, function):
"""
get location values and call the real function : add_place, edit_place
"""
if self.plist[index][2] == "...":
# case with blank values ( New place with empty fields )
self.function( "", "", "", self.lat, self.lon)
elif self.plist[index][0][1:5] == "span":
# case with old values ( keep the old values of the place )
name = PLACE_REGEXP.search(self.plist[index][0], 0)
country = name.group(1)
name = PLACE_REGEXP.search(self.plist[index][1], 0)
state = name.group(1)
name = PLACE_REGEXP.search(self.plist[index][2], 0)
county = name.group(1)
self.function( country, county, state, self.lat, self.lon)
else:
# Set the new values of the country, county and state fields.
self.function( self.plist[index][0], self.plist[index][2],
self.plist[index][1], self.lat, self.lon)
|
gpl-2.0
| -5,166,526,136,838,341,000
| 37.972549
| 89
| 0.543872
| false
| 4.132225
| false
| false
| false
|
zenpoy/pokerstats
|
app.py
|
1
|
3503
|
import os
from flask import Flask, request, jsonify
from mongoengine import *
import datetime
app = Flask(__name__)
mongodb_uri = os.environ.get('MONGODB_URI', 'localhost:27017')
connect("pokerstats", host=mongodb_uri)
class Player(Document):
name = StringField(required=True, unique=True, max_length=200)
class Record(Document):
player = ReferenceField("Player", required=True)
game = ReferenceField("Game", required=True)
cash_in = FloatField()
    good_all_in = ListField(field=DateTimeField())
    bad_all_in = ListField(field=DateTimeField())
cash_out = FloatField()
class Game(Document):
name = StringField(max_length=200)
date = DateTimeField()
cash = FloatField()
@app.route('/', methods=['GET'])
@app.route('/players', methods=['GET'])
def get_players():
return Player.objects.to_json()
@app.route('/players/<player_id>', methods=['GET'])
def get_player(player_id):
p = Player.objects(id=player_id)
return p.to_json(), 200
@app.route('/players', methods=['POST'])
def create_player():
# TODO: add check for is json
json_data = request.get_json()
p = Player(**json_data)
try:
p.save()
except NotUniqueError as e:
return jsonify({'error' : e.message}), 200
return p.to_json(), 201
@app.route('/players/<player_id>', methods=['DELETE'])
def delete_player(player_id):
Player.objects(id=player_id).delete()
return jsonify({}), 200
@app.route('/players/<player_id>', methods=['PUT'])
def update_player(player_id):
# TODO: add check for is json
json_data = request.get_json()
p = Player.objects(id=player_id)
p.update(**json_data)
return p.to_json(), 200
@app.route('/games', methods=['GET'])
def get_games():
return Game.objects.to_json()
@app.route('/games/<game_id>', methods=['GET'])
def get_game(game_id):
p = Game.objects(id=game_id)
return p.to_json(), 200
@app.route('/games', methods=['POST'])
def create_game():
# TODO: add check for is json
json_data = request.get_json()
p = Game(**json_data)
try:
p.save()
except NotUniqueError as e:
return jsonify({'error' : e.message}), 200
return p.to_json(), 201
@app.route('/games/<game_id>', methods=['DELETE'])
def delete_game(game_id):
Game.objects(id=game_id).delete()
return jsonify({}), 200
@app.route('/games/<game_id>', methods=['PUT'])
def update_game(game_id):
# TODO: add check for is json
json_data = request.get_json()
p = Game.objects(id=game_id)
p.update(**json_data)
return p.to_json(), 200
@app.route('/records', methods=['GET'])
def get_records():
return Record.objects.to_json()
@app.route('/records/<record_id>', methods=['GET'])
def get_record(record_id):
p = Record.objects(id=record_id)
return p.to_json(), 200
@app.route('/records', methods=['POST'])
def create_record():
# TODO: add check for is json
json_data = request.get_json()
p = Record(**json_data)
try:
p.save()
except NotUniqueError as e:
return jsonify({'error' : e.message}), 200
return p.to_json(), 201
@app.route('/records/<record_id>', methods=['DELETE'])
def delete_record(record_id):
Record.objects(id=record_id).delete()
return jsonify({}), 200
@app.route('/records/<record_id>', methods=['PUT'])
def update_record(record_id):
# TODO: add check for is json
json_data = request.get_json()
p = Record.objects(id=record_id)
p.update(**json_data)
return p.to_json(), 200
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
# connect to the mongodb database
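# --- Illustrative client calls (an assumption for clarity, not part of the original file;
# --- assumes the API is running locally on port 5000) ---
# import requests
# requests.post("http://localhost:5000/players", json={"name": "Alice"})
# requests.get("http://localhost:5000/players").json()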
|
mit
| -8,197,957,731,518,686,000
| 24.764706
| 63
| 0.675992
| false
| 2.92404
| false
| false
| false
|
srajag/nova
|
nova/objects/ec2.py
|
1
|
3208
|
# Copyright 2014 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import exception
from nova.objects import base
from nova.objects import fields
class EC2InstanceMapping(base.NovaPersistentObject, base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.IntegerField(),
'uuid': fields.UUIDField(),
}
@staticmethod
def _from_db_object(context, imap, db_imap):
for field in imap.fields:
imap[field] = db_imap[field]
imap._context = context
imap.obj_reset_changes()
return imap
@base.remotable
def create(self, context):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
db_imap = db.ec2_instance_create(context, self.uuid)
self._from_db_object(context, self, db_imap)
@base.remotable_classmethod
def get_by_uuid(cls, context, instance_uuid):
db_imap = db.ec2_instance_get_by_uuid(context, instance_uuid)
if db_imap:
return cls._from_db_object(context, cls(), db_imap)
@base.remotable_classmethod
def get_by_id(cls, context, ec2_id):
db_imap = db.ec2_instance_get_by_id(context, ec2_id)
if db_imap:
return cls._from_db_object(context, cls(), db_imap)
class EC2VolumeMapping(base.NovaPersistentObject, base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.IntegerField(),
'uuid': fields.UUIDField(),
}
@staticmethod
def _from_db_object(context, vmap, db_vmap):
for field in vmap.fields:
vmap[field] = db_vmap[field]
vmap._context = context
vmap.obj_reset_changes()
return vmap
@base.remotable
def create(self, context):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
db_vmap = db.ec2_volume_create(context, self.uuid)
self._from_db_object(context, self, db_vmap)
@base.remotable_classmethod
def get_by_uuid(cls, context, volume_uuid):
db_vmap = db.ec2_volume_get_by_uuid(context, volume_uuid)
if db_vmap:
return cls._from_db_object(context, cls(context), db_vmap)
@base.remotable_classmethod
def get_by_id(cls, context, ec2_id):
db_vmap = db.ec2_volume_get_by_id(context, ec2_id)
if db_vmap:
return cls._from_db_object(context, cls(context), db_vmap)
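# --- Illustrative use of the mappings above (an assumption for clarity, not part of the
# --- original file; 'ctx' stands for a valid request context, 'instance_uuid' for an
# --- existing instance UUID) ---
# imap = EC2InstanceMapping()
# imap.uuid = instance_uuid
# imap.create(ctx)                      # persists the row and populates imap.id
# ec2_id = imap.id
# same = EC2InstanceMapping.get_by_uuid(ctx, instance_uuid)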
|
apache-2.0
| -248,260,300,879,144,450
| 33.12766
| 78
| 0.62687
| false
| 3.604494
| false
| false
| false
|
jbrendel/RESTx
|
src/python/restx/components/TwitterComponent.py
|
1
|
8443
|
"""
RESTx: Sane, simple and effective data publishing and integration.
Copyright (C) 2010 MuleSoft Inc. http://www.mulesoft.com
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
"""
A test component.
"""
# Python imports
import urllib
# RESTx imports
from restx.components.api import *
class TwitterComponent(BaseComponent):
NAME = "TwitterComponent"
PARAM_DEFINITION = {
"account_name" : ParameterDef(PARAM_STRING, "Twitter account name"),
"account_password" : ParameterDef(PARAM_PASSWORD, "Password")
}
DESCRIPTION = "Provides access to a Twitter account."
DOCUMENTATION = \
"""The Twitter component is designed to provide access to a Twitter account.
It can be used to get as well as update status, or to view the timeline of
a Twitter account.
To create the resource, the Twitter account name and password need to be specified.
"""
SERVICES = {
"status" : { "desc" : "You can GET the status or POST a new status to it." },
"timeline" : {
"desc" : "You can GET the timeline of the user.",
"params" : {
"count" : ParameterDef(PARAM_NUMBER, "Number of results", required=False, default=20),
"filter" : ParameterDef(PARAM_BOOL, "If set, only 'important' fields are returned", required=False, default=True),
},
},
"home_timeline" : {
"desc" : "You can GET the home timeline of the user.",
"params" : {
"count" : ParameterDef(PARAM_NUMBER, "Number of results", required=False, default=20),
"filter" : ParameterDef(PARAM_BOOL, "If set, only 'important' fields are returned", required=False, default=True),
},
},
}
def __get_status(self, accountname):
"""
        Get the latest Twitter status for the specified account.
@param accountname: Name of the account for which we get the status.
@type accountname: string
@return: The status text.
@rtype: string
"""
# Get the status for this account from Twitter (we get it in JSON format)
code, data = self.httpGet("http://api.twitter.com/1/users/show.json?screen_name=%s" % accountname)
if code == HTTP.OK:
obj = self.fromJson(data)
else:
return "Problem with Twitter: " + data
# Return the requested information, in this case the latest status
return obj['status']['text']
def __post_status(self, accountname, password, input):
"""
        Post a new Twitter status for the specified account.
@param accountname: Name of the account for which we post the status.
@type accountname: string
@param password: The password for this account.
@type password: string
@param input: The new status update text.
@type input: string
@return: The status text.
@rtype: string
"""
# Send a new status string to the Twitter account
self.httpSetCredentials(accountname, password)
code, data = self.httpPost("http://api.twitter.com/1/statuses/update.xml",
"status=%s" % input)
data = "Status updated"
# Return the requested information, in this case the latest status
return data
def __result_filter(self, data):
"""
Filter timeline results to contain only the most essential information.
"""
r = list()
for elem in data:
u = elem['user']
user = dict(screen_name=u['screen_name'], name=u['name'], followers=u['followers_count'])
message = dict(date=elem['created_at'], Text=elem['text'], id=elem['id'],
reply="http://twitter.com/?status=@%s&in_reply_to_status_id=%s&in_reply_to=%s" % (u['screen_name'], elem['id'], u['screen_name']))
r.append(dict(message=message, user=user))
return r
def status(self, method, input):
"""
Gets or updates the twitter status for the specified account.
@param method: The HTTP request method.
@type method: string
@param input: Any data that came in the body of the request.
@type input: string
@return: The output data of this service.
@rtype: string
"""
# Get my parameters
if method == HTTP.GET:
return Result.ok(self.__get_status(self.account_name))
elif method == HTTP.POST:
return Result.ok(self.__post_status(self.account_name, self.account_password, input))
else:
return Result.methodNotAllowed("Only supporting GET and POST for this resource")
def timeline(self, method, input, count, filter):
"""
Get the user's timeline.
        @param method: The HTTP request method.
        @type method: string
        @param input: Any data that came in the body of the request.
        @type input: string
        @param count: The number of timeline entries to return.
        @type count: number
        @param filter: If set, only 'important' fields are returned.
        @type filter: boolean
@return: The output data of this service.
@rtype: string
"""
# Get my parameters
self.httpSetCredentials(self.account_name, self.account_password)
if count > 0:
count_param = "?count=%s" % count
else:
count_param = ""
code, obj_str = self.httpGet("http://api.twitter.com/1/statuses/user_timeline.json"+count_param)
if code == HTTP.OK:
obj = self.fromJson(obj_str)
else:
obj = obj_str
if filter:
obj = self.__result_filter(obj)
return Result.ok(obj)
def home_timeline(self, method, input, count, filter):
"""
Get the user's home timeline (also contains tweets from friends).
        @param method: The HTTP request method.
        @type method: string
        @param input: Any data that came in the body of the request.
        @type input: string
        @param count: The number of timeline entries to return.
        @type count: number
        @param filter: If set, only 'important' fields are returned.
        @type filter: boolean
@return: The output data of this service.
@rtype: string
"""
# Get my parameters
self.httpSetCredentials(self.account_name, self.account_password)
if count > 0:
count_param = "?count=%s" % count
else:
count_param = ""
code, obj_str = self.httpGet("http://api.twitter.com/1/statuses/home_timeline.json"+count_param)
if code == HTTP.OK:
obj = self.fromJson(obj_str)
else:
obj = obj_str
if filter:
obj = self.__result_filter(obj)
return Result.ok(obj)
|
gpl-3.0
| -6,494,160,187,986,690,000
| 37.20362
| 160
| 0.540803
| false
| 4.633919
| false
| false
| false
|
kmackay/emk
|
modules/c.py
|
1
|
22769
|
import os
import logging
import shlex
import re
import sys
import traceback
log = logging.getLogger("emk.c")
utils = emk.module("utils")
fix_path_regex = re.compile(r'[\W]+')
class _GccCompiler(object):
"""
Compiler class for using gcc/g++ to compile C/C++ respectively.
In order for the emk c module to use a compiler instance, the compiler class must define the following methods:
load_extra_dependencies
compile_c
compile_cxx
See the documentation for those functions in this class for more details.
Properties (defaults set based on the path prefix passed to the constructor):
c_path -- The path of the C compiler (eg "gcc").
cxx_path -- The path of the C++ compiler (eg "g++").
"""
def __init__(self, path_prefix=""):
"""
Create a new GccCompiler instance.
Arguments:
path_prefix -- The prefix to use for the gcc/g++ executables. For example, if you had a 32-bit Linux cross compiler
installed into /cross/linux, you might use 'c.compiler = c.GccCompiler("/cross/linux/bin/i686-pc-linux-gnu-")'
to configure the c module to use the cross compiler. The default value is "" (ie, use the system gcc/g++).
"""
self.name = "gcc"
self.c_path = path_prefix + "gcc"
self.cxx_path = path_prefix + "g++"
def load_extra_dependencies(self, path):
"""
Load extra dependencies for the given object file path. The extra dependencies could be loaded from a generated
dependency file for that path, or loaded from the emk.scope_cache(path) (or some other mechanism).
Arguments:
path -- The path of the object file to get dependencies for.
Returns a list of paths (strings) of all the extra dependencies.
"""
cache = emk.scope_cache(path)
return cache.get("secondary_deps", [])
def depfile_args(self, dep_file):
"""
Returns a list of arguments to write secondary dependencies to the given dep_file path.
"""
return ["-Wp,-MMD,%s" % (dep_file)]
def compile(self, exe, source, dest, includes, defines, flags):
dep_file = dest + ".dep"
args = [exe]
args.extend(self.depfile_args(dep_file))
args.extend(["-I%s" % (emk.abspath(d)) for d in includes])
args.extend(["-D%s=%s" % (key, value) for key, value in defines.items()])
args.extend(utils.flatten(flags))
args.extend(["-o", dest, "-c", source])
utils.call(args, print_stderr=False)
try:
with open(dep_file, "r") as f:
data = f.read()
data = data.replace("\\\n", "")
items = shlex.split(data)
unique_items = [emk.abspath(item) for item in (set(items[2:]) - set([""]))]
# call has_changed to set up rule cache for future builds.
for item in unique_items:
emk.current_rule.has_changed(item)
cache = emk.scope_cache(dest)
cache["secondary_deps"] = unique_items
except IOError:
log.error("Failed to open depfile %s", dep_file)
utils.rm(dep_file)
def compile_c(self, source, dest, includes, defines, flags):
"""
Compile a C source file into an object file.
Arguments:
source -- The C source file path to compile.
dest -- The output object file path.
includes -- A list of extra include directories.
defines -- A dict of <name>: <value> entries to be used as defines; each entry is equivalent to #define <name> <value>.
flags -- A list of additional flags. This list may contain tuples; to flatten the list, you could use the emk utils module:
'flattened = utils.flatten(flags)'.
"""
self.compile(self.c_path, source, dest, includes, defines, flags)
def compile_cxx(self, source, dest, includes, defines, flags):
"""
Compile a C++ source file into an object file.
Arguments:
source -- The C++ source file path to compile.
dest -- The output object file path.
includes -- A list of extra include directories.
defines -- A dict of <name>: <value> entries to be used as defines; each entry is equivalent to #define <name> <value>.
flags -- A list of additional flags. This list may contain tuples; to flatten the list, you could use the emk utils module:
'flattened = utils.flatten(flags)'.
"""
self.compile(self.cxx_path, source, dest, includes, defines, flags)
def obj_ext(self):
"""
Get the extension of object files built by this compiler.
"""
return ".o"
class _ClangCompiler(_GccCompiler):
"""
A compiler class for compiling using clang.
Properties:
lipo_path -- The path of the 'lipo' executable.
libtool_path -- The path of the 'libtool' executable.
"""
def __init__(self, path_prefix=""):
super(_ClangCompiler, self).__init__(path_prefix)
self.name = "clang"
self.c_path = path_prefix + "clang"
self.cxx_path = path_prefix + "clang++"
class _MsvcCompiler(object):
"""
Compiler class for using Microsoft's Visual C++ to compile C/C++.
In order for the emk c module to use a compiler instance, the compiler class must define the following methods:
load_extra_dependencies
compile_c
compile_cxx
See the documentation for those functions in this class for more details.
"""
def __init__(self, path_prefix=None, env_script="vcvarsall.bat", target_arch=None):
"""
Create a new MsvcCompiler instance.
Arguments:
path_prefix -- The prefix to use for the vcvarsall.bat file. The default value is derived from the VS*COMNTOOLS environment variable.
Properties:
cl_exe -- The absolute path to the cl executable.
"""
from link import _MsvcLinker
self.name = "msvc"
self._env = _MsvcLinker.vs_env(path_prefix, env_script, target_arch)
self._dep_re = re.compile(r'Note:\s+including file:\s+([^\s].*)\s*')
self.cl_exe = "cl.exe"
def load_extra_dependencies(self, path):
"""
Load extra dependencies for the given object file path. The extra dependencies could be loaded from a generated
dependency file for that path, or loaded from the emk.scope_cache(path) (or some other mechanism).
Arguments:
path -- The path of the object file to get dependencies for.
Returns a list of paths (strings) of all the extra dependencies.
"""
cache = emk.scope_cache(path)
return cache.get("secondary_deps", [])
def compile(self, source, dest, includes, defines, flags):
args = [self.cl_exe, "/nologo", "/c", "/showIncludes"]
args.extend(['/I%s' % (emk.abspath(d)) for d in includes])
args.extend(['/D%s=%s' % (key, value) for key, value in defines.items()])
args.extend(utils.flatten(flags))
args.extend(['/Fo%s' % dest, source])
stdout, stderr, returncode = utils.call(args, env=self._env, print_stdout=False, print_stderr=False, error_stream="both")
items = []
for l in stdout.splitlines():
m = self._dep_re.match(l)
if m:
items.append(m.group(1))
unique_items = utils.unique_list(items)
# call has_changed to set up rule cache for future builds.
for item in unique_items:
emk.current_rule.has_changed(item)
cache = emk.scope_cache(dest)
cache["secondary_deps"] = unique_items
def compile_c(self, source, dest, includes, defines, flags):
"""
Compile a C source file into an object file.
Arguments:
source -- The C source file path to compile.
dest -- The output object file path.
includes -- A list of extra include directories.
defines -- A dict of <name>: <value> entries to be used as defines; each entry is equivalent to #define <name> <value>.
flags -- A list of additional flags. This list may contain tuples; to flatten the list, you could use the emk utils module:
'flattened = utils.flatten(flags)'.
"""
if "/TC" not in flags:
flags.extend(["/TC"])
self.compile(source, dest, includes, defines, flags)
def compile_cxx(self, source, dest, includes, defines, flags):
"""
Compile a C++ source file into an object file.
Arguments:
source -- The C++ source file path to compile.
dest -- The output object file path.
includes -- A list of extra include directories.
defines -- A dict of <name>: <value> entries to be used as defines; each entry is equivalent to #define <name> <value>.
flags -- A list of additional flags. This list may contain tuples; to flatten the list, you could use the emk utils module:
'flattened = utils.flatten(flags)'.
"""
if "/TP" not in flags:
flags.extend(["/TP"])
self.compile(source, dest, includes, defines, flags)
def obj_ext(self):
"""
Get the extension of object files built by this compiler.
"""
return ".obj"
class Module(object):
"""
emk module for compiling C and C++ code. Depends on the link module (and utils).
This module defines emk rules during the prebuild stage, to allow autodiscovery of generated source files
from rules defined before the prebuild stage (ie, in the post_rules() method of other modules). See the
autodetect and autodetect_from_targets properties for more information about autodiscovery of source files.
This module adds the compiled object files to the link module, which will link them into libraries/executables as desired.
The object files are added to the link module's 'objects' property (each mapped to the source file that the object file
was built from), so that the link module can autodetect main() functions from the source (if link.detect_exe == "simple").
See the link module documentation for details of main() autodetection.
The c module also sets the link module's link_cxx flag if there are any C++ source files being compiled.
Note that the compilation rules are not built automatically; the link module (or other modules/user code)
is responsible for marking the object files as autobuild if desired.
Classes:
GccCompiler -- A compiler class that uses gcc/g++ to compile.
ClangCompiler -- A compiler class that uses clang/clang++ to compile.
MsvcCompiler -- A compiler class that uses MSVC on Windows to compile binaries.
Properties (inherited from parent scope):
compiler -- The compiler instance that is used to load dependencies and compile C/C++ code.
include_dirs -- A list of additional include directories for both C and C++ code.
defines -- A dict of <name>: <value> defines for both C and C++; each entry is equivalent to #define <name> <value>.
flags -- A list of flags for both C and C++. If you have a 'flag' that is more than one argument, pass it as a tuple.
Example: ("-isystem", "/path/to/extra/sys/includes"). Duplicate flags will be removed.
source_files -- A list of files that should be included for compilation. Files will be built as C or C++ depending on the file extension.
c.exts -- The list of file extensions (suffixes) that will be considered as C code. The default is [".c"].
c.include_dirs -- A list of additional include directories for C code.
c.defines -- A dict of <name>: <value> defines for C.
c.flags -- A list of flags for C.
c.source_files -- A list of C files that should be included for compilation (will be built as C code).
cxx.exts -- The list of file extensions (suffixes) that will be considered as C++ code. The default is [".cpp", ".cxx", ".c++", ".cc"].
cxx.include_dirs -- A list of additional include directories for C++ code.
cxx.defines -- A dict of <name>: <value> defines for C++.
cxx.flags -- A list of flags for C++.
cxx.source_files -- A list of C++ files that should be included for compilation (will be built as C++ code).
autodetect -- Whether or not to autodetect files to build from the scope directory. All files that match the c.exts suffixes
will be compiled as C, and all files that match the cxx.exts suffixes will be compiled as C++. Autodetection
does not take place until the prebuild stage, so that autodetection of generated code can gather as many targets
as possible (see autodetect_from_targets). The default value is True.
autodetect_from_targets -- Whether or not to autodetect generated code based on rules defined in the current scope.
The default value is True.
excludes -- A list of source files to exclude from compilation.
non_lib_src -- A list of source files that will not be linked into a library for this directory (passed to the link module).
non_exe_src -- A list of source files that will not be linked into an executable, even if they contain a main() function.
unique_names -- If True, the output object files will be named according to the path from the project directory, to avoid
naming conflicts when the build directory is not a relative path. The default value is False.
If True, the link module's unique_names property will also be set to True.
obj_funcs -- A list of functions that are run for each generated object file path.
obj_ext -- The file extension for object files generated by the compiler (eg ".o" for gcc or ".obj" for MSVC). This property is
read-only as its value is provided by the compiler implementation.
"""
def __init__(self, scope, parent=None):
self.GccCompiler = _GccCompiler
self.ClangCompiler = _ClangCompiler
self.MsvcCompiler = _MsvcCompiler
self.link = emk.module("link")
self.c = emk.Container()
self.cxx = emk.Container()
if parent:
self.compiler = parent.compiler
self.include_dirs = list(parent.include_dirs)
self.defines = parent.defines.copy()
self.flags = list(parent.flags)
self.source_files = list(parent.source_files)
self.c.exts = list(parent.c.exts)
self.c.include_dirs = list(parent.c.include_dirs)
self.c.defines = parent.c.defines.copy()
self.c.flags = list(parent.c.flags)
self.c.source_files = list(parent.c.source_files)
self.cxx.exts = list(parent.cxx.exts)
self.cxx.include_dirs = list(parent.cxx.include_dirs)
self.cxx.defines = parent.cxx.defines.copy()
self.cxx.flags = list(parent.cxx.flags)
self.cxx.source_files = list(parent.cxx.source_files)
self.autodetect = parent.autodetect
self.autodetect_from_targets = parent.autodetect_from_targets
self.excludes = list(parent.excludes)
self.non_lib_src = list(parent.non_lib_src)
self.non_exe_src = list(parent.non_exe_src)
self.obj_funcs = list(parent.obj_funcs)
self.unique_names = parent.unique_names
else:
if sys.platform == "darwin":
self.compiler = self.ClangCompiler()
else:
self.compiler = self.GccCompiler()
self.include_dirs = []
self.defines = {}
self.flags = []
self.source_files = []
self.c.include_dirs = []
self.c.defines = {}
self.c.flags = []
self.c.exts = [".c"]
self.c.source_files = []
self.cxx.include_dirs = []
self.cxx.defines = {}
self.cxx.flags = []
self.cxx.exts = [".cpp", ".cxx", ".c++", ".cc"]
self.cxx.source_files = []
self.autodetect = True
self.autodetect_from_targets = True
self.excludes = []
self.non_lib_src = []
self.non_exe_src = []
self.obj_funcs = []
self.unique_names = False
@property
def obj_ext(self):
return self.compiler.obj_ext()
def new_scope(self, scope):
return Module(scope, parent=self)
def _matches_exts(self, file_path, exts):
for ext in exts:
if file_path.endswith(ext):
return True
return False
def post_rules(self):
if emk.cleaning:
return
emk.do_prebuild(self._prebuild)
if self.unique_names and self.link:
self.link.unique_names = True
def _prebuild(self):
c_sources = set()
cxx_sources = set()
self._non_exe_src = set(self.non_exe_src)
self._non_lib_src = set(self.non_lib_src)
if self.autodetect:
if self.autodetect_from_targets:
target_c_files = [t for t in emk.local_targets.keys() if self._matches_exts(t, self.c.exts)]
if target_c_files:
log.debug("Detected generated C files: %s", target_c_files)
self.c.source_files.extend(target_c_files)
target_cxx_files = [t for t in emk.local_targets.keys() if self._matches_exts(t, self.cxx.exts)]
if target_cxx_files:
log.debug("Detected generated C++ files: %s", target_cxx_files)
self.cxx.source_files.extend(target_cxx_files)
files = set(self.source_files)
files.update([f for f in os.listdir(emk.scope_dir) if os.path.isfile(f)])
for file_path in files:
if self._matches_exts(file_path, self.c.exts):
self.c.source_files.append(file_path)
if self._matches_exts(file_path, self.cxx.exts):
self.cxx.source_files.append(file_path)
for f in self.c.source_files:
if f in self.excludes:
continue
c_sources.add(f)
for f in self.cxx.source_files:
if f in self.excludes:
continue
cxx_sources.add(f)
c_includes = utils.unique_list(self.include_dirs + self.c.include_dirs)
c_flags = utils.unique_list(self.flags + self.c.flags)
c_defines = dict(self.defines)
c_defines.update(self.c.defines)
c_args = (False, c_includes, c_defines, c_flags)
cxx_includes = utils.unique_list(self.include_dirs + self.cxx.include_dirs)
cxx_flags = utils.unique_list(self.flags + self.cxx.flags)
cxx_defines = dict(self.defines)
cxx_defines.update(self.cxx.defines)
cxx_args = (True, cxx_includes, cxx_defines, cxx_flags)
objs = {}
for src in c_sources:
self._add_rule(objs, src, c_args)
for src in cxx_sources:
self._add_rule(objs, src, cxx_args)
if self.link:
self.link.objects.update([(os.path.join(emk.build_dir, obj + self.obj_ext), src) for obj, src in objs.items()])
if cxx_sources:
self.link.link_cxx = True
def _add_rule(self, objs, src, args):
fname = os.path.basename(src)
n, ext = os.path.splitext(fname)
if self.unique_names:
relpath = fix_path_regex.sub('_', os.path.relpath(emk.scope_dir, emk.proj_dir))
n = relpath + "_" + n
name = n
c = 1
while name in objs:
name = "%s_%s" % (n, c)
c += 1
objs[name] = src
if self.link:
objpath = os.path.join(emk.build_dir, name + self.obj_ext)
if src in self._non_exe_src:
self.link.non_exe_objs.append(objpath)
if src in self._non_lib_src:
self.link.non_lib_objs.append(objpath)
dest = os.path.join(emk.build_dir, name + self.obj_ext)
requires = [src]
extra_deps = None
if self.compiler:
extra_deps = self.compiler.load_extra_dependencies(emk.abspath(dest))
if extra_deps is None:
requires.append(emk.ALWAYS_BUILD)
emk.rule(self.do_compile, dest, requires, *args, cwd_safe=True, ex_safe=True)
if extra_deps:
emk.weak_depend(dest, extra_deps)
for f in self.obj_funcs:
f(dest)
def do_compile(self, produces, requires, cxx, includes, defines, flags):
"""
Rule function to compile a source file into an object file.
The compiler instance will also produce an <object file>.dep file that contains additional dependencies (ie, header files).
Arguments:
produces -- The path to the object file that will be produced.
requires -- The list of dependencies; the source file should be first.
cxx -- If True, the source file will be compiled as C++; otherwise it will be compiled as C.
includes -- A list of additional include directories.
defines -- A dict of <name>: <value> entries to be defined (like #define <name> <value>).
flags -- A list of flags to pass to the compiler. Compound flags should be in a tuple, eg: ("-isystem", "/path/to/extra/sys/includes").
"""
if not self.compiler:
raise emk.BuildError("No compiler defined!")
try:
if cxx:
self.compiler.compile_cxx(requires[0], produces[0], includes, defines, flags)
else:
self.compiler.compile_c(requires[0], produces[0], includes, defines, flags)
except:
utils.rm(produces[0])
utils.rm(produces[0] + ".dep")
raise
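# --- Illustrative emk_rules.py configuration (an assumption based on the docstrings
# --- above, not part of this module) ---
# c = emk.module("c")
# link = emk.module("link")
# c.compiler = c.GccCompiler()            # or c.ClangCompiler() / c.MsvcCompiler()
# c.include_dirs.append("include")
# c.defines["MY_DEFINE"] = "1"
# c.flags.extend(["-O2", ("-isystem", "/opt/extra/include")])
# c.excludes.append("experimental.c")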
|
bsd-2-clause
| 672,571,377,680,635,400
| 43.998024
| 149
| 0.587421
| false
| 4.121832
| false
| false
| false
|
JFK422/Hitch
|
components/Menu/menuEditTab.py
|
1
|
2326
|
import qtawesome as qta
from components.Menu import menuSeperator
from PyQt5 import QtGui, QtCore, QtWidgets
#Menu widget placed in the stack of vLPart
class MenuEdit(QtWidgets.QWidget):
def setup(self):
vMenu = QtWidgets.QVBoxLayout()
vMenu.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop)
vMenu.setContentsMargins(QtCore.QMargins(20,20,20,20))
scrollLay = QtWidgets.QVBoxLayout()
scrollLay.setContentsMargins(QtCore.QMargins(10,10,10,10))
scrollLay.setAlignment(QtCore.Qt.AlignTop)
scrollLayWid = QtWidgets.QWidget()
scrollLayWid.setObjectName("scrollMenuLay")
scrollLayWid.setLayout(scrollLay)
fileText = QtWidgets.QLabel("Edit")
fileText.setObjectName("menuTitle")
vMenu.addWidget(fileText)
scroll = QtWidgets.QScrollArea()
scroll.setWidget(scrollLayWid)
scroll.setWidgetResizable(True)
vMenu.addWidget(scroll)
#Add icons later!
undo = QtWidgets.QPushButton("Undo")
undo.setMaximumHeight(50)
undo.setObjectName("scrollMenuItem")
scrollLay.addWidget(undo)
redo = QtWidgets.QPushButton("Redo")
redo.setMaximumHeight(50)
redo.setObjectName("scrollMenuItem")
scrollLay.addWidget(redo)
sep = menuSeperator.MenuSeperator()
sep.setup()
scrollLay.addWidget(sep)
cut = QtWidgets.QPushButton("Cut")
cut.setMaximumHeight(50)
cut.setObjectName("scrollMenuItem")
scrollLay.addWidget(cut)
copy = QtWidgets.QPushButton("Copy")
copy.setMaximumHeight(50)
copy.setObjectName("scrollMenuItem")
scrollLay.addWidget(copy)
paste = QtWidgets.QPushButton("Paste")
paste.setMaximumHeight(50)
paste.setObjectName("scrollMenuItem")
scrollLay.addWidget(paste)
sep2 = menuSeperator.MenuSeperator()
sep2.setup()
scrollLay.addWidget(sep2)
search = QtWidgets.QLineEdit("")
search.setPlaceholderText("Search")
search.setMinimumHeight(50)
search.setObjectName("menuSearch")
scrollLay.addWidget(search)
self.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding))
self.setLayout(vMenu)
|
gpl-3.0
| 5,954,955,563,805,491,000
| 31.774648
| 115
| 0.674549
| false
| 4.09507
| false
| false
| false
|
dkeester/learn_python
|
file_read_print/file_read_print.py
|
1
|
1748
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# A program to figure out the basics of file I/O
data = """\
I met a traveller from an antique land
Who said: Two vast and trunkless legs of stone
Stand in the desert. Near them, on the sand,
Half sunk, a shattered visage lies, whose frown,
And wrinkled lip, and sneer of cold command,
Tell that its sculptor well those passions read
Which yet survive, stamped on these lifeless things,
The hand that mocked them and the heart that fed:
And on the pedestal these words appear:
'My name is Ozymandias, king of kings:
Look on my works, ye Mighty, and despair!'
Nothing beside remains. Round the decay
Of that colossal wreck, boundless and bare
The lone and level sands stretch far away.
-- Ozymandias by Percy Shelley
"""
# write the file
file = open('data_file', mode='w', encoding='utf-8')
file.write(data)
file.close()
# get some info about the file
file = open('data_file', mode='r', encoding='utf-8')
print("After open...")
print("name: " + file.name)
print("encoding: " + file.encoding)
print("mode: " + file.mode)
file.close()
print("After close...")
print("name: " + file.name)
print("encoding: " + file.encoding)
print("mode: " + file.mode)
# print the file and close automatically
with open('data_file', mode='r', encoding='utf-8') as file:
for line in file:
print(line, end='')
# print the file in reverse (character by character), close automatically
with open('data_file', mode='r', encoding='utf-8') as file:
lines = file.read()
for line in reversed(lines):
print(line, end='')
print('\n')
# print the file line-by-line in reverse
with open('data_file', mode='r', encoding='utf-8') as file:
lines = list(file)
for line in reversed(lines):
print(line, end='')
|
apache-2.0
| 6,787,229,724,184,578,000
| 29.666667
| 59
| 0.688787
| false
| 3.11032
| false
| false
| false
|
jhamman/storylines
|
setup.py
|
1
|
4509
|
#!/usr/bin/env python
import os
import re
import sys
import warnings
from setuptools import setup
MAJOR = 0
MINOR = 0
MICRO = 0
ISRELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
QUALIFIER = ''
DISTNAME = 'storylines'
LICENSE = 'GNU General Public License v3.0'
AUTHOR = 'Joseph Hamman'
AUTHOR_EMAIL = 'jhamman@ucar.edu'
URL = 'https://github.com/jhamman/storylines'
CLASSIFIERS = [
'Development Status :: 1 - Planning',
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Operating System :: POSIX',
'Intended Audience :: Science/Research',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Atmospheric Science',
]
INSTALL_REQUIRES = ['xarray >= 0.8.2']
TESTS_REQUIRE = ['pytest >= 3.0.3']
DESCRIPTION = "Quantitative hydrologic storylines to assess climate impacts"
LONG_DESCRIPTION = """
**storylines** is a framework for characterizing uncertainty in traditional
hydroloic climate impacts modeling chains (climate models, downscaling methods,
hydrologic models). It includes tools for evaluating model fidelity and culling
models accordingly to reduce these uncertainties, and finally distilling
projections into a discrete set of quantitative hydrologic storylines that
represent key, impact-focused, features from the full range of future
scenarios.
**storylines** is being developed at the National Center for Atmospheric
Research (NCAR_), Research Applications Laboratory (RAL_) - Hydrometeorological
Applications Program (HAP_) under the support of USACE.
.. _NCAR: http://ncar.ucar.edu/
.. _RAL: https://www.ral.ucar.edu
.. _HAP: https://www.ral.ucar.edu/hap
Important links
---------------
- HTML documentation: http://storylines.readthedocs.io
- Issue tracker: http://github.com/jhamman/storylines/issues
- Source code: http://github.com/jhamman/storylines
"""
# code to extract and write the version copied from pandas
FULLVERSION = VERSION
write_version = True
if not ISRELEASED:
import subprocess
FULLVERSION += '.dev'
pipe = None
for cmd in ['git', 'git.cmd']:
try:
pipe = subprocess.Popen(
[cmd, "describe", "--always", "--match", "v[0-9]*"],
stdout=subprocess.PIPE)
(so, serr) = pipe.communicate()
if pipe.returncode == 0:
break
except:
pass
if pipe is None or pipe.returncode != 0:
# no git, or not in git dir
if os.path.exists('storylines/version.py'):
warnings.warn("Couldn't get git revision, using existing"
"storylines/version.py")
write_version = False
else:
warnings.warn(
"Couldn't get git revision, using generic version string")
else:
# have git, in git dir, but may have used a shallow clone
# (travis does this)
rev = so.strip()
# makes distutils blow up on Python 2.7
if sys.version_info[0] >= 3:
rev = rev.decode('ascii')
if not rev.startswith('v') and re.match("[a-zA-Z0-9]{7,9}", rev):
# partial clone, manually construct version string
# this is the format before we started using git-describe
# to get an ordering on dev version strings.
rev = "v%s.dev-%s" % (VERSION, rev)
        # Strip leading v from tags format "vx.y.z" to get the version string
FULLVERSION = rev.lstrip('v')
else:
FULLVERSION += QUALIFIER
def write_version_py(filename=None):
cnt = """\
version = '%s'
short_version = '%s'
"""
if not filename:
filename = os.path.join(
os.path.dirname(__file__), 'storylines', 'version.py')
a = open(filename, 'w')
try:
a.write(cnt % (FULLVERSION, VERSION))
finally:
a.close()
if write_version:
write_version_py()
setup(name=DISTNAME,
version=FULLVERSION,
license=LICENSE,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
classifiers=CLASSIFIERS,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
url=URL,
packages=['storylines', 'storylines.tools'],
package_data={'storylines': ['test/data/*']})
|
gpl-3.0
| 5,283,064,456,767,113,000
| 29.883562
| 79
| 0.642271
| false
| 3.726446
| false
| false
| false
|
apdjustino/DRCOG_Urbansim
|
src/opus_gui/results_manager/views/ui_results_browser.py
|
1
|
12710
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/Users/ckla/Documents/workspace/opus_trunk/opus_gui/results_manager/views/results_browser.ui'
#
# Created: Sun May 10 17:20:29 2009
# by: PyQt4 UI code generator 4.4.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_ResultsBrowser(object):
def setupUi(self, ResultsBrowser):
ResultsBrowser.setObjectName("ResultsBrowser")
ResultsBrowser.resize(819, 744)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(ResultsBrowser.sizePolicy().hasHeightForWidth())
ResultsBrowser.setSizePolicy(sizePolicy)
self.gridLayout_4 = QtGui.QGridLayout(ResultsBrowser)
self.gridLayout_4.setObjectName("gridLayout_4")
self.splitter_2 = QtGui.QSplitter(ResultsBrowser)
self.splitter_2.setOrientation(QtCore.Qt.Vertical)
self.splitter_2.setObjectName("splitter_2")
self.groupBox_3 = QtGui.QGroupBox(self.splitter_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_3.sizePolicy().hasHeightForWidth())
self.groupBox_3.setSizePolicy(sizePolicy)
self.groupBox_3.setAutoFillBackground(False)
self.groupBox_3.setFlat(False)
self.groupBox_3.setObjectName("groupBox_3")
self.verticalLayout = QtGui.QVBoxLayout(self.groupBox_3)
self.verticalLayout.setObjectName("verticalLayout")
self.configSplitter = QtGui.QSplitter(self.groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.configSplitter.sizePolicy().hasHeightForWidth())
self.configSplitter.setSizePolicy(sizePolicy)
self.configSplitter.setOrientation(QtCore.Qt.Horizontal)
self.configSplitter.setHandleWidth(12)
self.configSplitter.setObjectName("configSplitter")
self.groupBox = QtGui.QGroupBox(self.configSplitter)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(2)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
self.groupBox.setSizePolicy(sizePolicy)
self.groupBox.setBaseSize(QtCore.QSize(0, 100))
self.groupBox.setFlat(True)
self.groupBox.setObjectName("groupBox")
self.verticalLayout_4 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setMargin(0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.lst_available_runs = QtGui.QListWidget(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(4)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lst_available_runs.sizePolicy().hasHeightForWidth())
self.lst_available_runs.setSizePolicy(sizePolicy)
self.lst_available_runs.setMinimumSize(QtCore.QSize(0, 0))
self.lst_available_runs.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.lst_available_runs.setBaseSize(QtCore.QSize(100, 50))
self.lst_available_runs.setAlternatingRowColors(True)
self.lst_available_runs.setObjectName("lst_available_runs")
self.verticalLayout_4.addWidget(self.lst_available_runs)
self.groupBox_2 = QtGui.QGroupBox(self.configSplitter)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setBaseSize(QtCore.QSize(20, 0))
self.groupBox_2.setFlat(True)
self.groupBox_2.setObjectName("groupBox_2")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.groupBox_2)
self.verticalLayout_3.setSpacing(0)
self.verticalLayout_3.setMargin(0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.lst_years = QtGui.QListWidget(self.groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lst_years.sizePolicy().hasHeightForWidth())
self.lst_years.setSizePolicy(sizePolicy)
self.lst_years.setMinimumSize(QtCore.QSize(0, 0))
self.lst_years.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.lst_years.setBaseSize(QtCore.QSize(20, 50))
self.lst_years.setAlternatingRowColors(True)
self.lst_years.setObjectName("lst_years")
self.verticalLayout_3.addWidget(self.lst_years)
self.groupBox_4 = QtGui.QGroupBox(self.configSplitter)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(5)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_4.sizePolicy().hasHeightForWidth())
self.groupBox_4.setSizePolicy(sizePolicy)
self.groupBox_4.setBaseSize(QtCore.QSize(500, 0))
self.groupBox_4.setFlat(True)
self.groupBox_4.setObjectName("groupBox_4")
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox_4)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setMargin(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.indicator_table = QtGui.QTableWidget(self.groupBox_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(7)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.indicator_table.sizePolicy().hasHeightForWidth())
self.indicator_table.setSizePolicy(sizePolicy)
self.indicator_table.setMinimumSize(QtCore.QSize(0, 0))
self.indicator_table.setBaseSize(QtCore.QSize(500, 50))
self.indicator_table.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.indicator_table.setDragDropOverwriteMode(False)
self.indicator_table.setAlternatingRowColors(True)
self.indicator_table.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.indicator_table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.indicator_table.setTextElideMode(QtCore.Qt.ElideNone)
self.indicator_table.setShowGrid(True)
self.indicator_table.setColumnCount(3)
self.indicator_table.setObjectName("indicator_table")
self.indicator_table.setColumnCount(3)
self.indicator_table.setRowCount(0)
self.verticalLayout_2.addWidget(self.indicator_table)
self.verticalLayout.addWidget(self.configSplitter)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
spacerItem = QtGui.QSpacerItem(40, 2, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.cb_auto_gen = QtGui.QCheckBox(self.groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.cb_auto_gen.sizePolicy().hasHeightForWidth())
self.cb_auto_gen.setSizePolicy(sizePolicy)
self.cb_auto_gen.setTristate(False)
self.cb_auto_gen.setObjectName("cb_auto_gen")
self.horizontalLayout.addWidget(self.cb_auto_gen)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.lbl_current_selection = QtGui.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.lbl_current_selection.setFont(font)
self.lbl_current_selection.setObjectName("lbl_current_selection")
self.horizontalLayout_2.addWidget(self.lbl_current_selection)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.pb_generate_results = QtGui.QPushButton(self.groupBox_3)
self.pb_urbancanvas = QtGui.QPushButton(self.groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pb_generate_results.sizePolicy().hasHeightForWidth())
self.pb_generate_results.setSizePolicy(sizePolicy)
self.pb_generate_results.setMinimumSize(QtCore.QSize(0, 0))
self.pb_generate_results.setObjectName("pb_generate_results")
self.verticalLayout_5.addWidget(self.pb_generate_results)
sizePolicy.setHeightForWidth(self.pb_urbancanvas.sizePolicy().hasHeightForWidth())
self.pb_urbancanvas.setSizePolicy(sizePolicy)
self.pb_urbancanvas.setMinimumSize(QtCore.QSize(0, 0))
self.pb_urbancanvas.setObjectName("pb_urbancanvas")
self.verticalLayout_5.addWidget(self.pb_urbancanvas)
self.horizontalLayout_2.addLayout(self.verticalLayout_5)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.tabwidget_visualizations = QtGui.QTabWidget(self.splitter_2)
self.tabwidget_visualizations.setMinimumSize(QtCore.QSize(0, 200))
self.tabwidget_visualizations.setObjectName("tabwidget_visualizations")
self.starttab = QtGui.QWidget()
self.starttab.setObjectName("starttab")
self.tabwidget_visualizations.addTab(self.starttab, "")
self.gridLayout_4.addWidget(self.splitter_2, 0, 0, 1, 1)
self.retranslateUi(ResultsBrowser)
self.tabwidget_visualizations.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(ResultsBrowser)
def retranslateUi(self, ResultsBrowser):
ResultsBrowser.setWindowTitle(QtGui.QApplication.translate("ResultsBrowser", "Result Browser", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_3.setTitle(QtGui.QApplication.translate("ResultsBrowser", "Configure an indicator to view", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("ResultsBrowser", "Simulation Runs", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setTitle(QtGui.QApplication.translate("ResultsBrowser", "Years", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_4.setTitle(QtGui.QApplication.translate("ResultsBrowser", "Indicators", None, QtGui.QApplication.UnicodeUTF8))
self.indicator_table.setSortingEnabled(False)
self.cb_auto_gen.setToolTip(QtGui.QApplication.translate("ResultsBrowser", "Automatically generate and view the indicator when it\'s selected", None, QtGui.QApplication.UnicodeUTF8))
self.cb_auto_gen.setText(QtGui.QApplication.translate("ResultsBrowser", "Automatically generate", None, QtGui.QApplication.UnicodeUTF8))
self.lbl_current_selection.setText(QtGui.QApplication.translate("ResultsBrowser", "current selection", None, QtGui.QApplication.UnicodeUTF8))
self.pb_generate_results.setText(QtGui.QApplication.translate("ResultsBrowser", "Generate results", None, QtGui.QApplication.UnicodeUTF8))
self.pb_urbancanvas.setText(QtGui.QApplication.translate("ResultsBrowser", "View in UrbanCanvas", None, QtGui.QApplication.UnicodeUTF8))
self.tabwidget_visualizations.setTabText(self.tabwidget_visualizations.indexOf(self.starttab), QtGui.QApplication.translate("ResultsBrowser", "starttab", None, QtGui.QApplication.UnicodeUTF8))
        ################################
self.cb_auto_gen.setText(QtGui.QApplication.translate("ResultsBrowser", "Uncertainty options generate", None, QtGui.QApplication.UnicodeUTF8))
|
agpl-3.0
| -1,741,197,974,735,579,600
| 61.925743
| 200
| 0.735877
| false
| 4.019608
| true
| false
| false
|
rherlt/GoodVibrations
|
src/GoodVibrations.Listener/speech_recognition/examples/microphone_recognition.py
|
1
|
3228
|
#!/usr/bin/env python3
# NOTE: this example requires PyAudio because it uses the Microphone class
import speech_recognition as sr
# obtain audio from the microphone
r = sr.Recognizer()
with sr.Microphone() as source:
print("Say something!")
audio = r.listen(source)
# recognize speech using Sphinx
try:
print("Sphinx thinks you said " + r.recognize_sphinx(audio))
except sr.UnknownValueError:
print("Sphinx could not understand audio")
except sr.RequestError as e:
print("Sphinx error; {0}".format(e))
# recognize speech using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
print("Google Speech Recognition thinks you said " + r.recognize_google(audio))
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
# recognize speech using Wit.ai
WIT_AI_KEY = "INSERT WIT.AI API KEY HERE" # Wit.ai keys are 32-character uppercase alphanumeric strings
try:
print("Wit.ai thinks you said " + r.recognize_wit(audio, key=WIT_AI_KEY))
except sr.UnknownValueError:
print("Wit.ai could not understand audio")
except sr.RequestError as e:
print("Could not request results from Wit.ai service; {0}".format(e))
# recognize speech using Microsoft Bing Voice Recognition
BING_KEY = "INSERT BING API KEY HERE" # Microsoft Bing Voice Recognition API keys 32-character lowercase hexadecimal strings
try:
print("Microsoft Bing Voice Recognition thinks you said " + r.recognize_bing(audio, key=BING_KEY))
except sr.UnknownValueError:
print("Microsoft Bing Voice Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Microsoft Bing Voice Recognition service; {0}".format(e))
# recognize speech using Houndify
HOUNDIFY_CLIENT_ID = "INSERT HOUNDIFY CLIENT ID HERE" # Houndify client IDs are Base64-encoded strings
HOUNDIFY_CLIENT_KEY = "INSERT HOUNDIFY CLIENT KEY HERE" # Houndify client keys are Base64-encoded strings
try:
print("Houndify thinks you said " + r.recognize_houndify(audio, client_id=HOUNDIFY_CLIENT_ID, client_key=HOUNDIFY_CLIENT_KEY))
except sr.UnknownValueError:
print("Houndify could not understand audio")
except sr.RequestError as e:
print("Could not request results from Houndify service; {0}".format(e))
# recognize speech using IBM Speech to Text
IBM_USERNAME = "INSERT IBM SPEECH TO TEXT USERNAME HERE" # IBM Speech to Text usernames are strings of the form XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
IBM_PASSWORD = "INSERT IBM SPEECH TO TEXT PASSWORD HERE" # IBM Speech to Text passwords are mixed-case alphanumeric strings
try:
print("IBM Speech to Text thinks you said " + r.recognize_ibm(audio, username=IBM_USERNAME, password=IBM_PASSWORD))
except sr.UnknownValueError:
print("IBM Speech to Text could not understand audio")
except sr.RequestError as e:
print("Could not request results from IBM Speech to Text service; {0}".format(e))
|
mit
| 1,320,618,614,434,476,300
| 46.470588
| 148
| 0.757125
| false
| 3.474704
| false
| false
| false
|
ThiefMaster/jinja2
|
scripts/make-release.py
|
2
|
4375
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
make-release
~~~~~~~~~~~~
Helper script that performs a release. Does pretty much everything
automatically for us.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
import re
from datetime import datetime, date
from subprocess import Popen, PIPE
try:
import wheel
except ImportError:
wheel = None
_date_strip_re = re.compile(r'(?<=\d)(st|nd|rd|th)')
def parse_changelog():
with open('CHANGES') as f:
lineiter = iter(f)
for line in lineiter:
match = re.search('^Version\s+(.*)', line.strip())
if match is None:
continue
length = len(match.group(1))
version = match.group(1).strip()
if lineiter.next().count('-') != len(match.group(0)):
continue
while 1:
change_info = lineiter.next().strip()
if change_info:
break
match = re.search(r'(?:codename (.*),\s*)?'
r'released on (\w+\s+\d+\w+\s+\d+)(?i)',
change_info)
if match is None:
continue
codename, datestr = match.groups()
return version, parse_date(datestr), codename
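# Illustrative CHANGES entry that parse_changelog() expects (hypothetical values,
# added for clarity only):
#
#   Version 1.2
#   -----------
#   (codename Example, released on May 10th 2009)
#
# i.e. a dashed underline of the same length as the "Version ..." header line,
# followed by an optional codename and a date that parse_date() can read.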
def bump_version(version):
try:
parts = map(int, version.split('.'))
except ValueError:
fail('Current version is not numeric')
parts[-1] += 1
return '.'.join(map(str, parts))
def parse_date(string):
string = _date_strip_re.sub('', string)
return datetime.strptime(string, '%B %d %Y')
def set_filename_version(filename, version_number, pattern):
changed = []
def inject_version(match):
before, old, after = match.groups()
changed.append(True)
return before + version_number + after
with open(filename) as f:
contents = re.sub(r"^(\s*%s\s*=\s*')(.+?)(')(?sm)" % pattern,
inject_version, f.read())
if not changed:
fail('Could not find %s in %s', pattern, filename)
with open(filename, 'w') as f:
f.write(contents)
def set_init_version(version):
info('Setting __init__.py version to %s', version)
set_filename_version('jinja2/__init__.py', version, '__version__')
def set_setup_version(version):
info('Setting setup.py version to %s', version)
set_filename_version('setup.py', version, 'version')
def build_and_upload():
    cmd = [sys.executable, 'setup.py', 'sdist', 'upload']
    if wheel is not None:
        # only build a wheel when the wheel package is actually available
        cmd.insert(3, 'bdist_wheel')
Popen(cmd).wait()
def fail(message, *args):
print >> sys.stderr, 'Error:', message % args
sys.exit(1)
def info(message, *args):
print >> sys.stderr, message % args
def get_git_tags():
return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines())
def git_is_clean():
return Popen(['git', 'diff', '--quiet']).wait() == 0
def make_git_commit(message, *args):
message = message % args
Popen(['git', 'commit', '-am', message]).wait()
def make_git_tag(tag):
info('Tagging "%s"', tag)
Popen(['git', 'tag', tag]).wait()
def main():
os.chdir(os.path.join(os.path.dirname(__file__), '..'))
rv = parse_changelog()
if rv is None:
fail('Could not parse changelog')
version, release_date, codename = rv
dev_version = bump_version(version) + '.dev'
info('Releasing %s (codename %s, release date %s)',
version, codename, release_date.strftime('%d/%m/%Y'))
tags = get_git_tags()
if version in tags:
fail('Version "%s" is already tagged', version)
if release_date.date() != date.today():
fail('Release date is not today (%s != %s)', release_date.date(), date.today())
if not git_is_clean():
fail('You have uncommitted changes in git')
if wheel is None:
print ('Warning: You need to install the wheel package '
'to upload a wheel distribution.')
set_init_version(version)
set_setup_version(version)
make_git_commit('Bump version number to %s', version)
make_git_tag(version)
build_and_upload()
set_init_version(dev_version)
set_setup_version(dev_version)
if __name__ == '__main__':
main()
|
bsd-3-clause
| -7,196,274,055,336,344,000
| 25.676829
| 87
| 0.573714
| false
| 3.673384
| false
| false
| false
|
Metaleer/hexchat-scripts
|
regexkb.py
|
1
|
2158
|
from __future__ import print_function
__module_name__ = 'Regex Kickban'
__module_version__ = '0.2'
__module_description__ = 'Kickbans clients from specified channels on regex match against their message or notice to channel'
__author__ = 'Daniel A. J.'
# TODO:
# When ChanServ-type services are available, ask for ops if not opped
# If client is signed into account, ban accountname instead of host
import hexchat
import re
pattern = re.compile(r'\bfoo\b') # regex pattern to be matched against in user's message or notice
check_channels = ['#test', '#fooness'] # channel(s) where script is active
net = 'Bouncer' # network where script is active
def msg_search(word, word_eol, userdata):
if word[2].startswith('#') == False:
return
user_message = ' '.join(word[3:])[1:]
channel = word[2]
user_nickname = ''.join(word[0][1:word[0].index('!')])
user_host = ''.join(word[0][word[0].index('@'):])
for x in check_channels:
if pattern.search(user_message) is not None and channel == x and hexchat.get_info("network") == net:
hexchat.command("mode %s +b *!*%s" % (channel, user_host))
hexchat.command("kick %s regex pattern detected" % user_nickname)
return hexchat.EAT_ALL
def notice_search(word, word_eol, userdata):
if word[2].startswith('#') == False:
return
user_message = ' '.join(word[3:])[1:]
channel = word[2]
user_nickname = ''.join(word[0][1:word[0].index('!')])
user_host = ''.join(word[0][word[0].index('@'):])
for x in check_channels:
if pattern.search(user_message) is not None and channel == x and hexchat.get_info("network") == net:
hexchat.command("mode %s +b *!*%s" % (channel, user_host))
hexchat.command("kick %s regex pattern detected" % user_nickname)
return hexchat.EAT_ALL
def unload_regexkb(userdata):
print(__module_name__, 'version', __module_version__, 'unloaded.')
hexchat.hook_server("PRIVMSG", msg_search)
hexchat.hook_server("NOTICE", notice_search)
hexchat.hook_unload(unload_regexkb)
print(__module_name__, 'version', __module_version__, 'loaded.')
|
mit
| 3,880,300,969,970,562,600
| 36.206897
| 125
| 0.629286
| false
| 3.340557
| false
| false
| false
|
andreimaximov/algorithms
|
leetcode/algorithms/wildcard-matching/solution.py
|
1
|
1743
|
#!/usr/bin/env python
class Solution(object):
def isMatch(self, s, p):
"""
Returns a boolean indicating if the pattern p matches string s. See
LeetCode problem description for full pattern spec.
"""
n = len(s)
m = len(p)
# If the pattern has more non-star chars than s has total chars, there
# is no way we can match the pattern even if we ignore all stars.
if (m - p.count('*') > n):
return False
# Each lastDP[i] represents isMatch(s[:i], p[:j]) for previous j. We do
# not need a full 2D matrix since the recursive relation only depends
# on a sub-problem that is one level lower.
lastDP = [False] * (n + 1)
lastDP[0] = True
for j in range(1, m + 1):
# Create DP for the current j.
nextDP = [False] * (n + 1)
# Empty s matches p prefix if prefix contains only *'s.
nextDP[0] = lastDP[0] and p[j - 1] == '*'
for i in range(1, n + 1):
if p[j - 1] == '*':
# Skip * or current character.
nextDP[i] = lastDP[i] or nextDP[i - 1]
elif p[j - 1] == '?':
# Skip current character and ?.
nextDP[i] = lastDP[i - 1]
else:
# Ensure characters match and that s[:i] matches p[:j].
nextDP[i] = (s[i - 1] == p[j - 1]) and \
lastDP[i - 1]
lastDP = nextDP
return lastDP[-1]
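# Illustrative sketch, not part of the original submission: a few hand-checked
# cases exercising the rolling DP row above ('*' matches any sequence, '?'
# exactly one character). The inputs are made up for demonstration only.
def _demo_is_match():
    solution = Solution()
    assert solution.isMatch('adceb', '*a*b')
    assert not solution.isMatch('acdcb', 'a*c?b')
    assert solution.isMatch('', '*')
    print('wildcard matcher demo cases passed')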
def main():
print('Please run this solution on LeetCode.')
print('https://leetcode.com/problems/wildcard-matching/')
if __name__ == '__main__':
main()
|
mit
| -6,150,173,116,118,105,000
| 33.176471
| 79
| 0.491681
| false
| 3.78913
| false
| false
| false
|
caspar/PhysicsLab
|
21_Midterm/plack2.py
|
1
|
1726
|
# Lab 0
# Linear Least Squares Fit
# Author Caspar Lant
import numpy as np
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt
# load csv file
DATA = "data.csv";
frequency, voltage = np.loadtxt(DATA, skiprows=1 , unpack=True, delimiter=',');
# plot voltage vs. frequency
plt.ylabel("Voltage (V)");
plt.xlabel("Frequency ($10^{14}$ Hz)");
plt.title("Voltage vs. Frequency");
plt.plot(frequency, voltage, 'ro', linestyle = '', mec='r', ms=5 );
# linear least squares fit line
def least_squares_fit (x, y):
xavg = x.mean()
slope = ( y * ( x - xavg)).sum() / (x*(x-xavg)).sum()
intercept = y.mean()-slope*xavg
return slope, intercept
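# Quick sanity check for least_squares_fit above (illustrative only, data made up):
# points lying exactly on y = 2x + 1 should give slope ~2 and intercept ~1.
def _check_least_squares_fit():
    xs = np.array([0.0, 1.0, 2.0, 3.0])
    ys = 2.0 * xs + 1.0
    m, b = least_squares_fit(xs, ys)
    assert abs(m - 2.0) < 1e-9 and abs(b - 1.0) < 1e-9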
slope, intercept = least_squares_fit(frequency, voltage);
# create arrays to plot
y1 = slope * 7 + intercept; # y1 = m(x1) + b
y2 = slope * 0 + intercept; # y2 = m(x2) + b
x_range = [0, 7]; # array of x values
y_range = [y2, y1]; # array of y values
PLANCK = slope* 1.60217662
print("plancks constant:", PLANCK)
print("or", 1/PLANCK)
# show the graph
plt.plot(x_range, y_range, color="blue", linestyle = '-', label="Actual");
slope = 0.413566766
y1 = slope * 7 + intercept; # y1 = m(x1) + b
y2 = slope * 0 + intercept; # y2 = m(x2) + b
x_range = [0, 7]; # array of x values
y_range = [y2, y1]; # array of y values
PLANCK = slope * 1.60217662
# print("plancks constant:", PLANCK)
# print("or", 1/PLANCK)
# show the graph
plt.plot(x_range, y_range, color="grey",linestyle = ':', label="Expected");
plt.legend(loc='best')
plt.annotate("Slope = $6.14 * 10^{-34}$", xy=(2.27, -0.32), xytext=(2.5, -.7), arrowprops=dict(arrowstyle="->"))
# plt.legend(["slope = 1"])
plt.show();
|
mit
| 2,794,208,592,931,171,300
| 30.381818
| 112
| 0.614716
| false
| 2.7616
| false
| false
| false
|
raulhigueras/LolaVA
|
speech/input.py
|
1
|
2363
|
# -*- coding: utf-8 -*-
# Collects the input, either via Google's STT or via the Telegram bot
import speech_recognition as sr
import time, datetime, telepot, os
from config import get_config
bot = telepot.Bot('BOT_KEY')
def ask():
modo = get_config.get_profile()["modo"]
os.system("aplay resources/sound2.wav")
if modo == "texto":
print "Esperando mensaje"
response = bot.getUpdates(offset=-5)
length = len(response)
print length
if(length > 0):
last_id = response[-1]["update_id"]
while last_id == bot.getUpdates(offset=-5)[-1]["update_id"]:
time.sleep(3)
else:
while length == len(response):
response = bot.getUpdates(offset=-5)
time.sleep(3)
print "---"
response = bot.getUpdates()
respuesta = clean_input(response[-1]["message"]["text"].lower())
print respuesta
return respuesta
elif modo == "audio":
r = sr.Recognizer()
with sr.Microphone() as source:
audio = r.listen(source)
try:
rec = r.recognize_google(audio, language="es-ES")
print ("Has dicho " + rec)
return rec.lower()
except sr.UnknownValueError:
print( "No se ha entendido el audio")
except sr.RequestError as e:
print("Ha habido un error con el reconocimiento de voz {0}".format(e))
else:
print "Hay un error con la configuración"
def listen():
r = sr.Recognizer()
with sr.Microphone() as source:
print("Escuchando")
audio = r.listen(source)
try:
rec = r.recognize_google(audio, language="es-ES")
if (get_config.get_profile()['asistente'] in rec.lower()):
return True
else:
return False
except sr.UnknownValueError:
print( "No se ha entendido el audio")
except sr.RequestError as e:
print("Ha habido un error con el reconocimiento de voz {0}".format(e))
def clean_input(frase):
caracteres_especiales = {
'á':'a',
'é':'e',
'í':'i',
'ó':'o',
'ú':'u',
'ü':'u',
'ñ':'n',
'¿':'?',
'¡':'!',
}
frase = list(frase)
for i in range(0, len(frase)):
if caracteres_especiales.has_key(frase[i].encode("utf-8")):
frase[i] = caracteres_especiales[frase[i].encode("utf-8")]
if frase[0] == "?" or frase[0] == "!":
del frase[0]
if frase[-1] == "?" or frase[-1] == "!":
del frase[-1]
return "".join(frase)
|
mit
| 6,502,096,092,672,122,000
| 27.011905
| 86
| 0.59966
| false
| 2.828125
| false
| false
| false
|
OCA/purchase-workflow
|
setup/_metapackage/setup.py
|
1
|
3639
|
import setuptools
with open('VERSION.txt', 'r') as f:
version = f.read().strip()
setuptools.setup(
name="odoo12-addons-oca-purchase-workflow",
description="Meta package for oca-purchase-workflow Odoo addons",
version=version,
install_requires=[
'odoo12-addon-procurement_purchase_no_grouping',
'odoo12-addon-product_form_purchase_link',
'odoo12-addon-product_supplier_code_purchase',
'odoo12-addon-purchase_allowed_product',
'odoo12-addon-purchase_analytic_global',
'odoo12-addon-purchase_blanket_order',
'odoo12-addon-purchase_commercial_partner',
'odoo12-addon-purchase_date_planned_manual',
'odoo12-addon-purchase_default_terms_conditions',
'odoo12-addon-purchase_delivery_split_date',
'odoo12-addon-purchase_deposit',
'odoo12-addon-purchase_discount',
'odoo12-addon-purchase_exception',
'odoo12-addon-purchase_force_invoiced',
'odoo12-addon-purchase_invoice_plan',
'odoo12-addon-purchase_landed_cost',
'odoo12-addon-purchase_last_price_info',
'odoo12-addon-purchase_line_procurement_group',
'odoo12-addon-purchase_location_by_line',
'odoo12-addon-purchase_manual_delivery',
'odoo12-addon-purchase_minimum_amount',
'odoo12-addon-purchase_open_qty',
'odoo12-addon-purchase_order_analytic_search',
'odoo12-addon-purchase_order_approval_block',
'odoo12-addon-purchase_order_approved',
'odoo12-addon-purchase_order_archive',
'odoo12-addon-purchase_order_general_discount',
'odoo12-addon-purchase_order_line_deep_sort',
'odoo12-addon-purchase_order_line_description',
'odoo12-addon-purchase_order_line_price_history',
'odoo12-addon-purchase_order_line_price_history_discount',
'odoo12-addon-purchase_order_line_sequence',
'odoo12-addon-purchase_order_line_stock_available',
'odoo12-addon-purchase_order_product_recommendation',
'odoo12-addon-purchase_order_product_recommendation_brand',
'odoo12-addon-purchase_order_product_recommendation_secondary_unit',
'odoo12-addon-purchase_order_secondary_unit',
'odoo12-addon-purchase_order_type',
'odoo12-addon-purchase_order_uninvoiced_amount',
'odoo12-addon-purchase_picking_state',
'odoo12-addon-purchase_price_recalculation',
'odoo12-addon-purchase_product_usage',
'odoo12-addon-purchase_propagate_qty',
'odoo12-addon-purchase_quick',
'odoo12-addon-purchase_reception_notify',
'odoo12-addon-purchase_reception_status',
'odoo12-addon-purchase_request',
'odoo12-addon-purchase_request_department',
'odoo12-addon-purchase_request_order_approved',
'odoo12-addon-purchase_request_product_usage',
'odoo12-addon-purchase_request_tier_validation',
'odoo12-addon-purchase_request_usage_department',
'odoo12-addon-purchase_requisition_auto_rfq',
'odoo12-addon-purchase_requisition_line_description',
'odoo12-addon-purchase_requisition_tier_validation',
'odoo12-addon-purchase_security',
'odoo12-addon-purchase_stock_price_unit_sync',
'odoo12-addon-purchase_stock_return_request',
'odoo12-addon-purchase_tier_validation',
'odoo12-addon-purchase_triple_discount',
'odoo12-addon-purchase_work_acceptance',
'odoo12-addon-subcontracted_service',
'odoo12-addon-supplier_calendar',
],
classifiers=[
'Programming Language :: Python',
'Framework :: Odoo',
]
)
|
agpl-3.0
| 6,702,452,264,549,585,000
| 45.063291
| 76
| 0.677109
| false
| 3.64995
| false
| false
| false
|
Strassengezwitscher/Strassengezwitscher
|
crowdgezwitscher/contact/mail.py
|
1
|
5814
|
# pylint: disable=bad-builtin
from email.utils import formatdate
from tempfile import TemporaryDirectory
from django.core.mail import EmailMessage, SafeMIMEText, SafeMIMEMultipart, make_msgid
from django.core.mail.utils import DNS_NAME
from django.core.mail.backends.locmem import EmailBackend as LocMemEmailBackend
from django.utils.encoding import smart_text, force_text
from django.conf import settings
from django.core import mail
from gnupg import GPG
from crowdgezwitscher.log import logger
from contact.models import Key
from contact.utils import GPGException, handle_gpg_error
class GPGEmailMessage(EmailMessage):
"""
Django's default email class on paranoia.
The email is encrypted (but not signed) during send() using GPG in PGP/MIME format.
"""
encrypted_subtype = 'encrypted'
gpg_attachment_filename = 'encrypted.asc'
def _encrypt(self, plain):
# test if we have public keys for all recipients
available_recipients = []
keys = []
for key in Key.objects.all():
keys.append(key)
available_recipients.extend(key.addresses.split(', '))
logger.debug("available_recipients: %s", available_recipients)
if not all(recipient in available_recipients for recipient in self.recipients()):
logger.error("Public key not present for at least one of these recipients: %s", self.recipients())
raise GPGException("Public key not present for at least one recipient")
# encryption
with TemporaryDirectory() as temp_dir:
gpg = GPG(gnupghome=temp_dir)
for key in keys:
gpg.import_keys(key.key)
res = gpg.encrypt(plain, self.recipients(), always_trust=True)
if not res:
handle_gpg_error(res, 'encryption')
return smart_text(res)
def message(self):
"""
Returns the final message to be sent, including all headers etc. Content and attachments are encrypted using
GPG in PGP/MIME format (RFC 3156).
"""
def build_plain_message():
msg = SafeMIMEText(self.body, self.content_subtype, encoding)
msg = self._create_message(msg)
return msg
def build_version_attachment():
version_attachment = SafeMIMEText('Version: 1\n', self.content_subtype, encoding)
del version_attachment['Content-Type']
version_attachment.add_header('Content-Type', 'application/pgp-encrypted')
version_attachment.add_header('Content-Description', 'PGP/MIME Versions Identification')
return version_attachment
def build_gpg_attachment():
gpg_attachment = SafeMIMEText(encrypted_msg, self.content_subtype, encoding)
del gpg_attachment['Content-Type']
gpg_attachment.add_header('Content-Type', 'application/octet-stream', name=self.gpg_attachment_filename)
gpg_attachment.add_header('Content-Disposition', 'inline', filename=self.gpg_attachment_filename)
gpg_attachment.add_header('Content-Description', 'OpenPGP encrypted message')
return gpg_attachment
encoding = self.encoding or settings.DEFAULT_CHARSET
# build message including attachments as it would also be built without GPG
msg = build_plain_message()
# encrypt whole message including attachments
encrypted_msg = self._encrypt(str(msg))
# build new message object wrapping the encrypted message
msg = SafeMIMEMultipart(_subtype=self.encrypted_subtype,
encoding=encoding,
protocol='application/pgp-encrypted')
version_attachment = build_version_attachment()
gpg_attachment = build_gpg_attachment()
msg.attach(version_attachment)
msg.attach(gpg_attachment)
self.extra_headers['Content-Transfer-Encoding'] = '7bit'
# add headers
# everything below this line has not been modified when overriding message()
############################################################################
msg['Subject'] = self.subject
msg['From'] = self.extra_headers.get('From', self.from_email)
msg['To'] = self.extra_headers.get('To', ', '.join(map(force_text, self.to)))
if self.cc:
msg['Cc'] = ', '.join(map(force_text, self.cc))
if self.reply_to:
msg['Reply-To'] = self.extra_headers.get('Reply-To', ', '.join(map(force_text, self.reply_to)))
# Email header names are case-insensitive (RFC 2045), so we have to
# accommodate that when doing comparisons.
header_names = [key.lower() for key in self.extra_headers]
if 'date' not in header_names:
msg['Date'] = formatdate()
if 'message-id' not in header_names:
# Use cached DNS_NAME for performance
msg['Message-ID'] = make_msgid(domain=DNS_NAME)
for name, value in self.extra_headers.items():
if name.lower() in ('from', 'to'): # From and To are already handled
continue
msg[name] = value
return msg
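# Illustrative usage sketch (not part of the module): GPGEmailMessage is meant as
# a drop-in replacement for Django's EmailMessage. The addresses below are
# placeholders; a Key row covering the recipient address and working Django email
# settings are assumed.
def _send_encrypted_mail_example():
    msg = GPGEmailMessage(
        subject='Contact form message',
        body='This body will be GPG-encrypted in PGP/MIME format before sending.',
        from_email='noreply@example.org',
        to=['recipient@example.org'],
    )
    msg.send()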
class GPGLocMemEmailBackend(LocMemEmailBackend):
"""
An email backend for use during test sessions.
Emails are prepared for final sending, so they include all headers etc.
The test connection stores email messages in a dummy outbox,
rather than sending them out on the wire.
The dummy outbox is accessible through the outbox instance attribute.
"""
def send_messages(self, messages):
"""Redirect final messages to the dummy outbox"""
messages = [message.message() for message in messages]
mail.outbox.extend(messages)
return len(messages)
|
mit
| -1,930,372,365,059,580,700
| 40.528571
| 116
| 0.642071
| false
| 4.348542
| false
| false
| false
|
MuffinMedic/CloudBot
|
plugins/horoscope.py
|
1
|
2411
|
# Plugin by Infinity - <https://github.com/infinitylabs/UguuBot>
import requests
from bs4 import BeautifulSoup
from sqlalchemy import Table, String, Column, select
from cloudbot import hook
from cloudbot.util import database
table = Table(
'horoscope',
database.metadata,
Column('nick', String, primary_key=True),
Column('sign', String)
)
def get_sign(db, nick):
row = db.execute(select([table.c.sign]).where(table.c.nick == nick.lower())).fetchone()
if not row:
return None
return row[0]
def set_sign(db, nick, sign):
res = db.execute(table.update().values(sign=sign.lower()).where(table.c.nick == nick.lower()))
if res.rowcount == 0:
db.execute(table.insert().values(nick=nick.lower(), sign=sign.lower()))
db.commit()
@hook.command(autohelp=False)
def horoscope(text, db, bot, nick, notice, notice_doc, reply, message):
"""[sign] - get your horoscope"""
signs = {
'aries': '1',
'taurus': '2',
'gemini': '3',
'cancer': '4',
'leo': '5',
'virgo': '6',
'libra': '7',
'scorpio': '8',
'sagittarius': '9',
'capricorn': '10',
'aquarius': '11',
'pisces': '12'
}
headers = {'User-Agent': bot.user_agent}
# check if the user asked us not to save his details
dontsave = text.endswith(" dontsave")
if dontsave:
sign = text[:-9].strip().lower()
else:
sign = text.strip().lower()
if not sign:
sign = get_sign(db, nick)
if not sign:
notice_doc()
return
sign = sign.strip().lower()
if sign not in signs:
notice("Unknown sign: {}".format(sign))
return
params = {
"sign": signs[sign]
}
url = "http://www.horoscope.com/us/horoscopes/general/horoscope-general-daily-today.aspx"
try:
request = requests.get(url, params=params, headers=headers)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
reply("Could not get horoscope: {}. URL Error".format(e))
raise
soup = BeautifulSoup(request.text, "html.parser")
horoscope_text = soup.find("main", class_="main-horoscope").find("p").text
result = "\x02{}\x02 {}".format(sign, horoscope_text)
if text and not dontsave:
set_sign(db, nick, sign)
message(result)
|
gpl-3.0
| -6,390,855,012,608,246,000
| 24.648936
| 98
| 0.591456
| false
| 3.33011
| false
| false
| false
|
alexanderad/pony-standup-bot
|
tests/test_tasks_send_report_summary.py
|
1
|
7806
|
from __future__ import absolute_import
from datetime import datetime
from flexmock import flexmock
import pony.tasks
from tests.test_base import BaseTest
class SendReportSummaryTest(BaseTest):
def setUp(self):
super(SendReportSummaryTest, self).setUp()
self.bot.storage.set('report', {})
self.bot.plugin_config = {
'_dummy_team': {
'post_summary_to': '#dummy-channel',
'name': 'Dummy Team'
}
}
(flexmock(self.bot)
.should_receive('get_user_by_id')
.with_args('_user_id')
.and_return({
'id': '_user_id',
'color': 'aabbcc',
'profile': {
'real_name': 'Dummy User'
}
}))
(flexmock(self.bot)
.should_receive('get_user_by_name')
.with_args('@user')
.and_return({
'id': '_user_id',
'color': 'aabbcc',
'profile': {
'real_name': 'Dummy User'
}
}))
def test_get_user_avatar_is_failsafe(self):
(flexmock(self.slack)
.should_receive('api_call')
.with_args('users.list')
.and_return(dict(members=[])))
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.get_user_avatar(self.slack, '_user_id'))
def test_get_user_avatar(self):
(flexmock(self.slack)
.should_receive('api_call')
.with_args('users.list')
.and_return({
'members': [{
'id': '_user_id',
'profile': {
'image_192': '_image_192_url',
}
}]
}))
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertEqual(
task.get_user_avatar(self.slack, '_user_id'), '_image_192_url')
def test_get_user_avatar_lazy_loads_profiles(self):
(flexmock(self.slack)
.should_receive('api_call')
.with_args('users.list')
.and_return(dict(members=[]))
.times(1))
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.get_user_avatar(self.slack, '_user_id'))
self.assertIsNone(task.get_user_avatar(self.slack, '_user_id'))
self.assertIsNone(task.get_user_avatar(self.slack, '_user_id'))
def test_execute_no_reports(self):
self.bot.storage.set('report', {})
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.execute(self.bot, self.slack))
def test_execute_no_report_for_this_team(self):
self.bot.storage.set('report', {
datetime.utcnow().date(): {}
})
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.execute(self.bot, self.slack))
def test_execute_report_already_sent(self):
self.bot.storage.set('report', {
datetime.utcnow().date(): {
'_dummy_team': {
'reported_at': datetime.utcnow()
}
}
})
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.execute(self.bot, self.slack))
self.assertEqual(len(self.bot.fast_queue), 0)
def test_execute_user_not_seen_online(self):
self.bot.plugin_config['_dummy_team']['users'] = ['@user']
self.bot.storage.set('report', {
datetime.utcnow().date(): {
'_dummy_team': {
'reports': {
'_user_id': {
'seen_online': False
}
}
}
}
})
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.execute(self.bot, self.slack))
report = self.bot.fast_queue.pop()
self.assertIsInstance(report, pony.tasks.SendMessage)
self.assertEqual(report.to, '#dummy-channel')
self.assertIn('Summary for Dummy Team', report.text)
self.assertIn(
{'color': '#ccc', 'title': 'Offline', 'text': 'Dummy User'},
report.attachments
)
def test_execute_user_returned_no_response(self):
self.bot.plugin_config['_dummy_team']['users'] = ['@user']
self.bot.storage.set('report', {
datetime.utcnow().date(): {
'_dummy_team': {
'reports': {
'_user_id': {
'seen_online': True
}
}
}
}
})
task = pony.tasks.SendReportSummary('_dummy_team')
self.assertIsNone(task.execute(self.bot, self.slack))
report = self.bot.fast_queue.pop()
self.assertIsInstance(report, pony.tasks.SendMessage)
self.assertEqual(report.to, '#dummy-channel')
self.assertIn('Summary for Dummy Team', report.text)
self.assertIn(
{'color': '#ccc', 'title': 'No Response', 'text': 'Dummy User'},
report.attachments
)
def test_execute(self):
self.bot.plugin_config['_dummy_team']['users'] = ['@user']
self.bot.storage.set('report', {
datetime.utcnow().date(): {
'_dummy_team': {
'reports': {
'_user_id': {
'seen_online': True,
'reported_at': datetime.utcnow(),
'report': [
'line1',
'line2'
]
}
}
}
}
})
task = pony.tasks.SendReportSummary('_dummy_team')
(flexmock(task)
.should_receive('get_user_avatar')
.with_args(self.slack, '_user_id')
.and_return('_dummy_user_avatar_url'))
self.assertIsNone(task.execute(self.bot, self.slack))
report = self.bot.fast_queue.pop()
self.assertIsInstance(report, pony.tasks.SendMessage)
self.assertEqual(report.to, '#dummy-channel')
self.assertIn('Summary for Dummy Team', report.text)
report_line = report.attachments.pop()
self.assertEqual(report_line['title'], 'Dummy User')
self.assertEqual(report_line['text'], 'line1\nline2')
self.assertEqual(report_line['color'], '#aabbcc')
self.assertEqual(report_line['thumb_url'], '_dummy_user_avatar_url')
self.assertIsNotNone(report_line['ts'])
def test_execute_when_user_has_department_assigned(self):
self.bot.plugin_config['_dummy_team']['users'] = ['@user']
self.bot.storage.set('report', {
datetime.utcnow().date(): {
'_dummy_team': {
'reports': {
'_user_id': {
'seen_online': True,
'department': 'Dev Department',
'reported_at': datetime.utcnow(),
'report': [
'line1',
'line2'
]
}
}
}
}
})
task = pony.tasks.SendReportSummary('_dummy_team')
(flexmock(task)
.should_receive('get_user_avatar')
.with_args(self.slack, '_user_id')
.and_return('_dummy_user_avatar_url'))
self.assertIsNone(task.execute(self.bot, self.slack))
report = self.bot.fast_queue.pop()
report_line = report.attachments.pop()
self.assertEqual(report_line['footer'], 'Dev Department')
|
mit
| 8,936,455,861,183,878,000
| 32.646552
| 76
| 0.492954
| false
| 4.097638
| true
| false
| false
|
DirectXMan12/datanozzle
|
datanozzle/client.py
|
1
|
5713
|
# datagrepper-client -- A Python client for datagrepper
# Copyright (C) 2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import copy
import datetime
import collections
import urllib.parse as urlparse
import requests
def _filter_arg(name, multiple=True):
if multiple:
def filter_on_arg_mult(self, *args):
g = copy.deepcopy(self)
g._args.setdefault(name, [])
g._args[name].extend(args)
return g
return filter_on_arg_mult
else:
def filter_on_arg(self, arg):
g = copy.deepcopy(self)
g._args[name] = arg
return g
return filter_on_arg
class Entry(collections.Mapping):
__slots__ = ('certificate', 'signature', 'meta',
'index', 'timestamp', 'topic', '_msg')
def __init__(self, json):
self.certificate = json['certificate']
self.signature = json['signature']
self.meta = json.get('meta', {})
self.index = json['i']
self.timestamp = datetime.datetime.fromtimestamp(
float(json['timestamp']))
self.topic = json['topic']
self._msg = json['msg']
def __getitem__(self, key):
return self._msg[key]
def __iter__(self):
return iter(self._msg)
def __len__(self):
return len(self._msg)
def __repr__(self):
return ('<Entry[{topic} -- {ind} @ {ts}] {msg}>').format(
topic=self.topic, ind=self.index,
cert=self.certificate, sig=self.signature,
ts=self.timestamp, msg=self._msg)
# TODO(directxman12): write from_id
class Grepper(object):
def __init__(self, target='https://apps.fedoraproject.org/datagrepper/'):
self._base = target
if self._base[-1] != '/':
self._base += '/'
self._args = {}
self._page_limit = None
def _req(self):
return requests.get(self._base + 'raw', params=self._args)  # _base already ends with '/'
def _parse_json(self, json):
for msg in json['raw_messages']:
yield Entry(msg)
# TODO(directxman12): define a __repr__
# def __repr__(self):
def __iter__(self):
g = copy.deepcopy(self)
pg = g._args.get('page', 1)
r = g._req()
json = r.json()
yield from g._parse_json(json)
total_pages = json['pages']
if g._page_limit is not None and total_pages > g._page_limit:
total_pages = g._page_limit
pg += 1
max_pg = total_pages + 1
while pg < max_pg:
g._args['page'] = pg
r = g._req()
json = r.json()
yield from g._parse_json(json)
pg += 1
# formatting
def take(self, pages):
if pages is None:
raise ValueError("You must specify a number of pages.")
g = copy.deepcopy(self)
g._page_limit = pages
return g
def skip(self, pages):
if pages is None:
pages = 0
g = copy.deepcopy(self)
g._args['page'] = pages + 1
return g
# TODO(directxman12): with_chrome? with_size?
@property
def ascending(self):
g = copy.deepcopy(self)
g._args['order'] = 'asc'
return g
@property
def descending(self):
g = copy.deepcopy(self)
g._args['order'] = 'desc'
return g
@property
def grouped(self):
g = copy.deepcopy(self)
g._args['grouped'] = 'true'
return g
# pagination
def paginate(self, rows):
g = copy.deepcopy(self)
g._args['rows_per_page'] = rows
return g
def starting_at(self, start):
if isinstance(start, datetime.datetime):
start = start.timestamp()
g = copy.deepcopy(self)
g._args['start'] = start
return g
def ending_at(self, end):
if isinstance(end, datetime.datetime):
end = end.timestamp()
g = copy.deepcopy(self)
g._args['end'] = end
return g
def delta_seconds(self, delta):
g = copy.deepcopy(self)
g._args['delta'] = delta
return g
_ALT_NAMES = {'containing': 'contains', 'rows': 'rows_per_page',
'paginate': 'rows_per_page', 'skip': 'page',
'starting_at': 'start', 'ending_at': 'end',
'delta_seconds': 'delta'}
def reset(self, name):
g = copy.deepcopy(self)
if name == 'take':
g._page_limit = None
else:
name = self._ALT_NAMES.get(name, name)
del g._args[name]
return g
# query
by_user = _filter_arg('user')
by_package = _filter_arg('package')
by_category = _filter_arg('category')
by_topic = _filter_arg('topic')
containing = _filter_arg('contains')
without_user = _filter_arg('not_user')
without_package = _filter_arg('not_package')
without_category = _filter_arg('not_category')
without_topic = _filter_arg('not_topic')
with_meta = _filter_arg('meta')
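# Illustrative usage sketch (not part of the client): every filter above returns
# a copy of the Grepper, so queries compose by chaining; iterating the result
# pages through datagrepper and yields Entry objects. The category and paging
# values below are arbitrary examples.
def _example_query():
    query = Grepper().by_category('git').descending.paginate(25).take(2)
    for entry in query:
        print(entry.topic, entry.timestamp)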
|
gpl-2.0
| -5,897,578,573,471,983,000
| 25.821596
| 77
| 0.560126
| false
| 3.816299
| false
| false
| false
|
electricity345/Full.Text.Classification.Thesis
|
src/classify.phrase.match/src/text.py
|
1
|
13082
|
import codecs
import htmlentitydefs
import json
import logging
import nltk # Uses NLTK Version 2.0b9
import os
import re
import unicodedata
class positive_match:
def __init__(self, match, offset):
self.match = match
self.offset = offset
def getMatch(self):
return self.match
def getOffset(self):
return self.offset
def printMatch(self):
log = logging.getLogger('classify')
log.debug("match = %s ;; offset = %s" % (self.match, self.offset))
class text:
def __init__(self, gdbm_files, filter_file, path, category):
self.category = category
self.filter_file = filter_file
self.gdbm_files = gdbm_files
self.path = path
return
def processUnicodeText(self, tokens):
log = logging.getLogger('classify')
log.debug("text.processUnicodeText()")
log.debug("tokens = %s" % tokens)
symbols = [".", "\&", "'", "-", "/", ","] # Punctuation that will be removed individually from each token
punctuation = {0x2018:0x27, 0x2019:0x27, 0x201C:0x22, 0x201D:0x22, 0x2014:0x2D} # Unicode to ASCII equivalent
matches = [] # All matches found in the document
# Takes a list of tokenized words and adds them into a hash with the key = token and value = location of token in text (offset)
for index in range(len(tokens)):
token_possibilities = []
log.debug("unmodifed token = %s ;; index = %s" % (tokens[index], index))
# Converts Unicode Punctuation to ASCII equivalent - ADD ENTRIES AS NECESSARY
token = tokens[index].translate(punctuation).encode('ascii', 'ignore')
log.debug("token translate = %s" % token)
token_possibilities.append(token)
# Converts Unicode to ASCII equivalent - If no equivalent is found, it ignores the unicode
token1 = unicodedata.normalize('NFKD', tokens[index]).encode('ascii', 'ignore')
log.debug("token normalize = %s" % token1)
if token != token1:
log.debug("token != token1")
token_possibilities.append(token1)
log.debug("token possibilities = %s" % token_possibilities)
for token in token_possibilities:
potential_match = []
offset_match = []
token = re.sub("[^\&/\w\d.',-]", "", token) # Removes all characters that aren't words, digits, ', ".", "-", "/", "&", or ","
token = token.lower()
log.debug("token = %s ;; index = %s" % (token, index))
if token == "":
log.debug("token is empty string")
continue
# If the chosen category is "geography", we optimize it so that it looks for the initial word to have its first letter upper-cased.
# This helps to reduce the number of false positives found.
# Case: City of Industry ;; (London)
if self.category == "geography" and tokens[index][0].isupper() == False:
if len(tokens[index]) > 1 and tokens[index][1].isupper() == False:
continue
# Peeks at the next 4 words to the current key's location and appends each word one at a time to see if it forms a word that
# is found in a related category dbm file
for offset in range(5):
if index + offset >= len(tokens):
break
single_word_possibilities = [] # Possible variants for a given word
# Gets word from text without any modifications to it
word = tokens[index + offset].lower()
word1 = word.translate(punctuation).encode('ascii', 'ignore')
log.debug("word 1 translate = %s" % word1)
if word1 != "":
single_word_possibilities.append(word1)
word2 = unicodedata.normalize('NFKD', word).encode('ascii', 'ignore')
log.debug("word 2 normalize = %s" % word2)
if word1 != word2:
log.debug("word1 != word2")
single_word_possibilities.append(word2)
offset_match.append(index + offset)
log.debug("word = %s ;; offset = %s" % (word, index + offset))
possible_words = single_word_possibilities[:] # Copies list
for word in single_word_possibilities:
# Removes all symbols except ".", ', "/", "-", and "," from the word in question
new_word = re.sub("[^\&/\w\d.',-]", "", word)
if new_word != word:
log.debug("[new_word != word] = %s" % new_word)
possible_words.append(new_word)
# Checks if the word has any punctuation specified. If it does, it removes each punctuation mark individually and
# adds the newly created word back to the single_word_possibilities list for re-evaluation.
if re.search("[\&/.',-]", new_word):
for element in symbols:
regular_expression = "[%s]" % element
if re.search(regular_expression, new_word):
new_words = re.split(regular_expression, new_word)
log.debug("new words = %s ;; re = %s" % (new_words, regular_expression))
for w in new_words:
new_word1 = w.rstrip().lstrip()
if new_word1 == "":
log.debug("new word is empty string")
continue
elif len(new_word1) < 2:
log.debug("new word has less than 2 characters = %s" % new_word1)
continue
element_seen = 0
for e in possible_words:
if new_word1 == e:
element_seen = 1
break
if element_seen == 0:
possible_words.append(new_word1)
single_word_possibilities.append(new_word1)
single_word_possibilities = possible_words[:]
log.debug("potential match - before = %s" % potential_match)
if not potential_match:
for word in single_word_possibilities:
potential_match.append(word)
elif single_word_possibilities:
tmp = []
for phrase in potential_match:
for word in single_word_possibilities:
potential_word = phrase + " " + word
tmp.append(potential_word)
potential_match = tmp
log.debug("potential match - after = %s" % potential_match)
# Iterates through all of the related category dbm files and sees if the potential match is found in any of them
# gdbm_files contains a list of gdbm_file objects that contain [path, gdbm_obj]
for gdbm_obj in self.gdbm_files:
for phrase in potential_match:
if phrase in gdbm_obj[1]:
log.debug("phrase matches = %s" % phrase)
log.debug("match offset = %s" % offset_match)
# Ignore matches that are just numbers
if phrase.isdigit():
log.debug("phrase match are digits = %s" % phrase)
continue
# If the chosen category is "geography," ignore matches that are found in the filter dbm file
if self.category == "geography" and phrase in self.filter_file:
log.debug("phrase match is in filter dbm = %s" % phrase)
continue
match_offset = offset_match[:] # Makes copy of offset_match
match_found = positive_match(phrase, match_offset)
matches.append(match_found)
# Eliminates duplicates found among all matches by making sure that no two matches have the same offset
matches = sorted(matches, key=lambda positive_match: positive_match.offset)
all_matches = []
for match in matches:
found = 0
if not all_matches:
all_matches.append(match)
continue
match_offset = match.getOffset()
log.debug("match offset = %s" % match_offset)
for element in all_matches:
element_offset = element.getOffset()
for index in element_offset:
if match_offset[0] == index:
# The case where the offset is found in the previous stored entry and the current match has MORE words than the previous match
# (Ex) chicago and offset = [923] versus chicago bears and offset = [923, 924]
if len(match_offset) > len(element_offset):
found = 1
# The case where the offset is found in the previous stored entry and the current match has LESS words than the previous match
# (Ex) baltimore ravens and offset = [880, 881] versus ravens and offset = [881]
elif len(match_offset) < len(element_offset):
found = 2
# The case where the offset is found in previous stored entry and current match has the SAME number of words as the previous match
# (Ex) dallas and offset = [24] versus dallas and offset = [24]
elif len(match_offset) == len(element_offset) and match.getMatch() == element.getMatch():
found = 2
if found == 0: # The offsets have not been seen yet
all_matches.append(match)
elif found == 1:
all_matches[-1] = match
elif found == 2:
continue
return all_matches
# Processes an html file. Assumes the html file contains html entities that need to be unescaped and converted to unicode.
# Function unescapes html entities to unicode, tokenizes the entire text, and sends it for processing.
def processUnicodeString(self, string):
log = logging.getLogger('classify')
log.debug("text.processUnicodeString()")
# Html entities consist of the format &...; What we want is the ... portion. That is why we separated into a group in the RE.
string_unicode = re.sub("&(#?\\w+);", self.substituteEntity, string)
log.debug("string unicode = %s" % string_unicode)
token = nltk.tokenize.WhitespaceTokenizer().tokenize(string_unicode)
#token = nltk.wordpunct_tokenize(string_unicode)
matches = self.processUnicodeText(token)
return matches
# Processes a text file. Assumes the file content is UTF-8 encoded bytes.
# Function decodes text into unicode, tokenizes the entire text, and sends it for processing.
def processUTFString(self, string):
log = logging.getLogger('classify')
log.debug("text.processUTFString()")
log.debug("string = %s" % string)
string_utf = string.decode("utf-8")
log.debug("string utf = %s" % string_utf)
token = nltk.tokenize.WhitespaceTokenizer().tokenize(string_utf)
#token = nltk.wordpunct_tokenize(string_ascii)
matches = self.processUnicodeText(token)
return matches
# Function unescapes a single html entity, converting it to the corresponding unicode character
def substituteEntity(self, match):
log = logging.getLogger('classify')
name = match.group(1)
if name in htmlentitydefs.name2codepoint:
return unichr(htmlentitydefs.name2codepoint[name])
elif name.startswith("#"):
try:
return unichr(int(name[1:]))
except:
pass
log.debug("Cannot replace html entities with corresponding UTF-8 characters")
return '?'
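# Illustrative sketch only (not part of the classifier): the matcher is driven by
# pre-built gdbm files of known phrases per category. The document path and the
# "geography" category below are placeholders.
def _example_classify(gdbm_files, filter_file):
    classifier = text(gdbm_files, filter_file, "/tmp/document.html", "geography")
    with codecs.open("/tmp/document.html", "r", "utf-8") as handle:
        matches = classifier.processUnicodeString(handle.read())
    for match in matches:
        match.printMatch()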
|
mit
| -7,547,081,844,341,308,000
| 47.273063
| 154
| 0.515365
| false
| 4.836229
| false
| false
| false
|
dwdii/emotional-faces
|
src/emotion_model.py
|
1
|
20888
|
import csv
import os
import time
from scipy import misc
import keras.callbacks as cb
import keras.utils.np_utils as np_utils
from keras.models import Sequential
from keras.layers.convolutional import Convolution2D
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers import MaxPooling2D, ZeroPadding2D
from keras.preprocessing.image import ImageDataGenerator
def imageDataGenTransform(img, y):
# Using keras ImageDataGenerator to generate random images
datagen = ImageDataGenerator(
featurewise_std_normalization=False,
rotation_range = 20,
width_shift_range = 0.10,
height_shift_range = 0.10,
shear_range = 0.1,
zoom_range = 0.1,
horizontal_flip = True)
#x = img_to_array(img)
x = img.reshape(1, 1, img.shape[0], img.shape[1])
j = 0
for imgT, yT in datagen.flow(x, y, batch_size = 1, save_to_dir = None):
img2 = imgT
break
return img2
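# Illustrative sketch (not part of the training pipeline): the helper above
# expects a single-channel 2D image array plus its label and returns one randomly
# augmented copy shaped (1, 1, rows, cols). The file name is a placeholder and
# numpy is assumed to be available.
def _demo_augment():
    import numpy as np
    img = misc.imread('face_0001.png', flatten=True)  # grayscale 2D array
    augmented = imageDataGenTransform(img, np.array([0]))
    print augmented.shape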
def emotion_model_v1(outputClasses, verbose=False):
"""https://www.kaggle.com/somshubramajumdar/digit-recognizer/deep-convolutional-network-using-keras"""
nb_pool = 2
nb_conv = 3
nb_filters_1 = 32
nb_filters_2 = 64
nb_filters_3 = 128
dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
model.add(Dropout(0.2))
model.add(Dense(128, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v2(outputClasses, verbose=False):
"""https://www.kaggle.com/somshubramajumdar/digit-recognizer/deep-convolutional-network-using-keras"""
nb_pool = 2
nb_conv = 3
nb_filters_1 = 32
nb_filters_2 = 64
nb_filters_3 = 128
dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_3, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
model.add(Dropout(0.2))
model.add(Dense(128, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v3(outputClasses, verbose=False):
"""https://www.kaggle.com/somshubramajumdar/digit-recognizer/deep-convolutional-network-using-keras"""
nb_pool = 2
nb_conv = 3
nb_filters_1 = 32
nb_filters_2 = 64
nb_filters_3 = 128
nb_filters_4 = 128
dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_3, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_4, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
model.add(Dropout(0.2))
model.add(Dense(128, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v3_1(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 3
nb_filters_1 = 32
nb_filters_2 = 64
nb_filters_3 = 128
nb_filters_4 = 128
dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
#model.add(ZeroPadding2D((1, 1), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu", input_shape=(1, 350, 350)))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_3, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_4, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dropout(0.2))
model.add(Dense(64, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v3_2(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 3
nb_filters_1 = 32
nb_filters_2 = 32
nb_filters_3 = 64
nb_filters_4 = 128
dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
#model.add(ZeroPadding2D((1, 1), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu", input_shape=(1, 350, 350)))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_3, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_4, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dropout(0.3))
model.add(Dense(64, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v4(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 3
nb_filters_1 = 32
nb_filters_2 = 64
nb_filters_3 = 128
nb_filters_4 = 256
nb_filters_5 = 256
dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_3, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_4, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(nb_filters_5, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
model.add(Dropout(0.2))
model.add(Dense(128, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v5(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 20
nb_filters_1 = 32
#nb_filters_2 = 64
#nb_filters_3 = 128
#nb_filters_4 = 256
#nb_filters_5 = 512
#dropout = 0.25
#nb_classes = 10
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((5, 5), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((10, 10)))
#model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
#model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
#model.add(Dropout(0.25))
#model.add(Dense(nb_filters_5, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v6(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 30 # up from 20 to 30
nb_filters_1 = 32
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((5, 5), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((10, 10)))
#model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
#model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
#model.add(Dropout(0.25))
#model.add(Dense(nb_filters_5, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v7(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 40 # up from 30 to 40
nb_filters_1 = 32
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((5, 5), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
#model.add(ZeroPadding2D((10, 10)))
#model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
#model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
#model.add(Dropout(0.25))
#model.add(Dense(nb_filters_5, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v8(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 30 # Back to 30 from 40
nb_filters_1 = 32
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((5, 5), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(5, 5))) # 5,5 from 2,2
#model.add(ZeroPadding2D((10, 10)))
#model.add(Convolution2D(nb_filters_2, nb_conv, nb_conv, activation="relu"))
#model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
#model.add(Dropout(0.25))
#model.add(Dense(nb_filters_5, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def emotion_model_v9(outputClasses, verbose=False):
nb_pool = 2
nb_conv = 30 # up from 20 to 30
nb_filters_1 = 32
start_time = time.time()
print 'Compiling Model ... '
model = Sequential()
model.add(ZeroPadding2D((5, 5), input_shape=(1, 350, 350), ))
model.add(Convolution2D(nb_filters_1, nb_conv, nb_conv, activation="relu"))
#model.add(MaxPooling2D(strides=(2, 2)))
model.add(ZeroPadding2D((5, 5)))
model.add(Convolution2D(32, nb_conv, nb_conv, activation="relu"))
model.add(MaxPooling2D(strides=(2, 2)))
model.add(Flatten())
#model.add(Dropout(0.25))
#model.add(Dense(nb_filters_5, activation="relu"))
model.add(Dense(outputClasses, activation="softmax"))
if verbose:
print (model.summary())
# rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer='adadelta',
metrics=['accuracy'])
print 'Model compiled in {0} seconds'.format(time.time() - start_time)
return model
def cnn_model_jhamski(outputClasses, input_shape=(3, 150, 150), verbose=False):
model = Sequential()
model.add(Convolution2D(32, 3, 3, input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(32, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(outputClasses))
model.add(Activation('softmax'))
if verbose:
print (model.summary())
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
return model
def emotion_model_jh_v2(outputClasses, input_shape=(3, 150, 150), verbose=False):
model = Sequential()
model.add(Convolution2D(32, 5, 5, input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(32, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(outputClasses))
model.add(Activation('softmax'))
if verbose:
print (model.summary())
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
return model
def emotion_model_jh_v3(outputClasses, input_shape=(3, 150, 150), verbose=False):
model = Sequential()
model.add(Convolution2D(32, 5, 5, input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(32, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(outputClasses))
model.add(Activation('softmax'))
if verbose:
print (model.summary())
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
return model
def emotion_model_jh_v4(outputClasses, input_shape=(3, 150, 150), verbose=False):
model = Sequential()
model.add(Convolution2D(32, 8, 8, input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(32, 4, 4))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 2, 2))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 1, 1))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(outputClasses))
model.add(Activation('softmax'))
if verbose:
print (model.summary())
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
return model
def emotion_model_jh_v5(outputClasses, input_shape=(3, 150, 150), verbose=False):
model = Sequential()
model.add(Convolution2D(32, 8, 8, input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(32, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 2, 2))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
#model.add(Dropout(0.4))
model.add(Dense(outputClasses))
model.add(Activation('softmax'))
if verbose:
print (model.summary())
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
return model
class LossHistory(cb.Callback):
def on_train_begin(self, logs={}):
self.losses = []
def on_batch_end(self, batch, logs={}):
batch_loss = logs.get('loss')
self.losses.append(batch_loss)
def run_network(data, model, epochs=20, batch=256, verbosity=2):
"""
:param data: X_train, X_test, y_train, y_test
:param model:
:param epochs:
:param batch:
:return:
"""
try:
start_time = time.time()
history = LossHistory()
X_train, X_test, y_train, y_test = data
y_trainC = np_utils.to_categorical(y_train )
y_testC = np_utils.to_categorical(y_test)
print y_trainC.shape
print y_testC.shape
print 'Training model...'
model.fit(X_train, y_trainC, nb_epoch=epochs, batch_size=batch,
callbacks=[history],
validation_data=(X_test, y_testC), verbose=verbosity)
print "Training duration : {0}".format(time.time() - start_time)
score = model.evaluate(X_test, y_testC, batch_size=16, verbose=0)
print "Network's test score [loss, accuracy]: {0}".format(score)
return model, history.losses
except KeyboardInterrupt:
print ' KeyboardInterrupt'
return model, history.losses
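# Illustrative wiring sketch (not part of the original experiments): builds one of
# the models above and hands it to run_network. X_* are assumed to be arrays
# shaped (n, 1, 350, 350) and y_* integer emotion labels; the class count of 8 is
# a placeholder.
def _example_training_run(X_train, X_test, y_train, y_test):
    model = emotion_model_jh_v2(outputClasses=8, input_shape=(1, 350, 350))
    return run_network((X_train, X_test, y_train, y_test), model,
                       epochs=10, batch=32)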
|
apache-2.0
| 5,617,332,157,175,504,000
| 31.84434
| 106
| 0.632756
| false
| 3.237948
| true
| false
| false
|
mikesname/ehri-collections
|
ehriportal/portal/migrations/0014_auto__add_field_collection_notes__add_unique_collection_identifier_rep.py
|
1
|
17462
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Collection.notes'
db.add_column('portal_collection', 'notes',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding unique constraint on 'Collection', fields ['identifier', 'repository']
db.create_unique('portal_collection', ['identifier', 'repository_id'])
def backwards(self, orm):
# Removing unique constraint on 'Collection', fields ['identifier', 'repository']
db.delete_unique('portal_collection', ['identifier', 'repository_id'])
# Deleting field 'Collection.notes'
db.delete_column('portal_collection', 'notes')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'portal.authority': {
'Meta': {'object_name': 'Authority', '_ormbases': ['portal.Resource']},
'dates_of_existence': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'functions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'general_context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'institution_responsible_identifier': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'internal_structures': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'languages': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'legal_status': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'lod': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mandates': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'places': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Resource']", 'unique': 'True', 'primary_key': 'True'}),
'revision_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scripts': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'sources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type_of_entity': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'portal.collection': {
'Meta': {'unique_together': "(('identifier', 'repository'),)", 'object_name': 'Collection', '_ormbases': ['portal.Resource']},
'access_conditions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'accruals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'acquisition': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'alternate_title': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'appraisal': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'archival_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'arrangement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Authority']", 'null': 'True', 'blank': 'True'}),
'edition': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'extent_and_medium': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'finding_aids': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'institution_responsible_identifier': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'languages': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'languages_of_description': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'location_of_copies': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'location_of_originals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'lod': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'physical_characteristics': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'related_units_of_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'repository': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Repository']"}),
'reproduction_conditions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Resource']", 'unique': 'True', 'primary_key': 'True'}),
'revision_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scope_and_content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scripts': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'scripts_of_description': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'sources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'portal.contact': {
'Meta': {'object_name': 'Contact'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'contact_person': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'contact_type': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'country_code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created_on': ('django.db.models.fields.DateTimeField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'primary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'repository': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Repository']"}),
'street_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'portal.fuzzydate': {
'Meta': {'object_name': 'FuzzyDate'},
'circa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'collection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'date_set'", 'to': "orm['portal.Collection']"}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'precision': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'})
},
'portal.othername': {
'Meta': {'object_name': 'OtherName'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'portal.place': {
'Meta': {'object_name': 'Place'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"})
},
'portal.property': {
'Meta': {'object_name': 'Property'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'portal.relation': {
'Meta': {'object_name': 'Relation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['portal.Resource']"}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['portal.Resource']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'portal.repository': {
'Meta': {'object_name': 'Repository', '_ormbases': ['portal.Resource']},
'access_conditions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'buildings': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'collecting_policies': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'dates_of_existence': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disabled_access': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'finding_aids': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'functions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'general_context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'geocultural_context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'holdings': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'internal_structures': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'languages': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'legal_status': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'lod': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'logo': ('portal.thumbs.ImageWithThumbsField', [], {'name': "'logo'", 'sizes': '((100, 100), (300, 300))', 'max_length': '100', 'blank': 'True', 'null': 'True'}),
'maintenance_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'mandates': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'opening_times': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'places': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'reproduction_services': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'research_services': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Resource']", 'unique': 'True', 'primary_key': 'True'}),
'rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scripts': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'sources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type_of_entity': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'portal.resource': {
'Meta': {'object_name': 'Resource'},
'created_on': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'portal.resourceimage': {
'Meta': {'object_name': 'ResourceImage'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('portal.thumbs.ImageWithThumbsField', [], {'max_length': '100', 'name': "'image'", 'sizes': '((100, 100), (300, 300))'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['portal']
|
mit
| 374,222,808,844,784,830
| 82.152381
| 174
| 0.542664
| false
| 3.655432
| false
| false
| false
|
brendano/twitter_geo_preproc
|
geo2_pipeline/preproc8/45_make_vocab/standalone_wc.py
|
1
|
1387
|
import re,sys,os,itertools
import ujson as json
## Try to replicate the config in 50_quadify ...
OPTS = {}
OPTS['min_date'] = '2009-08-03'
OPTS['max_date'] = '2012-09-30'
OPTS['msa_county_file'] = os.path.join(os.path.dirname(__file__), '../../../geo_metro/msa_counties.tsv')
OPTS['num_msas'] = 200
countyfips2regionid = {}
for line in open(OPTS['msa_county_file']).readlines()[1:]:
rank,name,countyfips = line.split('\t')
rank = int(rank)
if rank > OPTS['num_msas']: continue
countyfips = countyfips.strip().split(',')
for fips in countyfips:
countyfips2regionid[fips] = rank
def get_region(geodict):
county_fips = geodict['us_county']['geoid10']
return countyfips2regionid.get(county_fips, None)
def iterate_tweets():
for line in sys.stdin:
parts = line.rstrip('\n').split('\t')
date,user,geo,tweet = parts
date = date.split('T')[0]
if date < OPTS['min_date'] or date > OPTS['max_date']:
continue
region = get_region(json.loads(geo))
if region is None:
continue
yield user, tweet.split()
def stuff():
for user, tweets in itertools.groupby(iterate_tweets(), key=lambda (u,t): u):
wordset = set()
for _,toks in tweets:
for tok in toks:
wordset.add(tok)
for word in wordset:
print word
stuff()
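# --- Editor's usage note (not part of the original script) ---
# The script reads tab-separated records from stdin, one tweet per line:
#     <ISO timestamp>\t<user>\t<geo JSON with us_county.geoid10>\t<tokenised text>
# e.g. 2010-05-01T12:34:56\tsomeuser\t{"us_county": {"geoid10": "36061"}}\thello world
# Because itertools.groupby only merges *consecutive* rows, the input must
# already be grouped (sorted) by user. A typical invocation might look like:
#     sort -t$'\t' -k2,2 tweets.tsv | python standalone_wc.py > word_occurrences.txt
# where the output lists, per user, each distinct token once (file and field
# names here are illustrative assumptions).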
|
mit
| -2,551,247,292,016,847,400
| 30.522727
| 104
| 0.599856
| false
| 3.116854
| false
| false
| false
|
pierg75/pier-sosreport
|
sos/plugins/selinux.py
|
1
|
2104
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from sos.plugins import Plugin, RedHatPlugin
class SELinux(Plugin, RedHatPlugin):
"""SELinux access control
"""
plugin_name = 'selinux'
profiles = ('system', 'security', 'openshift')
option_list = [("fixfiles", 'Print incorrect file context labels',
'slow', False)]
packages = ('libselinux',)
def setup(self):
self.add_copy_spec([
'/etc/sestatus.conf',
'/etc/selinux'
])
self.add_cmd_output('sestatus')
state = self.get_command_output('getenforce')['output']
        if state != 'Disabled':
self.add_cmd_output([
'ps auxZww',
'sestatus -v',
'sestatus -b',
'selinuxdefcon root',
'selinuxconlist root',
'selinuxexeccon /bin/passwd',
'semanage -o' # deprecated, may disappear at some point
])
subcmds = [
'fcontext',
'user',
'port',
'login',
'node',
'interface',
'module'
]
for subcmd in subcmds:
self.add_cmd_output("semanage %s -l" % subcmd)
if self.get_option('fixfiles'):
self.add_cmd_output("restorecon -Rvn /", stderr=False)
# vim: set et ts=4 sw=4 :
|
gpl-2.0
| -5,123,982,218,898,091,000
| 31.875
| 73
| 0.574144
| false
| 4.241935
| false
| false
| false
|
chipx86/the-cure
|
thecure/layers.py
|
1
|
5377
|
import pygame
class SpriteQuadTree(object):
def __init__(self, rect, depth=6, parent=None):
depth -= 1
self.rect = rect
self.sprites = []
self.parent = parent
self.depth = depth
self.cx = self.rect.centerx
self.cy = self.rect.centery
self._moved_cnxs = {}
self._next_stamp = 1
if depth == 0:
self.nw_tree = None
self.ne_tree = None
self.sw_tree = None
self.se_tree = None
else:
quad_size = (rect.width / 2, rect.height / 2)
self.nw_tree = SpriteQuadTree(
pygame.Rect(rect.x, rect.y, *quad_size),
depth, self)
self.ne_tree = SpriteQuadTree(
pygame.Rect(self.cx, rect.y, *quad_size),
depth, self)
self.sw_tree = SpriteQuadTree(
pygame.Rect(rect.x, self.cy, *quad_size),
depth, self)
self.se_tree = SpriteQuadTree(
pygame.Rect(self.cx, self.cy, *quad_size),
depth, self)
def add(self, sprite):
if not self.parent and sprite.can_move:
self._moved_cnxs[sprite] = sprite.moved.connect(
lambda dx, dy: self._recompute_sprite(sprite))
# If this is a leaf node or the sprite is overlapping all quadrants,
# store it in this QuadTree's list of sprites. If it's in fewer
# quadrants, go through and add to each that it touches.
        if self.depth > 0:
trees = list(self._get_trees(sprite.rect))
assert len(trees) > 0
if len(trees) < 4:
for tree in trees:
tree.add(sprite)
return
assert sprite not in self.sprites
self.sprites.append(sprite)
sprite.quad_trees.add(self)
def remove(self, sprite):
if self.parent:
self.parent.remove(sprite)
return
assert sprite.quad_trees
for tree in sprite.quad_trees:
tree.sprites.remove(sprite)
sprite.quad_trees.clear()
if sprite.can_move:
cnx = self._moved_cnxs.pop(sprite)
cnx.disconnect()
def get_sprites(self, rect=None, stamp=None):
if stamp is None:
stamp = self._next_stamp
self._next_stamp += 1
for sprite in self.sprites:
if (getattr(sprite, '_quadtree_stamp', None) != stamp and
(rect is None or rect.colliderect(sprite.rect))):
sprite._quadtree_stamp = stamp
yield sprite
for tree in self._get_trees(rect):
for sprite in tree.get_sprites(rect, stamp):
yield sprite
def __iter__(self):
return self.get_sprites()
def _get_trees(self, rect):
if self.depth > 0:
if not rect or (rect.left <= self.cx and rect.top <= self.cy):
yield self.nw_tree
if not rect or (rect.right >= self.cx and rect.top <= self.cy):
yield self.ne_tree
if not rect or (rect.left <= self.cx and rect.bottom >= self.cy):
yield self.sw_tree
if not rect or (rect.right >= self.cx and rect.bottom >= self.cy):
yield self.se_tree
def _get_leaf_trees(self, rect):
trees = list(self._get_trees(rect))
if not trees or len(trees) == 4:
yield self
else:
for tree in trees:
for leaf in tree._get_leaf_trees(rect):
yield leaf
def _recompute_sprite(self, sprite):
assert sprite.quad_trees
if sprite.quad_trees != set(self._get_leaf_trees(sprite.rect)):
self.remove(sprite)
self.add(sprite)
class Layer(object):
def __init__(self, name, index, parent):
self.name = name
self.index = index
self.parent = parent
self.quad_tree = SpriteQuadTree(pygame.Rect(0, 0, *self.parent.size))
self.tick_sprites = []
def add(self, *objs):
for obj in objs:
obj.layer = self
self.update_sprite(obj)
if obj.use_quadtrees:
self.quad_tree.add(obj)
obj.on_added(self)
def remove(self, *objs):
for obj in objs:
self.update_sprite(obj, True)
if obj.use_quadtrees:
self.quad_tree.remove(obj)
obj.on_removed(self)
def update_sprite(self, sprite, force_remove=False):
assert sprite.layer == self
sprite.update_image()
if sprite.NEED_TICKS:
if sprite.visible and not force_remove:
self.tick_sprites.append(sprite)
else:
try:
self.tick_sprites.remove(sprite)
except ValueError:
# It may be gone now.
pass
def __iter__(self):
return iter(self.quad_tree)
def iterate_in_rect(self, rect):
return self.quad_tree.get_sprites(rect)
def tick(self):
for sprite in self.tick_sprites:
sprite.tick()
def start(self):
for sprite in self.quad_tree:
sprite.start()
def stop(self):
for sprite in self.quad_tree:
sprite.stop()
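# --- Editor's usage sketch (not part of the original module) ---
# A minimal, hedged demonstration of SpriteQuadTree on its own, using a
# stub object in place of the game's Sprite class; the stub only carries
# the attributes the tree itself touches (rect, quad_trees, can_move).
if __name__ == '__main__':
    class _StubSprite(object):
        def __init__(self, rect):
            self.rect = rect
            self.quad_trees = set()
            self.can_move = False
    tree = SpriteQuadTree(pygame.Rect(0, 0, 1024, 1024))
    stub = _StubSprite(pygame.Rect(10, 10, 32, 32))
    tree.add(stub)
    # Querying a region overlapping the stub's rect should yield it once.
    print list(tree.get_sprites(pygame.Rect(0, 0, 64, 64)))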
|
mit
| 5,568,242,331,991,032,000
| 28.222826
| 78
| 0.525386
| false
| 3.971196
| false
| false
| false
|
SnabbCo/neutron
|
neutron/db/l3_db.py
|
1
|
46915
|
# Copyright 2012 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import external_net
from neutron.extensions import l3
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.openstack.common.notifier import api as notifier_api
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)
DEVICE_OWNER_ROUTER_INTF = l3_constants.DEVICE_OWNER_ROUTER_INTF
DEVICE_OWNER_ROUTER_GW = l3_constants.DEVICE_OWNER_ROUTER_GW
DEVICE_OWNER_FLOATINGIP = l3_constants.DEVICE_OWNER_FLOATINGIP
EXTERNAL_GW_INFO = l3.EXTERNAL_GW_INFO
# Maps API field to DB column
# API parameter names and database column names may differ.
# Useful to keep the filtering consistent between API and database.
API_TO_DB_COLUMN_MAP = {'port_id': 'fixed_port_id'}
class Router(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
"""Represents a v2 neutron router."""
name = sa.Column(sa.String(255))
status = sa.Column(sa.String(16))
admin_state_up = sa.Column(sa.Boolean)
gw_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
gw_port = orm.relationship(models_v2.Port, lazy='joined')
class FloatingIP(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
"""Represents a floating IP address.
This IP address may or may not be allocated to a tenant, and may or
may not be associated with an internal port/ip address/router.
"""
floating_ip_address = sa.Column(sa.String(64), nullable=False)
floating_network_id = sa.Column(sa.String(36), nullable=False)
floating_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'),
nullable=False)
fixed_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
fixed_ip_address = sa.Column(sa.String(64))
router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id'))
# Additional attribute for keeping track of the router where the floating
# ip was associated in order to be able to ensure consistency even if an
    # asynchronous backend is unavailable when the floating IP is disassociated
last_known_router_id = sa.Column(sa.String(36))
status = sa.Column(sa.String(16))
class L3_NAT_db_mixin(l3.RouterPluginBase):
"""Mixin class to add L3/NAT router methods to db_plugin_base_v2."""
l3_rpc_notifier = l3_rpc_agent_api.L3AgentNotify
@property
def _core_plugin(self):
return manager.NeutronManager.get_plugin()
def _get_router(self, context, id):
try:
router = self._get_by_id(context, Router, id)
except exc.NoResultFound:
raise l3.RouterNotFound(router_id=id)
return router
def _make_router_dict(self, router, fields=None,
process_extensions=True):
res = {'id': router['id'],
'name': router['name'],
'tenant_id': router['tenant_id'],
'admin_state_up': router['admin_state_up'],
'status': router['status'],
EXTERNAL_GW_INFO: None,
'gw_port_id': router['gw_port_id']}
if router['gw_port_id']:
nw_id = router.gw_port['network_id']
res[EXTERNAL_GW_INFO] = {'network_id': nw_id}
# NOTE(salv-orlando): The following assumes this mixin is used in a
# class inheriting from CommonDbMixin, which is true for all existing
# plugins.
if process_extensions:
self._apply_dict_extend_functions(
l3.ROUTERS, res, router)
return self._fields(res, fields)
def create_router(self, context, router):
r = router['router']
has_gw_info = False
if EXTERNAL_GW_INFO in r:
has_gw_info = True
gw_info = r[EXTERNAL_GW_INFO]
del r[EXTERNAL_GW_INFO]
tenant_id = self._get_tenant_id_for_create(context, r)
with context.session.begin(subtransactions=True):
# pre-generate id so it will be available when
# configuring external gw port
router_db = Router(id=uuidutils.generate_uuid(),
tenant_id=tenant_id,
name=r['name'],
admin_state_up=r['admin_state_up'],
status="ACTIVE")
context.session.add(router_db)
if has_gw_info:
self._update_router_gw_info(context, router_db['id'], gw_info)
return self._make_router_dict(router_db, process_extensions=False)
def update_router(self, context, id, router):
r = router['router']
has_gw_info = False
gw_info = None
if EXTERNAL_GW_INFO in r:
has_gw_info = True
gw_info = r[EXTERNAL_GW_INFO]
del r[EXTERNAL_GW_INFO]
# check whether router needs and can be rescheduled to the proper
# l3 agent (associated with given external network);
# do check before update in DB as an exception will be raised
# in case no proper l3 agent found
candidates = None
if has_gw_info:
candidates = self._check_router_needs_rescheduling(
context, id, gw_info)
with context.session.begin(subtransactions=True):
if has_gw_info:
self._update_router_gw_info(context, id, gw_info)
router_db = self._get_router(context, id)
# Ensure we actually have something to update
if r.keys():
router_db.update(r)
if candidates:
l3_plugin = manager.NeutronManager.get_service_plugins().get(
constants.L3_ROUTER_NAT)
l3_plugin.reschedule_router(context, id, candidates)
self.l3_rpc_notifier.routers_updated(
context, [router_db['id']])
return self._make_router_dict(router_db)
def _check_router_needs_rescheduling(self, context, router_id, gw_info):
"""Checks whether router's l3 agent can handle the given network
When external_network_bridge is set, each L3 agent can be associated
with at most one external network. If router's new external gateway
        is on another network, then the router needs to be rescheduled to the
proper l3 agent.
If external_network_bridge is not set then the agent
can support multiple external networks and rescheduling is not needed
:return: list of candidate agents if rescheduling needed,
None otherwise; raises exception if there is no eligible l3 agent
associated with target external network
"""
# TODO(obondarev): rethink placement of this func as l3 db manager is
# not really a proper place for agent scheduling stuff
network_id = gw_info.get('network_id') if gw_info else None
if not network_id:
return
nets = self._core_plugin.get_networks(
context, {external_net.EXTERNAL: [True]})
# nothing to do if there is only one external network
if len(nets) <= 1:
return
# first get plugin supporting l3 agent scheduling
# (either l3 service plugin or core_plugin)
l3_plugin = manager.NeutronManager.get_service_plugins().get(
constants.L3_ROUTER_NAT)
if (not utils.is_extension_supported(
l3_plugin,
l3_constants.L3_AGENT_SCHEDULER_EXT_ALIAS) or
l3_plugin.router_scheduler is None):
# that might mean that we are dealing with non-agent-based
# implementation of l3 services
return
cur_agents = l3_plugin.list_l3_agents_hosting_router(
context, router_id)['agents']
for agent in cur_agents:
ext_net_id = agent['configurations'].get(
'gateway_external_network_id')
ext_bridge = agent['configurations'].get(
'external_network_bridge', 'br-ex')
if (ext_net_id == network_id or
(not ext_net_id and not ext_bridge)):
return
# otherwise find l3 agent with matching gateway_external_network_id
active_agents = l3_plugin.get_l3_agents(context, active=True)
router = {
'id': router_id,
'external_gateway_info': {'network_id': network_id}
}
candidates = l3_plugin.get_l3_agent_candidates(
router, active_agents)
if not candidates:
msg = (_('No eligible l3 agent associated with external network '
'%s found') % network_id)
raise n_exc.BadRequest(resource='router', msg=msg)
return candidates
def _create_router_gw_port(self, context, router, network_id):
# Port has no 'tenant-id', as it is hidden from user
gw_port = self._core_plugin.create_port(context.elevated(), {
'port': {'tenant_id': '', # intentionally not set
'network_id': network_id,
'mac_address': attributes.ATTR_NOT_SPECIFIED,
'fixed_ips': attributes.ATTR_NOT_SPECIFIED,
'device_id': router['id'],
'device_owner': DEVICE_OWNER_ROUTER_GW,
'admin_state_up': True,
'name': ''}})
if not gw_port['fixed_ips']:
self._core_plugin.delete_port(context.elevated(), gw_port['id'],
l3_port_check=False)
msg = (_('No IPs available for external network %s') %
network_id)
raise n_exc.BadRequest(resource='router', msg=msg)
with context.session.begin(subtransactions=True):
router.gw_port = self._core_plugin._get_port(context.elevated(),
gw_port['id'])
context.session.add(router)
def _update_router_gw_info(self, context, router_id, info, router=None):
# TODO(salvatore-orlando): guarantee atomic behavior also across
# operations that span beyond the model classes handled by this
# class (e.g.: delete_port)
router = router or self._get_router(context, router_id)
gw_port = router.gw_port
# network_id attribute is required by API, so it must be present
network_id = info['network_id'] if info else None
if network_id:
network_db = self._core_plugin._get_network(context, network_id)
if not network_db.external:
msg = _("Network %s is not a valid external "
"network") % network_id
raise n_exc.BadRequest(resource='router', msg=msg)
# figure out if we need to delete existing port
if gw_port and gw_port['network_id'] != network_id:
fip_count = self.get_floatingips_count(context.elevated(),
{'router_id': [router_id]})
if fip_count:
raise l3.RouterExternalGatewayInUseByFloatingIp(
router_id=router_id, net_id=gw_port['network_id'])
with context.session.begin(subtransactions=True):
router.gw_port = None
context.session.add(router)
self._core_plugin.delete_port(context.elevated(),
gw_port['id'],
l3_port_check=False)
if network_id is not None and (gw_port is None or
gw_port['network_id'] != network_id):
subnets = self._core_plugin._get_subnets_by_network(context,
network_id)
for subnet in subnets:
self._check_for_dup_router_subnet(context, router_id,
network_id, subnet['id'],
subnet['cidr'])
self._create_router_gw_port(context, router, network_id)
def delete_router(self, context, id):
with context.session.begin(subtransactions=True):
router = self._get_router(context, id)
# Ensure that the router is not used
fips = self.get_floatingips_count(context.elevated(),
filters={'router_id': [id]})
if fips:
raise l3.RouterInUse(router_id=id)
device_filter = {'device_id': [id],
'device_owner': [DEVICE_OWNER_ROUTER_INTF]}
ports = self._core_plugin.get_ports_count(context.elevated(),
filters=device_filter)
if ports:
raise l3.RouterInUse(router_id=id)
#TODO(nati) Refactor here when we have router insertion model
vpnservice = manager.NeutronManager.get_service_plugins().get(
constants.VPN)
if vpnservice:
vpnservice.check_router_in_use(context, id)
context.session.delete(router)
# Delete the gw port after the router has been removed to
# avoid a constraint violation.
device_filter = {'device_id': [id],
'device_owner': [DEVICE_OWNER_ROUTER_GW]}
ports = self._core_plugin.get_ports(context.elevated(),
filters=device_filter)
if ports:
self._core_plugin._delete_port(context.elevated(),
ports[0]['id'])
self.l3_rpc_notifier.router_deleted(context, id)
def get_router(self, context, id, fields=None):
router = self._get_router(context, id)
return self._make_router_dict(router, fields)
def get_routers(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'router', limit, marker)
return self._get_collection(context, Router,
self._make_router_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def get_routers_count(self, context, filters=None):
return self._get_collection_count(context, Router,
filters=filters)
def _check_for_dup_router_subnet(self, context, router_id,
network_id, subnet_id, subnet_cidr):
try:
rport_qry = context.session.query(models_v2.Port)
rports = rport_qry.filter_by(device_id=router_id)
# It's possible these ports are on the same network, but
# different subnets.
new_ipnet = netaddr.IPNetwork(subnet_cidr)
for p in rports:
for ip in p['fixed_ips']:
if ip['subnet_id'] == subnet_id:
msg = (_("Router already has a port on subnet %s")
% subnet_id)
raise n_exc.BadRequest(resource='router', msg=msg)
sub_id = ip['subnet_id']
cidr = self._core_plugin._get_subnet(context.elevated(),
sub_id)['cidr']
ipnet = netaddr.IPNetwork(cidr)
match1 = netaddr.all_matching_cidrs(new_ipnet, [cidr])
match2 = netaddr.all_matching_cidrs(ipnet, [subnet_cidr])
if match1 or match2:
data = {'subnet_cidr': subnet_cidr,
'subnet_id': subnet_id,
'cidr': cidr,
'sub_id': sub_id}
msg = (_("Cidr %(subnet_cidr)s of subnet "
"%(subnet_id)s overlaps with cidr %(cidr)s "
"of subnet %(sub_id)s") % data)
raise n_exc.BadRequest(resource='router', msg=msg)
except exc.NoResultFound:
pass
def add_router_interface(self, context, router_id, interface_info):
if not interface_info:
msg = _("Either subnet_id or port_id must be specified")
raise n_exc.BadRequest(resource='router', msg=msg)
if 'port_id' in interface_info:
# make sure port update is committed
with context.session.begin(subtransactions=True):
if 'subnet_id' in interface_info:
msg = _("Cannot specify both subnet-id and port-id")
raise n_exc.BadRequest(resource='router', msg=msg)
port = self._core_plugin._get_port(context,
interface_info['port_id'])
if port['device_id']:
raise n_exc.PortInUse(net_id=port['network_id'],
port_id=port['id'],
device_id=port['device_id'])
fixed_ips = [ip for ip in port['fixed_ips']]
if len(fixed_ips) != 1:
msg = _('Router port must have exactly one fixed IP')
raise n_exc.BadRequest(resource='router', msg=msg)
subnet_id = fixed_ips[0]['subnet_id']
subnet = self._core_plugin._get_subnet(context, subnet_id)
self._check_for_dup_router_subnet(context, router_id,
port['network_id'],
subnet['id'],
subnet['cidr'])
port.update({'device_id': router_id,
'device_owner': DEVICE_OWNER_ROUTER_INTF})
elif 'subnet_id' in interface_info:
subnet_id = interface_info['subnet_id']
subnet = self._core_plugin._get_subnet(context, subnet_id)
# Ensure the subnet has a gateway
if not subnet['gateway_ip']:
msg = _('Subnet for router interface must have a gateway IP')
raise n_exc.BadRequest(resource='router', msg=msg)
self._check_for_dup_router_subnet(context, router_id,
subnet['network_id'],
subnet_id,
subnet['cidr'])
fixed_ip = {'ip_address': subnet['gateway_ip'],
'subnet_id': subnet['id']}
port = self._core_plugin.create_port(context, {
'port':
{'tenant_id': subnet['tenant_id'],
'network_id': subnet['network_id'],
'fixed_ips': [fixed_ip],
'mac_address': attributes.ATTR_NOT_SPECIFIED,
'admin_state_up': True,
'device_id': router_id,
'device_owner': DEVICE_OWNER_ROUTER_INTF,
'name': ''}})
self.l3_rpc_notifier.routers_updated(
context, [router_id], 'add_router_interface')
info = {'id': router_id,
'tenant_id': subnet['tenant_id'],
'port_id': port['id'],
'subnet_id': port['fixed_ips'][0]['subnet_id']}
notifier_api.notify(context,
notifier_api.publisher_id('network'),
'router.interface.create',
notifier_api.CONF.default_notification_level,
{'router_interface': info})
return info
def _confirm_router_interface_not_in_use(self, context, router_id,
subnet_id):
subnet_db = self._core_plugin._get_subnet(context, subnet_id)
subnet_cidr = netaddr.IPNetwork(subnet_db['cidr'])
fip_qry = context.session.query(FloatingIP)
for fip_db in fip_qry.filter_by(router_id=router_id):
if netaddr.IPAddress(fip_db['fixed_ip_address']) in subnet_cidr:
raise l3.RouterInterfaceInUseByFloatingIP(
router_id=router_id, subnet_id=subnet_id)
def remove_router_interface(self, context, router_id, interface_info):
if not interface_info:
msg = _("Either subnet_id or port_id must be specified")
raise n_exc.BadRequest(resource='router', msg=msg)
if 'port_id' in interface_info:
port_id = interface_info['port_id']
port_db = self._core_plugin._get_port(context, port_id)
if not (port_db['device_owner'] == DEVICE_OWNER_ROUTER_INTF and
port_db['device_id'] == router_id):
raise l3.RouterInterfaceNotFound(router_id=router_id,
port_id=port_id)
if 'subnet_id' in interface_info:
port_subnet_id = port_db['fixed_ips'][0]['subnet_id']
if port_subnet_id != interface_info['subnet_id']:
raise n_exc.SubnetMismatchForPort(
port_id=port_id,
subnet_id=interface_info['subnet_id'])
subnet_id = port_db['fixed_ips'][0]['subnet_id']
subnet = self._core_plugin._get_subnet(context, subnet_id)
self._confirm_router_interface_not_in_use(
context, router_id, subnet_id)
self._core_plugin.delete_port(context, port_db['id'],
l3_port_check=False)
elif 'subnet_id' in interface_info:
subnet_id = interface_info['subnet_id']
self._confirm_router_interface_not_in_use(context, router_id,
subnet_id)
subnet = self._core_plugin._get_subnet(context, subnet_id)
found = False
try:
rport_qry = context.session.query(models_v2.Port)
ports = rport_qry.filter_by(
device_id=router_id,
device_owner=DEVICE_OWNER_ROUTER_INTF,
network_id=subnet['network_id'])
for p in ports:
if p['fixed_ips'][0]['subnet_id'] == subnet_id:
port_id = p['id']
self._core_plugin.delete_port(context, p['id'],
l3_port_check=False)
found = True
break
except exc.NoResultFound:
pass
if not found:
raise l3.RouterInterfaceNotFoundForSubnet(router_id=router_id,
subnet_id=subnet_id)
self.l3_rpc_notifier.routers_updated(
context, [router_id], 'remove_router_interface')
info = {'id': router_id,
'tenant_id': subnet['tenant_id'],
'port_id': port_id,
'subnet_id': subnet_id}
notifier_api.notify(context,
notifier_api.publisher_id('network'),
'router.interface.delete',
notifier_api.CONF.default_notification_level,
{'router_interface': info})
return info
def _get_floatingip(self, context, id):
try:
floatingip = self._get_by_id(context, FloatingIP, id)
except exc.NoResultFound:
raise l3.FloatingIPNotFound(floatingip_id=id)
return floatingip
def _make_floatingip_dict(self, floatingip, fields=None):
res = {'id': floatingip['id'],
'tenant_id': floatingip['tenant_id'],
'floating_ip_address': floatingip['floating_ip_address'],
'floating_network_id': floatingip['floating_network_id'],
'router_id': floatingip['router_id'],
'port_id': floatingip['fixed_port_id'],
'fixed_ip_address': floatingip['fixed_ip_address'],
'status': floatingip['status']}
return self._fields(res, fields)
def _get_router_for_floatingip(self, context, internal_port,
internal_subnet_id,
external_network_id):
subnet_db = self._core_plugin._get_subnet(context,
internal_subnet_id)
if not subnet_db['gateway_ip']:
msg = (_('Cannot add floating IP to port on subnet %s '
'which has no gateway_ip') % internal_subnet_id)
raise n_exc.BadRequest(resource='floatingip', msg=msg)
# find router interface ports on this network
router_intf_qry = context.session.query(models_v2.Port)
router_intf_ports = router_intf_qry.filter_by(
network_id=internal_port['network_id'],
device_owner=DEVICE_OWNER_ROUTER_INTF)
for intf_p in router_intf_ports:
if intf_p['fixed_ips'][0]['subnet_id'] == internal_subnet_id:
router_id = intf_p['device_id']
router_gw_qry = context.session.query(models_v2.Port)
has_gw_port = router_gw_qry.filter_by(
network_id=external_network_id,
device_id=router_id,
device_owner=DEVICE_OWNER_ROUTER_GW).count()
if has_gw_port:
return router_id
raise l3.ExternalGatewayForFloatingIPNotFound(
subnet_id=internal_subnet_id,
external_network_id=external_network_id,
port_id=internal_port['id'])
def _internal_fip_assoc_data(self, context, fip):
"""Retrieve internal port data for floating IP.
Retrieve information concerning the internal port where
the floating IP should be associated to.
"""
internal_port = self._core_plugin._get_port(context, fip['port_id'])
if not internal_port['tenant_id'] == fip['tenant_id']:
port_id = fip['port_id']
if 'id' in fip:
floatingip_id = fip['id']
data = {'port_id': port_id,
'floatingip_id': floatingip_id}
msg = (_('Port %(port_id)s is associated with a different '
'tenant than Floating IP %(floatingip_id)s and '
'therefore cannot be bound.') % data)
else:
msg = (_('Cannot create floating IP and bind it to '
'Port %s, since that port is owned by a '
'different tenant.') % port_id)
raise n_exc.BadRequest(resource='floatingip', msg=msg)
internal_subnet_id = None
if 'fixed_ip_address' in fip and fip['fixed_ip_address']:
internal_ip_address = fip['fixed_ip_address']
for ip in internal_port['fixed_ips']:
if ip['ip_address'] == internal_ip_address:
internal_subnet_id = ip['subnet_id']
if not internal_subnet_id:
msg = (_('Port %(id)s does not have fixed ip %(address)s') %
{'id': internal_port['id'],
'address': internal_ip_address})
raise n_exc.BadRequest(resource='floatingip', msg=msg)
else:
ips = [ip['ip_address'] for ip in internal_port['fixed_ips']]
if not ips:
                msg = (_('Cannot add floating IP to port %s that has '
                         'no fixed IP addresses') % internal_port['id'])
raise n_exc.BadRequest(resource='floatingip', msg=msg)
if len(ips) > 1:
msg = (_('Port %s has multiple fixed IPs. Must provide'
' a specific IP when assigning a floating IP') %
internal_port['id'])
raise n_exc.BadRequest(resource='floatingip', msg=msg)
internal_ip_address = internal_port['fixed_ips'][0]['ip_address']
internal_subnet_id = internal_port['fixed_ips'][0]['subnet_id']
return internal_port, internal_subnet_id, internal_ip_address
def get_assoc_data(self, context, fip, floating_network_id):
"""Determine/extract data associated with the internal port.
When a floating IP is associated with an internal port,
we need to extract/determine some data associated with the
internal port, including the internal_ip_address, and router_id.
We also need to confirm that this internal port is owned by the
tenant who owns the floating IP.
"""
(internal_port, internal_subnet_id,
internal_ip_address) = self._internal_fip_assoc_data(context, fip)
router_id = self._get_router_for_floatingip(context,
internal_port,
internal_subnet_id,
floating_network_id)
# confirm that this router has a floating
# ip enabled gateway with support for this floating IP network
try:
port_qry = context.elevated().session.query(models_v2.Port)
port_qry.filter_by(
network_id=floating_network_id,
device_id=router_id,
device_owner=DEVICE_OWNER_ROUTER_GW).one()
except exc.NoResultFound:
raise l3.ExternalGatewayForFloatingIPNotFound(
subnet_id=internal_subnet_id,
port_id=internal_port['id'])
return (fip['port_id'], internal_ip_address, router_id)
def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
previous_router_id = floatingip_db.router_id
port_id = internal_ip_address = router_id = None
if (('fixed_ip_address' in fip and fip['fixed_ip_address']) and
not ('port_id' in fip and fip['port_id'])):
msg = _("fixed_ip_address cannot be specified without a port_id")
raise n_exc.BadRequest(resource='floatingip', msg=msg)
if 'port_id' in fip and fip['port_id']:
port_id, internal_ip_address, router_id = self.get_assoc_data(
context,
fip,
floatingip_db['floating_network_id'])
fip_qry = context.session.query(FloatingIP)
try:
fip_qry.filter_by(
fixed_port_id=fip['port_id'],
floating_network_id=floatingip_db['floating_network_id'],
fixed_ip_address=internal_ip_address).one()
raise l3.FloatingIPPortAlreadyAssociated(
port_id=fip['port_id'],
fip_id=floatingip_db['id'],
floating_ip_address=floatingip_db['floating_ip_address'],
fixed_ip=internal_ip_address,
net_id=floatingip_db['floating_network_id'])
except exc.NoResultFound:
pass
floatingip_db.update({'fixed_ip_address': internal_ip_address,
'fixed_port_id': port_id,
'router_id': router_id,
'last_known_router_id': previous_router_id})
def create_floatingip(
self, context, floatingip,
initial_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
fip = floatingip['floatingip']
tenant_id = self._get_tenant_id_for_create(context, fip)
fip_id = uuidutils.generate_uuid()
f_net_id = fip['floating_network_id']
if not self._core_plugin._network_is_external(context, f_net_id):
msg = _("Network %s is not a valid external network") % f_net_id
raise n_exc.BadRequest(resource='floatingip', msg=msg)
with context.session.begin(subtransactions=True):
# This external port is never exposed to the tenant.
# it is used purely for internal system and admin use when
# managing floating IPs.
external_port = self._core_plugin.create_port(context.elevated(), {
'port':
{'tenant_id': '', # tenant intentionally not set
'network_id': f_net_id,
'mac_address': attributes.ATTR_NOT_SPECIFIED,
'fixed_ips': attributes.ATTR_NOT_SPECIFIED,
'admin_state_up': True,
'device_id': fip_id,
'device_owner': DEVICE_OWNER_FLOATINGIP,
'name': ''}})
# Ensure IP addresses are allocated on external port
if not external_port['fixed_ips']:
raise n_exc.ExternalIpAddressExhausted(net_id=f_net_id)
floating_fixed_ip = external_port['fixed_ips'][0]
floating_ip_address = floating_fixed_ip['ip_address']
floatingip_db = FloatingIP(
id=fip_id,
tenant_id=tenant_id,
status=initial_status,
floating_network_id=fip['floating_network_id'],
floating_ip_address=floating_ip_address,
floating_port_id=external_port['id'])
fip['tenant_id'] = tenant_id
# Update association with internal port
# and define external IP address
self._update_fip_assoc(context, fip,
floatingip_db, external_port)
context.session.add(floatingip_db)
router_id = floatingip_db['router_id']
if router_id:
self.l3_rpc_notifier.routers_updated(
context, [router_id],
'create_floatingip')
return self._make_floatingip_dict(floatingip_db)
def update_floatingip(self, context, id, floatingip):
fip = floatingip['floatingip']
with context.session.begin(subtransactions=True):
floatingip_db = self._get_floatingip(context, id)
fip['tenant_id'] = floatingip_db['tenant_id']
fip['id'] = id
fip_port_id = floatingip_db['floating_port_id']
before_router_id = floatingip_db['router_id']
self._update_fip_assoc(context, fip, floatingip_db,
self._core_plugin.get_port(
context.elevated(), fip_port_id))
router_ids = []
if before_router_id:
router_ids.append(before_router_id)
router_id = floatingip_db['router_id']
if router_id and router_id != before_router_id:
router_ids.append(router_id)
if router_ids:
self.l3_rpc_notifier.routers_updated(
context, router_ids, 'update_floatingip')
return self._make_floatingip_dict(floatingip_db)
def update_floatingip_status(self, context, floatingip_id, status):
"""Update operational status for floating IP in neutron DB."""
fip_query = self._model_query(context, FloatingIP).filter(
FloatingIP.id == floatingip_id)
fip_query.update({'status': status}, synchronize_session=False)
def delete_floatingip(self, context, id):
floatingip = self._get_floatingip(context, id)
router_id = floatingip['router_id']
with context.session.begin(subtransactions=True):
context.session.delete(floatingip)
self._core_plugin.delete_port(context.elevated(),
floatingip['floating_port_id'],
l3_port_check=False)
if router_id:
self.l3_rpc_notifier.routers_updated(
context, [router_id],
'delete_floatingip')
def get_floatingip(self, context, id, fields=None):
floatingip = self._get_floatingip(context, id)
return self._make_floatingip_dict(floatingip, fields)
def get_floatingips(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'floatingip', limit,
marker)
if filters is not None:
for key, val in API_TO_DB_COLUMN_MAP.iteritems():
if key in filters:
filters[val] = filters.pop(key)
return self._get_collection(context, FloatingIP,
self._make_floatingip_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def delete_disassociated_floatingips(self, context, network_id):
query = self._model_query(context, FloatingIP)
query = query.filter_by(floating_network_id=network_id,
fixed_port_id=None,
router_id=None)
for fip in query:
self.delete_floatingip(context, fip.id)
def get_floatingips_count(self, context, filters=None):
return self._get_collection_count(context, FloatingIP,
filters=filters)
def prevent_l3_port_deletion(self, context, port_id):
"""Checks to make sure a port is allowed to be deleted.
Raises an exception if this is not the case. This should be called by
any plugin when the API requests the deletion of a port, since some
ports for L3 are not intended to be deleted directly via a DELETE
to /ports, but rather via other API calls that perform the proper
deletion checks.
"""
port_db = self._core_plugin._get_port(context, port_id)
if port_db['device_owner'] in [DEVICE_OWNER_ROUTER_INTF,
DEVICE_OWNER_ROUTER_GW,
DEVICE_OWNER_FLOATINGIP]:
# Raise port in use only if the port has IP addresses
# Otherwise it's a stale port that can be removed
fixed_ips = port_db['fixed_ips']
if fixed_ips:
raise l3.L3PortInUse(port_id=port_id,
device_owner=port_db['device_owner'])
else:
LOG.debug(_("Port %(port_id)s has owner %(port_owner)s, but "
"no IP address, so it can be deleted"),
{'port_id': port_db['id'],
'port_owner': port_db['device_owner']})
def disassociate_floatingips(self, context, port_id):
router_ids = set()
with context.session.begin(subtransactions=True):
fip_qry = context.session.query(FloatingIP)
floating_ips = fip_qry.filter_by(fixed_port_id=port_id)
for floating_ip in floating_ips:
router_ids.add(floating_ip['router_id'])
floating_ip.update({'fixed_port_id': None,
'fixed_ip_address': None,
'router_id': None})
if router_ids:
self.l3_rpc_notifier.routers_updated(
context, list(router_ids),
'disassociate_floatingips')
def _build_routers_list(self, routers, gw_ports):
gw_port_id_gw_port_dict = dict((gw_port['id'], gw_port)
for gw_port in gw_ports)
for router in routers:
gw_port_id = router['gw_port_id']
if gw_port_id:
router['gw_port'] = gw_port_id_gw_port_dict[gw_port_id]
return routers
def _get_sync_routers(self, context, router_ids=None, active=None):
"""Query routers and their gw ports for l3 agent.
Query routers with the router_ids. The gateway ports, if any,
will be queried too.
        The l3 agent has an option to deal with only one router id. In
        addition, when we need to notify the agent about only one router
        (when a router, its interfaces, gw_port or floatingips are modified),
        we will have router_ids.
        @param router_ids: the list of router ids which we want to query.
                           If it is None, all routers will be queried.
@return: a list of dicted routers with dicted gw_port populated if any
"""
filters = {'id': router_ids} if router_ids else {}
if active is not None:
filters['admin_state_up'] = [active]
router_dicts = self.get_routers(context, filters=filters)
gw_port_ids = []
if not router_dicts:
return []
for router_dict in router_dicts:
gw_port_id = router_dict['gw_port_id']
if gw_port_id:
gw_port_ids.append(gw_port_id)
gw_ports = []
if gw_port_ids:
gw_ports = self.get_sync_gw_ports(context, gw_port_ids)
return self._build_routers_list(router_dicts, gw_ports)
def _get_sync_floating_ips(self, context, router_ids):
"""Query floating_ips that relate to list of router_ids."""
if not router_ids:
return []
return self.get_floatingips(context, {'router_id': router_ids})
def get_sync_gw_ports(self, context, gw_port_ids):
if not gw_port_ids:
return []
filters = {'id': gw_port_ids}
gw_ports = self._core_plugin.get_ports(context, filters)
if gw_ports:
self._populate_subnet_for_ports(context, gw_ports)
return gw_ports
def get_sync_interfaces(self, context, router_ids,
device_owner=DEVICE_OWNER_ROUTER_INTF):
"""Query router interfaces that relate to list of router_ids."""
if not router_ids:
return []
filters = {'device_id': router_ids,
'device_owner': [device_owner]}
interfaces = self._core_plugin.get_ports(context, filters)
if interfaces:
self._populate_subnet_for_ports(context, interfaces)
return interfaces
def _populate_subnet_for_ports(self, context, ports):
"""Populate ports with subnet.
These ports already have fixed_ips populated.
"""
if not ports:
return
subnet_id_ports_dict = {}
for port in ports:
fixed_ips = port.get('fixed_ips', [])
if len(fixed_ips) > 1:
LOG.info(_("Ignoring multiple IPs on router port %s"),
port['id'])
continue
elif not fixed_ips:
# Skip ports without IPs, which can occur if a subnet
# attached to a router is deleted
LOG.info(_("Skipping port %s as no IP is configure on it"),
port['id'])
continue
fixed_ip = fixed_ips[0]
my_ports = subnet_id_ports_dict.get(fixed_ip['subnet_id'], [])
my_ports.append(port)
subnet_id_ports_dict[fixed_ip['subnet_id']] = my_ports
if not subnet_id_ports_dict:
return
filters = {'id': subnet_id_ports_dict.keys()}
fields = ['id', 'cidr', 'gateway_ip']
subnet_dicts = self._core_plugin.get_subnets(context, filters, fields)
for subnet_dict in subnet_dicts:
ports = subnet_id_ports_dict.get(subnet_dict['id'], [])
for port in ports:
# TODO(gongysh) stash the subnet into fixed_ips
# to make the payload smaller.
port['subnet'] = {'id': subnet_dict['id'],
'cidr': subnet_dict['cidr'],
'gateway_ip': subnet_dict['gateway_ip']}
def _process_sync_data(self, routers, interfaces, floating_ips):
routers_dict = {}
for router in routers:
routers_dict[router['id']] = router
for floating_ip in floating_ips:
router = routers_dict.get(floating_ip['router_id'])
if router:
router_floatingips = router.get(l3_constants.FLOATINGIP_KEY,
[])
router_floatingips.append(floating_ip)
router[l3_constants.FLOATINGIP_KEY] = router_floatingips
for interface in interfaces:
router = routers_dict.get(interface['device_id'])
if router:
router_interfaces = router.get(l3_constants.INTERFACE_KEY, [])
router_interfaces.append(interface)
router[l3_constants.INTERFACE_KEY] = router_interfaces
return routers_dict.values()
def get_sync_data(self, context, router_ids=None, active=None):
"""Query routers and their related floating_ips, interfaces."""
with context.session.begin(subtransactions=True):
routers = self._get_sync_routers(context,
router_ids=router_ids,
active=active)
router_ids = [router['id'] for router in routers]
floating_ips = self._get_sync_floating_ips(context, router_ids)
interfaces = self.get_sync_interfaces(context, router_ids)
return self._process_sync_data(routers, interfaces, floating_ips)
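# --- Editor's note (not part of the original module) ---
# Hedged illustration of how this mixin is normally consumed: a core or
# service plugin inherits it next to the base v2 DB plugin so that the
# router and floating-IP API operations above become DB-backed. The plugin
# class name and extension alias below are illustrative assumptions.
#
#     from neutron.db import db_base_plugin_v2
#     from neutron.db import l3_db
#
#     class ExamplePlugin(db_base_plugin_v2.NeutronDbPluginV2,
#                         l3_db.L3_NAT_db_mixin):
#         supported_extension_aliases = ["router"]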
|
apache-2.0
| -7,870,026,785,524,489,000
| 46.581136
| 79
| 0.544431
| false
| 4.279394
| false
| false
| false
|
SunPower/Carousel
|
simkit/contrib/readers.py
|
1
|
6137
|
"""
Custom data readers including :class:`simkit.contrib.readers.ArgumentReader`,
:class:`simkit.contrib.readers.DjangoModelReader` and
:class:`simkit.contrib.readers.HDF5Reader`.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import numpy as np
import h5py
from simkit.core.data_readers import DataReader
from simkit.core.data_sources import DataParameter
from simkit.core import Q_
import logging
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
def copy_model_instance(obj):
"""
Copy Django model instance as a dictionary excluding automatically created
fields like an auto-generated sequence as a primary key or an auto-created
many-to-one reverse relation.
:param obj: Django model object
:return: copy of model instance as dictionary
"""
meta = getattr(obj, '_meta') # make pycharm happy
# dictionary of model values excluding auto created and related fields
return {f.name: getattr(obj, f.name)
for f in meta.get_fields(include_parents=False)
if not f.auto_created}
# TODO: make parameters consistent for all readers
# TODO: parameters set by attributes in data source model fields
# EG: ghi = FloatField('GHI', units='W/m**2')
# EG: solar_azimuth = FloatField('solar azimuth', units='degrees')
# TODO: some parameters set in class Meta
# EG: class Meta: args = ['GHI', 'azimuth']
class ArgumentReader(DataReader):
"""
Read arguments passed directly to a simulation.
The argument parameters dictionary should have two keys: `args` and `kwargs`
which consist of the names and attributes of the positional and keyword
arguments respectively. For example::
{
'GHI': {'units': 'W/m**2', 'isconstant': False, 'argpos': 0},
'azimuth': {'units': 'degrees', 'isconstant': False, 'argpos': 1},
'DNI': {'units': 'W/m**2', 'isconstant': False},
'zenith': {'units': 'degrees', 'isconstant': False}
}
"""
#: True if reader accepts ``filename`` argument
is_file_reader = False # not a file reader
def load_data(self, *args, **kwargs):
"""
Collects positional and keyword arguments into `data` and applies units.
:return: data
"""
# get positional argument names from parameters and apply them to args
# update data with additional kwargs
argpos = {
v['extras']['argpos']: k for k, v in self.parameters.iteritems()
if 'argpos' in v['extras']
}
data = dict(
{argpos[n]: a for n, a in enumerate(args)}, **kwargs
)
return self.apply_units_to_cache(data)
def apply_units_to_cache(self, data):
"""
Applies units to data when a proxy reader is used. For example if the
data is cached as JSON and retrieved using the
:class:`~simkit.core.data_readers.JSONReader`, then units can be
applied from the original parameter schema.
:param data: Data read by proxy reader.
:return: data with units applied
"""
# if units key exists then apply
for k, v in self.parameters.iteritems():
if v and v.get('units'):
data[k] = Q_(data[k], v.get('units'))
return data
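def _example_argpos_mapping():
    """Illustrative sketch, not part of the original API: mirrors how
    :meth:`ArgumentReader.load_data` maps positional arguments back to
    parameter names via the ``argpos`` entries stored under ``extras``.
    The parameter names (GHI, azimuth, DNI) are assumptions borrowed from
    the class docstring."""
    params = {'GHI': {'extras': {'argpos': 0}},
              'azimuth': {'extras': {'argpos': 1}},
              'DNI': {'extras': {}}}
    argpos = {v['extras']['argpos']: k for k, v in params.items()
              if 'argpos' in v['extras']}
    args, kwargs = (1000.0, 180.0), {'DNI': 850.0}
    # positional args fill GHI and azimuth, DNI arrives as a keyword
    return dict({argpos[n]: a for n, a in enumerate(args)}, **kwargs)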
class DjangoModelReader(ArgumentReader):
"""
Reads arguments that are Django objects or lists of objects.
"""
def __init__(self, parameters=None, meta=None):
#: Django model
self.model = meta.model
model_meta = getattr(self.model, '_meta') # make pycharm happy
# model fields excluding AutoFields and related fields like one-to-many
all_model_fields = [
f for f in model_meta.get_fields(include_parents=False)
if not f.auto_created
]
all_field_names = [f.name for f in all_model_fields] # field names
# use all fields if no parameters given
if parameters is None:
parameters = DataParameter.fromkeys(
all_field_names, {}
)
fields = getattr(meta, 'fields', all_field_names) # specified fields
LOGGER.debug('fields:\n%r', fields)
exclude = getattr(meta, 'exclude', []) # specifically excluded fields
for f in all_model_fields:
# skip any fields not specified in data source
if f.name not in fields or f.name in exclude:
LOGGER.debug('skipping %s', f.name)
continue
# add field to parameters or update parameters with field type
param_dict = {'ftype': f.get_internal_type()}
if f.name in parameters:
parameters[f.name]['extras'].update(param_dict)
else:
parameters[f.name] = DataParameter(**param_dict)
super(DjangoModelReader, self).__init__(parameters, meta)
def load_data(self, model_instance, *args, **kwargs):
"""
Apply units to model.
:return: data
"""
model_dict = copy_model_instance(model_instance)
return super(DjangoModelReader, self).load_data(**model_dict)
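# Illustrative sketch, not part of the original module: DjangoModelReader
# only needs a ``meta`` object exposing ``model`` and, optionally,
# ``fields``/``exclude``. A hypothetical stand-in (the PVSystem model and
# its field names are assumptions, not a real schema):
#
#     class _Meta(object):
#         model = PVSystem                     # a Django model class
#         fields = ['latitude', 'longitude']   # restrict to these fields
#         exclude = []                         # or drop specific fields
#
#     reader = DjangoModelReader(meta=_Meta)
#     data = reader.load_data(pv_system_instance)  # dict of field values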
class HDF5Reader(ArgumentReader):
"""
Reads data from an HDF5 file
"""
#: True if reader accepts ``filename`` argument
is_file_reader = True # is a file reader
def load_data(self, h5file, *args, **kwargs):
with h5py.File(h5file) as h5f:
h5data = dict.fromkeys(self.parameters)
for param, attrs in self.parameters.iteritems():
LOGGER.debug('parameter:\n%r', param)
node = attrs['extras']['node'] # full name of node
# composite datatype member
member = attrs['extras'].get('member')
if member is not None:
# if node is a table then get column/field/description
h5data[param] = np.asarray(h5f[node][member]) # copy member
else:
h5data[param] = np.asarray(h5f[node]) # copy array
return super(HDF5Reader, self).load_data(**h5data)
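# Illustrative sketch, not part of the original module: HDF5Reader expects
# each parameter to carry the full HDF5 node path (and, for compound
# datasets, a ``member`` name) under ``extras``. The paths below are
# assumptions, not a real file layout:
#
#     parameters = {
#         'GHI': {'extras': {'node': '/weather/irradiance', 'member': 'ghi'}},
#         'temp_air': {'extras': {'node': '/weather/temp_air'}},
#     }
#     # load_data('data.h5') would copy '/weather/irradiance'['ghi'] and
#     # '/weather/temp_air' into numpy arrays keyed by parameter name.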
|
bsd-3-clause
| -144,540,593,695,385,950
| 37.35625
| 80
| 0.61447
| false
| 4.058862
| false
| false
| false
|
anthonyng2/Machine-Learning-For-Finance
|
Regression Based Machine Learning for Algorithmic Trading/Pairs Trading scikit-learn Linear.py
|
1
|
5074
|
'''
Anthony NG
@ 2017
## MIT License
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
import numpy as np
import pandas as pd
from zipline.utils import tradingcalendar
import pytz
from sklearn.linear_model import LinearRegression
reg = LinearRegression(fit_intercept=True)
def initialize(context):
# Quantopian backtester specific variables
set_slippage(slippage.FixedSlippage(spread=0))
set_commission(commission.PerTrade(cost=1))
set_symbol_lookup_date('2014-01-01')
context.stock_pairs = [(sid(5885), sid(4283))]
# set_benchmark(context.y)
context.num_pairs = len(context.stock_pairs)
# strategy specific variables
context.lookback = 20 # used for regression
context.z_window = 20 # used for zscore calculation, must be <= lookback
context.spread = np.ndarray((context.num_pairs, 0))
# context.hedgeRatioTS = np.ndarray((context.num_pairs, 0))
context.inLong = [False] * context.num_pairs
context.inShort = [False] * context.num_pairs
# Only do work 30 minutes before close
schedule_function(func=check_pair_status, date_rule=date_rules.every_day(), time_rule=time_rules.market_close(minutes=30))
# Will be called on every trade event for the securities you specify.
def handle_data(context, data):
# Our work is now scheduled in check_pair_status
pass
def check_pair_status(context, data):
if get_open_orders():
return
prices = history(35, '1d', 'price').iloc[-context.lookback::]
new_spreads = np.ndarray((context.num_pairs, 1))
for i in range(context.num_pairs):
(stock_y, stock_x) = context.stock_pairs[i]
Y = prices[stock_y]
X = prices[stock_x]
try:
hedge = hedge_ratio(Y, X, add_const=True)
except ValueError as e:
log.debug(e)
return
# context.hedgeRatioTS = np.append(context.hedgeRatioTS, hedge)
new_spreads[i, :] = Y[-1] - hedge * X[-1]
if context.spread.shape[1] > context.z_window:
# Keep only the z-score lookback period
spreads = context.spread[i, -context.z_window:]
zscore = (spreads[-1] - spreads.mean()) / spreads.std()
if context.inShort[i] and zscore < 0.0:
order_target(stock_y, 0)
order_target(stock_x, 0)
context.inShort[i] = False
context.inLong[i] = False
record(X_pct=0, Y_pct=0)
return
if context.inLong[i] and zscore > 0.0:
order_target(stock_y, 0)
order_target(stock_x, 0)
context.inShort[i] = False
context.inLong[i] = False
record(X_pct=0, Y_pct=0)
return
if zscore < -1.0 and (not context.inLong[i]):
# Only trade if NOT already in a trade
y_target_shares = 1
X_target_shares = -hedge
context.inLong[i] = True
context.inShort[i] = False
(y_target_pct, x_target_pct) = computeHoldingsPct( y_target_shares,X_target_shares, Y[-1], X[-1] )
order_target_percent( stock_y, y_target_pct * (1.0/context.num_pairs) )
order_target_percent( stock_x, x_target_pct * (1.0/context.num_pairs) )
record(Y_pct=y_target_pct, X_pct=x_target_pct)
return
if zscore > 1.0 and (not context.inShort[i]):
# Only trade if NOT already in a trade
y_target_shares = -1
X_target_shares = hedge
context.inShort[i] = True
context.inLong[i] = False
(y_target_pct, x_target_pct) = computeHoldingsPct( y_target_shares, X_target_shares, Y[-1], X[-1] )
order_target_percent( stock_y, y_target_pct * (1.0/context.num_pairs))
order_target_percent( stock_x, x_target_pct * (1.0/context.num_pairs))
record(Y_pct=y_target_pct, X_pct=x_target_pct)
context.spread = np.hstack([context.spread, new_spreads])
def hedge_ratio(Y, X, add_const=True):
    # Ordinary least-squares hedge ratio. The shared LinearRegression
    # instance is created with fit_intercept=True, so an intercept is
    # always fitted and the add_const flag has no extra effect here.
    reg.fit(X.reshape(-1, 1), Y)
    return reg.coef_
def computeHoldingsPct(yShares, xShares, yPrice, xPrice):
yDol = yShares * yPrice
xDol = xShares * xPrice
notionalDol = abs(yDol) + abs(xDol)
y_target_pct = yDol / notionalDol
x_target_pct = xDol / notionalDol
return (y_target_pct, x_target_pct)
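def _example_zscore_signal():
    # Illustrative sketch, not part of the strategy: reproduces the
    # z-score rule used in check_pair_status on a toy spread series.
    # Positions open when |zscore| > 1 and close when it crosses back
    # through zero.
    spreads = np.array([0.2, -0.1, 0.4, 0.1, -0.3, 0.9, 1.6])
    zscore = (spreads[-1] - spreads.mean()) / spreads.std()
    go_long = zscore < -1.0    # long Y, short hedge * X
    go_short = zscore > 1.0    # short Y, long hedge * X
    return zscore, go_long, go_short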
|
mit
| -402,640,299,320,053,200
| 37.333333
| 460
| 0.58987
| false
| 3.528512
| false
| false
| false
|
ChinaMassClouds/copenstack-server
|
openstack/src/horizon-2014.2/openstack_dashboard/dashboards/project/access_and_security/floating_ips/workflows.py
|
1
|
6184
|
# Copyright 2012 Nebula, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from neutronclient.common import exceptions as neutron_exc
from horizon import exceptions
from horizon import forms
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.utils import filters
from openstack_dashboard.openstack.common.log import operate_log
ALLOCATE_URL = "horizon:project:access_and_security:floating_ips:allocate"
class AssociateIPAction(workflows.Action):
ip_id = forms.DynamicTypedChoiceField(label=_("IP Address"),
coerce=filters.get_int_or_uuid,
empty_value=None,
add_item_link=ALLOCATE_URL)
instance_id = forms.ChoiceField(label=_("Instance"))
class Meta:
name = _("IP Address")
help_text = _("Select the IP address you wish to associate with "
"the selected instance.")
def __init__(self, *args, **kwargs):
super(AssociateIPAction, self).__init__(*args, **kwargs)
if api.base.is_service_enabled(self.request, 'network'):
label = _("Port to be associated")
else:
label = _("Instance to be associated")
self.fields['instance_id'].label = label
        # If AssociateIP is invoked from the instance menu, the instance_id
        # parameter is passed in the URL. In a Neutron-based Floating IP
        # implementation the association target is not an instance but a
        # port, so we need to look up the association target for the
        # received instance_id and set the initial value of the
        # instance_id ChoiceField.
q_instance_id = self.request.GET.get('instance_id')
if q_instance_id:
targets = self._get_target_list()
target_id = api.network.floating_ip_target_get_by_instance(
self.request, q_instance_id, targets)
self.initial['instance_id'] = target_id
def populate_ip_id_choices(self, request, context):
ips = []
redirect = reverse('horizon:project:access_and_security:index')
try:
ips = api.network.tenant_floating_ip_list(self.request)
except neutron_exc.ConnectionFailed:
exceptions.handle(self.request, redirect=redirect)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve floating IP addresses.'),
redirect=redirect)
options = sorted([(ip.id, ip.ip) for ip in ips if not ip.port_id])
if options:
options.insert(0, ("", _("Select an IP address")))
else:
options = [("", _("No floating IP addresses allocated"))]
return options
@memoized.memoized_method
def _get_target_list(self):
targets = []
try:
targets = api.network.floating_ip_target_list(self.request)
except Exception:
redirect = reverse('horizon:project:access_and_security:index')
exceptions.handle(self.request,
_('Unable to retrieve instance list.'),
redirect=redirect)
return targets
def populate_instance_id_choices(self, request, context):
targets = self._get_target_list()
instances = []
for target in targets:
instances.append((target.id, target.name))
# Sort instances for easy browsing
instances = sorted(instances, key=lambda x: x[1])
neutron_enabled = api.base.is_service_enabled(request, 'network')
if instances:
if neutron_enabled:
label = _("Select a port")
else:
label = _("Select an instance")
instances.insert(0, ("", label))
else:
if neutron_enabled:
label = _("No ports available")
else:
label = _("No instances available")
instances = (("", label),)
return instances
class AssociateIP(workflows.Step):
action_class = AssociateIPAction
contributes = ("ip_id", "instance_id", "ip_address")
def contribute(self, data, context):
context = super(AssociateIP, self).contribute(data, context)
ip_id = data.get('ip_id', None)
if ip_id:
ip_choices = dict(self.action.fields['ip_id'].choices)
context["ip_address"] = ip_choices.get(ip_id, None)
return context
class IPAssociationWorkflow(workflows.Workflow):
slug = "ip_association"
name = _("Manage Floating IP Associations")
finalize_button_name = _("Associate")
success_message = _('IP address %s associated.')
failure_message = _('Unable to associate IP address %s.')
success_url = "horizon:project:access_and_security:index"
default_steps = (AssociateIP,)
def format_status_message(self, message):
return message % self.context.get('ip_address', 'unknown IP address')
def handle(self, request, data):
try:
api.network.floating_ip_associate(request,
data['ip_id'],
data['instance_id'])
operate_log(request.user.username,
request.user.roles,
"floating ip associate")
except Exception:
exceptions.handle(request)
return False
return True
|
gpl-2.0
| 661,193,687,322,583,200
| 38.139241
| 78
| 0.602523
| false
| 4.484409
| false
| false
| false
|
dbarbier/privot
|
python/test/t_DistFunc_beta.py
|
1
|
1686
|
#! /usr/bin/env python
from openturns import *
TESTPREAMBLE()
RandomGenerator().SetSeed(0)
try :
# Beta related functions
# pBeta
p1Min = 0.2
p1Max = 5.0
n1 = 5
p2Min = 0.2
p2Max = 5.0
n2 = 5
xMin = 0.1
xMax = 0.9
nX = 5
for i1 in range(n1):
p1 = p1Min + (p1Max - p1Min) * i1 / (n1 - 1)
for i2 in range(n2):
p2 = p2Min + (p2Max - p2Min) * i2 / (n2 - 1)
for iX in range(nX):
x = xMin + (xMax - xMin) * iX / (nX - 1)
print "pBeta(", p1, ", ", p2, ", ", x, ")=%.6g" % DistFunc.pBeta(p1, p2, x), ", complementary=%.6g" % DistFunc.pBeta(p1, p2, x, True)
# qBeta
p1Min = 0.2
p1Max = 5.0
n1 = 5
p2Min = 0.2
p2Max = 5.0
n2 = 5
qMin = 0.1
qMax = 0.9
nQ = 5
for i1 in range(n1):
p1 = p1Min + (p1Max - p1Min) * i1 / (n1 - 1)
for i2 in range(n2):
p2 = p2Min + (p2Max - p2Min) * i2 / (n2 - 1)
for iQ in range(nQ):
q = qMin + (qMax - qMin) * iQ / (nQ - 1)
print "qBeta(", p1, ", ", p2, ", ", q, ")=%.6g" % DistFunc.qBeta(p1, p2, q), ", complementary=%.6g" % DistFunc.qBeta(p1, p2, q, True)
# rBeta
p1Min = 0.2
p1Max = 5.0
n1 = 5
p2Min = 0.2
p2Max = 5.0
n2 = 5
nR = 5
for i1 in range(n1):
p1 = p1Min + (p1Max - p1Min) * i1 / (n1 - 1)
for i2 in range(n2):
p2 = p2Min + (p2Max - p2Min) * i2 / (n2 - 1)
for iR in range(nR):
print "rBeta(", p1, ", ", p2, ")=%.6g" % DistFunc.rBeta(p1, p2)
except :
import sys
print "t_DistFunc_beta.py", sys.exc_type, sys.exc_value
|
lgpl-3.0
| 495,374,394,030,962,700
| 26.639344
| 149
| 0.456109
| false
| 2.358042
| false
| false
| false
|
badele/home-assistant
|
homeassistant/components/light/tellstick.py
|
1
|
4248
|
"""
homeassistant.components.light.tellstick
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for Tellstick lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.tellstick/
"""
from homeassistant.components.light import Light, ATTR_BRIGHTNESS
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP,
ATTR_FRIENDLY_NAME)
REQUIREMENTS = ['tellcore-py==1.1.2']
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Find and return Tellstick lights. """
import tellcore.telldus as telldus
from tellcore.library import DirectCallbackDispatcher
import tellcore.constants as tellcore_constants
core = telldus.TelldusCore(callback_dispatcher=DirectCallbackDispatcher())
switches_and_lights = core.devices()
lights = []
for switch in switches_and_lights:
if switch.methods(tellcore_constants.TELLSTICK_DIM):
lights.append(TellstickLight(switch))
def _device_event_callback(id_, method, data, cid):
""" Called from the TelldusCore library to update one device """
for light_device in lights:
if light_device.tellstick_device.id == id_:
# Execute the update in another thread
light_device.update_ha_state(True)
break
callback_id = core.register_device_event(_device_event_callback)
def unload_telldus_lib(event):
""" Un-register the callback bindings """
if callback_id is not None:
core.unregister_callback(callback_id)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, unload_telldus_lib)
add_devices_callback(lights)
class TellstickLight(Light):
""" Represents a Tellstick light. """
def __init__(self, tellstick_device):
import tellcore.constants as tellcore_constants
self.tellstick_device = tellstick_device
self.state_attr = {ATTR_FRIENDLY_NAME: tellstick_device.name}
self._brightness = 0
self.last_sent_command_mask = (tellcore_constants.TELLSTICK_TURNON |
tellcore_constants.TELLSTICK_TURNOFF |
tellcore_constants.TELLSTICK_DIM |
tellcore_constants.TELLSTICK_UP |
tellcore_constants.TELLSTICK_DOWN)
@property
def name(self):
""" Returns the name of the switch if any. """
return self.tellstick_device.name
@property
def is_on(self):
""" True if switch is on. """
return self._brightness > 0
@property
def brightness(self):
""" Brightness of this light between 0..255. """
return self._brightness
def turn_off(self, **kwargs):
""" Turns the switch off. """
self.tellstick_device.turn_off()
self._brightness = 0
self.update_ha_state()
def turn_on(self, **kwargs):
""" Turns the switch on. """
brightness = kwargs.get(ATTR_BRIGHTNESS)
if brightness is None:
self._brightness = 255
else:
self._brightness = brightness
self.tellstick_device.dim(self._brightness)
self.update_ha_state()
def update(self):
""" Update state of the light. """
import tellcore.constants as tellcore_constants
last_command = self.tellstick_device.last_sent_command(
self.last_sent_command_mask)
if last_command == tellcore_constants.TELLSTICK_TURNON:
self._brightness = 255
elif last_command == tellcore_constants.TELLSTICK_TURNOFF:
self._brightness = 0
elif (last_command == tellcore_constants.TELLSTICK_DIM or
last_command == tellcore_constants.TELLSTICK_UP or
last_command == tellcore_constants.TELLSTICK_DOWN):
last_sent_value = self.tellstick_device.last_sent_value()
if last_sent_value is not None:
self._brightness = last_sent_value
@property
def should_poll(self):
""" Tells Home Assistant not to poll this entity. """
return False
|
mit
| 1,873,232,507,389,760,800
| 33.819672
| 78
| 0.621469
| false
| 4.068966
| false
| false
| false
|
juliantaylor/scipy
|
scipy/interpolate/fitpack2.py
|
1
|
55084
|
"""
fitpack --- curve and surface fitting with splines
fitpack is based on a collection of Fortran routines DIERCKX
by P. Dierckx (see http://www.netlib.org/dierckx/) transformed
to double routines by Pearu Peterson.
"""
# Created by Pearu Peterson, June,August 2003
from __future__ import division, print_function, absolute_import
__all__ = [
'UnivariateSpline',
'InterpolatedUnivariateSpline',
'LSQUnivariateSpline',
'BivariateSpline',
'LSQBivariateSpline',
'SmoothBivariateSpline',
'LSQSphereBivariateSpline',
'SmoothSphereBivariateSpline',
'RectBivariateSpline',
'RectSphereBivariateSpline']
import warnings
from numpy import zeros, concatenate, alltrue, ravel, all, diff, array, ones
import numpy as np
from . import fitpack
from . import dfitpack
################ Univariate spline ####################
_curfit_messages = {1:"""
The required storage space exceeds the available storage space, as
specified by the parameter nest: nest too small. If nest is already
large (say nest > m/2), it may also indicate that s is too small.
The approximation returned is the weighted least-squares spline
according to the knots t[0],t[1],...,t[n-1]. (n=nest) the parameter fp
gives the corresponding weighted sum of squared residuals (fp>s).
""",
2:"""
A theoretically impossible result was found during the iteration
process for finding a smoothing spline with fp = s: s too small.
There is an approximation returned but the corresponding weighted sum
of squared residuals does not satisfy the condition abs(fp-s)/s < tol.""",
3:"""
The maximal number of iterations maxit (set to 20 by the program)
allowed for finding a smoothing spline with fp=s has been reached: s
too small.
There is an approximation returned but the corresponding weighted sum
of squared residuals does not satisfy the condition abs(fp-s)/s < tol.""",
10:"""
Error on entry, no approximation returned. The following conditions
must hold:
xb<=x[0]<x[1]<...<x[m-1]<=xe, w[i]>0, i=0..m-1
if iopt=-1:
xb<t[k+1]<t[k+2]<...<t[n-k-2]<xe"""
}
class UnivariateSpline(object):
"""
One-dimensional smoothing spline fit to a given set of data points.
Fits a spline y=s(x) of degree `k` to the provided `x`, `y` data. `s`
specifies the number of knots by specifying a smoothing condition.
Parameters
----------
x : (N,) array_like
1-D array of independent input data. Must be increasing.
y : (N,) array_like
1-D array of dependent input data, of the same length as `x`.
w : (N,) array_like, optional
Weights for spline fitting. Must be positive. If None (default),
weights are all equal.
bbox : (2,) array_like, optional
2-sequence specifying the boundary of the approximation interval. If
None (default), ``bbox=[x[0], x[-1]]``.
k : int, optional
Degree of the smoothing spline. Must be <= 5.
s : float or None, optional
Positive smoothing factor used to choose the number of knots. Number
of knots will be increased until the smoothing condition is satisfied:
sum((w[i]*(y[i]-s(x[i])))**2,axis=0) <= s
If None (default), s=len(w) which should be a good value if 1/w[i] is
an estimate of the standard deviation of y[i]. If 0, spline will
interpolate through all data points.
See Also
--------
InterpolatedUnivariateSpline : Subclass with smoothing forced to 0
LSQUnivariateSpline : Subclass in which knots are user-selected instead of
being set by smoothing condition
splrep : An older, non object-oriented wrapping of FITPACK
splev, sproot, splint, spalde
BivariateSpline : A similar class for two-dimensional spline interpolation
Notes
-----
The number of data points must be larger than the spline degree `k`.
Examples
--------
>>> from numpy import linspace,exp
>>> from numpy.random import randn
>>> import matplotlib.pyplot as plt
>>> from scipy.interpolate import UnivariateSpline
>>> x = linspace(-3, 3, 100)
>>> y = exp(-x**2) + randn(100)/10
>>> s = UnivariateSpline(x, y, s=1)
>>> xs = linspace(-3, 3, 1000)
>>> ys = s(xs)
>>> plt.plot(x, y, '.-')
>>> plt.plot(xs, ys)
>>> plt.show()
xs,ys is now a smoothed, super-sampled version of the noisy gaussian x,y.
"""
def __init__(self, x, y, w=None, bbox=[None]*2, k=3, s=None):
"""
Input:
x,y - 1-d sequences of data points (x must be
in strictly ascending order)
Optional input:
w - positive 1-d sequence of weights
bbox - 2-sequence specifying the boundary of
the approximation interval.
By default, bbox=[x[0],x[-1]]
k=3 - degree of the univariate spline.
s - positive smoothing factor defined for
estimation condition:
sum((w[i]*(y[i]-s(x[i])))**2,axis=0) <= s
Default s=len(w) which should be a good value
if 1/w[i] is an estimate of the standard
deviation of y[i].
"""
# _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
data = dfitpack.fpcurf0(x,y,k,w=w,
xb=bbox[0],xe=bbox[1],s=s)
if data[-1] == 1:
# nest too small, setting to maximum bound
data = self._reset_nest(data)
self._data = data
self._reset_class()
@classmethod
def _from_tck(cls, tck):
"""Construct a spline object from given tck"""
self = cls.__new__(cls)
t, c, k = tck
self._eval_args = tck
#_data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
self._data = (None,None,None,None,None,k,None,len(t),t,
c,None,None,None,None)
return self
def _reset_class(self):
data = self._data
n,t,c,k,ier = data[7],data[8],data[9],data[5],data[-1]
self._eval_args = t[:n],c[:n],k
if ier == 0:
# the spline returned has a residual sum of squares fp
# such that abs(fp-s)/s <= tol with tol a relative
# tolerance set to 0.001 by the program
pass
elif ier == -1:
# the spline returned is an interpolating spline
self._set_class(InterpolatedUnivariateSpline)
elif ier == -2:
# the spline returned is the weighted least-squares
# polynomial of degree k. In this extreme case fp gives
# the upper bound fp0 for the smoothing factor s.
self._set_class(LSQUnivariateSpline)
else:
# error
if ier == 1:
self._set_class(LSQUnivariateSpline)
message = _curfit_messages.get(ier,'ier=%s' % (ier))
warnings.warn(message)
def _set_class(self, cls):
self._spline_class = cls
if self.__class__ in (UnivariateSpline, InterpolatedUnivariateSpline,
LSQUnivariateSpline):
self.__class__ = cls
else:
# It's an unknown subclass -- don't change class. cf. #731
pass
def _reset_nest(self, data, nest=None):
n = data[10]
if nest is None:
k,m = data[5],len(data[0])
nest = m+k+1 # this is the maximum bound for nest
else:
if not n <= nest:
raise ValueError("`nest` can only be increased")
t, c, fpint, nrdata = [np.resize(data[n], nest) for n in [8,9,11,12]]
args = data[:8] + (t,c,n,fpint,nrdata,data[13])
data = dfitpack.fpcurf1(*args)
return data
def set_smoothing_factor(self, s):
""" Continue spline computation with the given smoothing
factor s and with the knots found at the last call.
"""
data = self._data
if data[6] == -1:
            warnings.warn('smoothing factor unchanged for '
'LSQ spline with fixed knots')
return
args = data[:6] + (s,) + data[7:]
data = dfitpack.fpcurf1(*args)
if data[-1] == 1:
# nest too small, setting to maximum bound
data = self._reset_nest(data)
self._data = data
self._reset_class()
def __call__(self, x, nu=0):
""" Evaluate spline (or its nu-th derivative) at positions x.
Note: x can be unordered but the evaluation is more efficient
if x is (partially) ordered.
"""
x = np.asarray(x)
# empty input yields empty output
if x.size == 0:
return array([])
# if nu is None:
# return dfitpack.splev(*(self._eval_args+(x,)))
# return dfitpack.splder(nu=nu,*(self._eval_args+(x,)))
return fitpack.splev(x, self._eval_args, der=nu)
def get_knots(self):
""" Return positions of (boundary and interior) knots of the spline.
"""
data = self._data
k,n = data[5],data[7]
return data[8][k:n-k]
def get_coeffs(self):
"""Return spline coefficients."""
data = self._data
k,n = data[5],data[7]
return data[9][:n-k-1]
def get_residual(self):
"""Return weighted sum of squared residuals of the spline
approximation: ``sum((w[i] * (y[i]-s(x[i])))**2, axis=0)``.
"""
return self._data[10]
def integral(self, a, b):
""" Return definite integral of the spline between two given points.
"""
return dfitpack.splint(*(self._eval_args+(a,b)))
def derivatives(self, x):
""" Return all derivatives of the spline at the point x."""
d,ier = dfitpack.spalde(*(self._eval_args+(x,)))
if not ier == 0:
raise ValueError("Error code returned by spalde: %s" % ier)
return d
def roots(self):
""" Return the zeros of the spline.
Restriction: only cubic splines are supported by fitpack.
"""
k = self._data[5]
if k == 3:
z,m,ier = dfitpack.sproot(*self._eval_args[:2])
if not ier == 0:
raise ValueError("Error code returned by spalde: %s" % ier)
return z[:m]
raise NotImplementedError('finding roots unsupported for '
'non-cubic splines')
def derivative(self, n=1):
"""
Construct a new spline representing the derivative of this spline.
.. versionadded:: 0.13.0
Parameters
----------
n : int, optional
Order of derivative to evaluate. Default: 1
Returns
-------
spline : UnivariateSpline
Spline of order k2=k-n representing the derivative of this
spline.
See Also
--------
splder, antiderivative
Examples
--------
This can be used for finding maxima of a curve:
>>> from scipy.interpolate import UnivariateSpline
>>> x = np.linspace(0, 10, 70)
>>> y = np.sin(x)
>>> spl = UnivariateSpline(x, y, k=4, s=0)
Now, differentiate the spline and find the zeros of the
derivative. (NB: `sproot` only works for order 3 splines, so we
fit an order 4 spline):
>>> spl.derivative().roots() / np.pi
array([ 0.50000001, 1.5 , 2.49999998])
This agrees well with roots :math:`\pi/2 + n\pi` of `cos(x) = sin'(x)`.
"""
tck = fitpack.splder(self._eval_args, n)
return UnivariateSpline._from_tck(tck)
def antiderivative(self, n=1):
"""
Construct a new spline representing the antiderivative of this spline.
.. versionadded:: 0.13.0
Parameters
----------
n : int, optional
Order of antiderivative to evaluate. Default: 1
Returns
-------
spline : UnivariateSpline
Spline of order k2=k+n representing the antiderivative of this
spline.
See Also
--------
splantider, derivative
Examples
--------
>>> from scipy.interpolate import UnivariateSpline
>>> x = np.linspace(0, np.pi/2, 70)
>>> y = 1 / np.sqrt(1 - 0.8*np.sin(x)**2)
>>> spl = UnivariateSpline(x, y, s=0)
The derivative is the inverse operation of the antiderivative,
although some floating point error accumulates:
>>> spl(1.7), spl.antiderivative().derivative()(1.7)
(array(2.1565429877197317), array(2.1565429877201865))
Antiderivative can be used to evaluate definite integrals:
>>> ispl = spl.antiderivative()
>>> ispl(np.pi/2) - ispl(0)
2.2572053588768486
This is indeed an approximation to the complete elliptic integral
:math:`K(m) = \\int_0^{\\pi/2} [1 - m\\sin^2 x]^{-1/2} dx`:
>>> from scipy.special import ellipk
>>> ellipk(0.8)
2.2572053268208538
"""
tck = fitpack.splantider(self._eval_args, n)
return UnivariateSpline._from_tck(tck)
class InterpolatedUnivariateSpline(UnivariateSpline):
"""
One-dimensional interpolating spline for a given set of data points.
Fits a spline y=s(x) of degree `k` to the provided `x`, `y` data. Spline
function passes through all provided points. Equivalent to
`UnivariateSpline` with s=0.
Parameters
----------
x : (N,) array_like
Input dimension of data points -- must be increasing
y : (N,) array_like
        Input dimension of data points
w : (N,) array_like, optional
Weights for spline fitting. Must be positive. If None (default),
weights are all equal.
bbox : (2,) array_like, optional
2-sequence specifying the boundary of the approximation interval. If
None (default), bbox=[x[0],x[-1]].
k : int, optional
Degree of the smoothing spline. Must be 1 <= `k` <= 5.
See Also
--------
UnivariateSpline : Superclass -- allows knots to be selected by a
smoothing condition
LSQUnivariateSpline : spline for which knots are user-selected
splrep : An older, non object-oriented wrapping of FITPACK
splev, sproot, splint, spalde
BivariateSpline : A similar class for two-dimensional spline interpolation
Notes
-----
The number of data points must be larger than the spline degree `k`.
Examples
--------
>>> from numpy import linspace,exp
>>> from numpy.random import randn
>>> from scipy.interpolate import InterpolatedUnivariateSpline
>>> import matplotlib.pyplot as plt
>>> x = linspace(-3, 3, 100)
>>> y = exp(-x**2) + randn(100)/10
>>> s = InterpolatedUnivariateSpline(x, y)
>>> xs = linspace(-3, 3, 1000)
>>> ys = s(xs)
>>> plt.plot(x, y, '.-')
>>> plt.plot(xs, ys)
>>> plt.show()
xs,ys is now a smoothed, super-sampled version of the noisy gaussian x,y
"""
def __init__(self, x, y, w=None, bbox=[None]*2, k=3):
"""
Input:
x,y - 1-d sequences of data points (x must be
in strictly ascending order)
Optional input:
w - positive 1-d sequence of weights
bbox - 2-sequence specifying the boundary of
the approximation interval.
By default, bbox=[x[0],x[-1]]
k=3 - degree of the univariate spline.
"""
# _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
self._data = dfitpack.fpcurf0(x,y,k,w=w,
xb=bbox[0],xe=bbox[1],s=0)
self._reset_class()
class LSQUnivariateSpline(UnivariateSpline):
"""
One-dimensional spline with explicit internal knots.
Fits a spline y=s(x) of degree `k` to the provided `x`, `y` data. `t`
specifies the internal knots of the spline
Parameters
----------
x : (N,) array_like
Input dimension of data points -- must be increasing
y : (N,) array_like
Input dimension of data points
t : (M,) array_like
interior knots of the spline. Must be in ascending order
and bbox[0]<t[0]<...<t[-1]<bbox[-1]
w : (N,) array_like, optional
weights for spline fitting. Must be positive. If None (default),
weights are all equal.
bbox : (2,) array_like, optional
2-sequence specifying the boundary of the approximation interval. If
None (default), bbox=[x[0],x[-1]].
k : int, optional
Degree of the smoothing spline. Must be 1 <= `k` <= 5.
Raises
------
ValueError
If the interior knots do not satisfy the Schoenberg-Whitney conditions
See Also
--------
UnivariateSpline : Superclass -- knots are specified by setting a
smoothing condition
InterpolatedUnivariateSpline : spline passing through all points
splrep : An older, non object-oriented wrapping of FITPACK
splev, sproot, splint, spalde
BivariateSpline : A similar class for two-dimensional spline interpolation
Notes
-----
The number of data points must be larger than the spline degree `k`.
Examples
--------
>>> from numpy import linspace,exp
>>> from numpy.random import randn
>>> from scipy.interpolate import LSQUnivariateSpline
>>> import matplotlib.pyplot as plt
>>> x = linspace(-3,3,100)
>>> y = exp(-x**2) + randn(100)/10
>>> t = [-1,0,1]
>>> s = LSQUnivariateSpline(x,y,t)
>>> xs = linspace(-3,3,1000)
>>> ys = s(xs)
>>> plt.plot(x, y, '.-')
>>> plt.plot(xs, ys)
>>> plt.show()
xs,ys is now a smoothed, super-sampled version of the noisy gaussian x,y
with knots [-3,-1,0,1,3]
"""
def __init__(self, x, y, t, w=None, bbox=[None]*2, k=3):
"""
Input:
x,y - 1-d sequences of data points (x must be
in strictly ascending order)
t - 1-d sequence of the positions of user-defined
interior knots of the spline (t must be in strictly
ascending order and bbox[0]<t[0]<...<t[-1]<bbox[-1])
Optional input:
w - positive 1-d sequence of weights
bbox - 2-sequence specifying the boundary of
the approximation interval.
By default, bbox=[x[0],x[-1]]
k=3 - degree of the univariate spline.
"""
# _data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
xb = bbox[0]
xe = bbox[1]
if xb is None:
xb = x[0]
if xe is None:
xe = x[-1]
t = concatenate(([xb]*(k+1),t,[xe]*(k+1)))
n = len(t)
if not alltrue(t[k+1:n-k]-t[k:n-k-1] > 0,axis=0):
raise ValueError('Interior knots t must satisfy '
'Schoenberg-Whitney conditions')
data = dfitpack.fpcurfm1(x,y,k,t,w=w,xb=xb,xe=xe)
self._data = data[:-3] + (None,None,data[-1])
self._reset_class()
################ Bivariate spline ####################
class _BivariateSplineBase(object):
""" Base class for Bivariate spline s(x,y) interpolation on the rectangle
[xb,xe] x [yb, ye] calculated from a given set of data points
(x,y,z).
See Also
--------
bisplrep, bisplev : an older wrapping of FITPACK
BivariateSpline :
implementation of bivariate spline interpolation on a plane grid
SphereBivariateSpline :
implementation of bivariate spline interpolation on a spherical grid
"""
def get_residual(self):
""" Return weighted sum of squared residuals of the spline
approximation: sum ((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0)
"""
return self.fp
def get_knots(self):
""" Return a tuple (tx,ty) where tx,ty contain knots positions
of the spline with respect to x-, y-variable, respectively.
The position of interior and additional knots are given as
t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively.
"""
return self.tck[:2]
def get_coeffs(self):
""" Return spline coefficients."""
return self.tck[2]
def __call__(self, x, y, mth=None, dx=0, dy=0, grid=True):
"""
Evaluate the spline or its derivatives at given positions.
Parameters
----------
x, y : array-like
Input coordinates.
If `grid` is False, evaluate the spline at points ``(x[i],
y[i]), i=0, ..., len(x)-1``. Standard Numpy broadcasting
is obeyed.
If `grid` is True: evaluate spline at the grid points
defined by the coordinate arrays x, y. The arrays must be
sorted to increasing order.
dx : int
Order of x-derivative
.. versionadded:: 0.14.0
dy : int
Order of y-derivative
.. versionadded:: 0.14.0
grid : bool
Whether to evaluate the results on a grid spanned by the
input arrays, or at points specified by the input arrays.
.. versionadded:: 0.14.0
mth : str
Deprecated argument. Has no effect.
"""
x = np.asarray(x)
y = np.asarray(y)
if mth is not None:
warnings.warn("The `mth` argument is deprecated and will be removed",
FutureWarning)
tx, ty, c = self.tck[:3]
kx, ky = self.degrees
if grid:
if x.size == 0 or y.size == 0:
return np.zeros((x.size, y.size), dtype=self.tck[2].dtype)
if dx or dy:
z,ier = dfitpack.parder(tx,ty,c,kx,ky,dx,dy,x,y)
if not ier == 0:
raise ValueError("Error code returned by parder: %s" % ier)
else:
z,ier = dfitpack.bispev(tx,ty,c,kx,ky,x,y)
if not ier == 0:
raise ValueError("Error code returned by bispev: %s" % ier)
else:
# standard Numpy broadcasting
if x.shape != y.shape:
x, y = np.broadcast_arrays(x, y)
shape = x.shape
x = x.ravel()
y = y.ravel()
if x.size == 0 or y.size == 0:
return np.zeros(shape, dtype=self.tck[2].dtype)
if dx or dy:
z,ier = dfitpack.pardeu(tx,ty,c,kx,ky,dx,dy,x,y)
if not ier == 0:
raise ValueError("Error code returned by pardeu: %s" % ier)
else:
z,ier = dfitpack.bispeu(tx,ty,c,kx,ky,x,y)
if not ier == 0:
raise ValueError("Error code returned by bispeu: %s" % ier)
z = z.reshape(shape)
return z
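# Illustrative note, not part of the original module: with grid=True the
# call above evaluates on the outer product of the (sorted) x and y arrays
# and returns an array of shape (len(x), len(y)); with grid=False it
# evaluates pointwise after broadcasting. For a hypothetical spline ``spl``:
#     spl([1., 2.], [3., 4.])               # 2x2 grid of values
#     spl([1., 2.], [3., 4.], grid=False)   # values at (1, 3) and (2, 4)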
_surfit_messages = {1:"""
The required storage space exceeds the available storage space: nxest
or nyest too small, or s too small.
The weighted least-squares spline corresponds to the current set of
knots.""",
2:"""
A theoretically impossible result was found during the iteration
process for finding a smoothing spline with fp = s: s too small or
badly chosen eps.
Weighted sum of squared residuals does not satisfy abs(fp-s)/s < tol.""",
3:"""
the maximal number of iterations maxit (set to 20 by the program)
allowed for finding a smoothing spline with fp=s has been reached:
s too small.
Weighted sum of squared residuals does not satisfy abs(fp-s)/s < tol.""",
4:"""
No more knots can be added because the number of b-spline coefficients
(nx-kx-1)*(ny-ky-1) already exceeds the number of data points m:
either s or m too small.
The weighted least-squares spline corresponds to the current set of
knots.""",
5:"""
No more knots can be added because the additional knot would (quasi)
coincide with an old one: s too small or too large a weight to an
inaccurate data point.
The weighted least-squares spline corresponds to the current set of
knots.""",
10:"""
Error on entry, no approximation returned. The following conditions
must hold:
xb<=x[i]<=xe, yb<=y[i]<=ye, w[i]>0, i=0..m-1
If iopt==-1, then
xb<tx[kx+1]<tx[kx+2]<...<tx[nx-kx-2]<xe
yb<ty[ky+1]<ty[ky+2]<...<ty[ny-ky-2]<ye""",
-3:"""
The coefficients of the spline returned have been computed as the
minimal norm least-squares solution of a (numerically) rank deficient
system (deficiency=%i). If deficiency is large, the results may be
inaccurate. Deficiency may strongly depend on the value of eps."""
}
class BivariateSpline(_BivariateSplineBase):
"""
Base class for bivariate splines.
This describes a spline ``s(x, y)`` of degrees ``kx`` and ``ky`` on
the rectangle ``[xb, xe] * [yb, ye]`` calculated from a given set
of data points ``(x, y, z)``.
This class is meant to be subclassed, not instantiated directly.
To construct these splines, call either `SmoothBivariateSpline` or
`LSQBivariateSpline`.
See Also
--------
UnivariateSpline : a similar class for univariate spline interpolation
SmoothBivariateSpline :
to create a BivariateSpline through the given points
LSQBivariateSpline :
to create a BivariateSpline using weighted least-squares fitting
SphereBivariateSpline :
        bivariate spline interpolation in spherical coordinates
bisplrep : older wrapping of FITPACK
bisplev : older wrapping of FITPACK
"""
def ev(self, xi, yi, dx=0, dy=0):
"""
Evaluate the spline at points
Returns the interpolated value at ``(xi[i], yi[i]),
i=0,...,len(xi)-1``.
Parameters
----------
xi, yi : array-like
Input coordinates. Standard Numpy broadcasting is obeyed.
dx : int
Order of x-derivative
.. versionadded:: 0.14.0
dy : int
Order of y-derivative
.. versionadded:: 0.14.0
"""
return self.__call__(xi, yi, dx=dx, dy=dy, grid=False)
def integral(self, xa, xb, ya, yb):
"""
Evaluate the integral of the spline over area [xa,xb] x [ya,yb].
Parameters
----------
xa, xb : float
The end-points of the x integration interval.
ya, yb : float
The end-points of the y integration interval.
Returns
-------
integ : float
The value of the resulting integral.
"""
tx,ty,c = self.tck[:3]
kx,ky = self.degrees
return dfitpack.dblint(tx,ty,c,kx,ky,xa,xb,ya,yb)
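# Illustrative usage note, not part of the original module: for any
# constructed BivariateSpline subclass instance ``spl`` (hypothetical here),
#     spl.ev([1.0, 2.0], [0.5, 0.5])      # values at two (x, y) points
#     spl.integral(0.0, 1.0, 0.0, 1.0)    # integral over [0, 1] x [0, 1]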
class SmoothBivariateSpline(BivariateSpline):
"""
Smooth bivariate spline approximation.
Parameters
----------
x, y, z : array_like
1-D sequences of data points (order is not important).
w : array_like, optional
Positive 1-D sequence of weights, of same length as `x`, `y` and `z`.
bbox : array_like, optional
Sequence of length 4 specifying the boundary of the rectangular
approximation domain. By default,
``bbox=[min(x,tx),max(x,tx), min(y,ty),max(y,ty)]``.
kx, ky : ints, optional
Degrees of the bivariate spline. Default is 3.
s : float, optional
Positive smoothing factor defined for estimation condition:
``sum((w[i]*(z[i]-s(x[i], y[i])))**2, axis=0) <= s``
Default ``s=len(w)`` which should be a good value if ``1/w[i]`` is an
estimate of the standard deviation of ``z[i]``.
eps : float, optional
A threshold for determining the effective rank of an over-determined
linear system of equations. `eps` should have a value between 0 and 1,
the default is 1e-16.
See Also
--------
bisplrep : an older wrapping of FITPACK
bisplev : an older wrapping of FITPACK
UnivariateSpline : a similar class for univariate spline interpolation
    LSQBivariateSpline : to create a BivariateSpline using weighted
        least-squares fitting
Notes
-----
The length of `x`, `y` and `z` should be at least ``(kx+1) * (ky+1)``.
"""
def __init__(self, x, y, z, w=None, bbox=[None] * 4, kx=3, ky=3, s=None,
eps=None):
xb,xe,yb,ye = bbox
nx,tx,ny,ty,c,fp,wrk1,ier = dfitpack.surfit_smth(x,y,z,w,
xb,xe,yb,ye,
kx,ky,s=s,
eps=eps,lwrk2=1)
if ier > 10: # lwrk2 was to small, re-run
nx,tx,ny,ty,c,fp,wrk1,ier = dfitpack.surfit_smth(x,y,z,w,
xb,xe,yb,ye,
kx,ky,s=s,
eps=eps,lwrk2=ier)
if ier in [0,-1,-2]: # normal return
pass
else:
message = _surfit_messages.get(ier,'ier=%s' % (ier))
warnings.warn(message)
self.fp = fp
self.tck = tx[:nx],ty[:ny],c[:(nx-kx-1)*(ny-ky-1)]
self.degrees = kx,ky
class LSQBivariateSpline(BivariateSpline):
"""
Weighted least-squares bivariate spline approximation.
Parameters
----------
x, y, z : array_like
1-D sequences of data points (order is not important).
tx, ty : array_like
Strictly ordered 1-D sequences of knots coordinates.
w : array_like, optional
Positive 1-D array of weights, of the same length as `x`, `y` and `z`.
bbox : (4,) array_like, optional
Sequence of length 4 specifying the boundary of the rectangular
approximation domain. By default,
``bbox=[min(x,tx),max(x,tx), min(y,ty),max(y,ty)]``.
kx, ky : ints, optional
Degrees of the bivariate spline. Default is 3.
s : float, optional
Positive smoothing factor defined for estimation condition:
``sum((w[i]*(z[i]-s(x[i], y[i])))**2, axis=0) <= s``
Default ``s=len(w)`` which should be a good value if ``1/w[i]`` is an
estimate of the standard deviation of ``z[i]``.
eps : float, optional
A threshold for determining the effective rank of an over-determined
linear system of equations. `eps` should have a value between 0 and 1,
the default is 1e-16.
See Also
--------
bisplrep : an older wrapping of FITPACK
bisplev : an older wrapping of FITPACK
UnivariateSpline : a similar class for univariate spline interpolation
SmoothBivariateSpline : create a smoothing BivariateSpline
Notes
-----
The length of `x`, `y` and `z` should be at least ``(kx+1) * (ky+1)``.
"""
def __init__(self, x, y, z, tx, ty, w=None, bbox=[None]*4, kx=3, ky=3,
eps=None):
nx = 2*kx+2+len(tx)
ny = 2*ky+2+len(ty)
tx1 = zeros((nx,),float)
ty1 = zeros((ny,),float)
tx1[kx+1:nx-kx-1] = tx
ty1[ky+1:ny-ky-1] = ty
xb,xe,yb,ye = bbox
tx1,ty1,c,fp,ier = dfitpack.surfit_lsq(x,y,z,tx1,ty1,w,
xb,xe,yb,ye,
kx,ky,eps,lwrk2=1)
if ier > 10:
tx1,ty1,c,fp,ier = dfitpack.surfit_lsq(x,y,z,tx1,ty1,w,
xb,xe,yb,ye,
kx,ky,eps,lwrk2=ier)
if ier in [0,-1,-2]: # normal return
pass
else:
if ier < -2:
deficiency = (nx-kx-1)*(ny-ky-1)+ier
message = _surfit_messages.get(-3) % (deficiency)
else:
message = _surfit_messages.get(ier, 'ier=%s' % (ier))
warnings.warn(message)
self.fp = fp
self.tck = tx1, ty1, c
self.degrees = kx, ky
class RectBivariateSpline(BivariateSpline):
"""
Bivariate spline approximation over a rectangular mesh.
Can be used for both smoothing and interpolating data.
Parameters
----------
x,y : array_like
1-D arrays of coordinates in strictly ascending order.
z : array_like
2-D array of data with shape (x.size,y.size).
bbox : array_like, optional
Sequence of length 4 specifying the boundary of the rectangular
approximation domain. By default,
``bbox=[min(x,tx),max(x,tx), min(y,ty),max(y,ty)]``.
kx, ky : ints, optional
Degrees of the bivariate spline. Default is 3.
s : float, optional
Positive smoothing factor defined for estimation condition:
``sum((w[i]*(z[i]-s(x[i], y[i])))**2, axis=0) <= s``
Default is ``s=0``, which is for interpolation.
See Also
--------
SmoothBivariateSpline : a smoothing bivariate spline for scattered data
bisplrep : an older wrapping of FITPACK
bisplev : an older wrapping of FITPACK
UnivariateSpline : a similar class for univariate spline interpolation
"""
def __init__(self, x, y, z, bbox=[None] * 4, kx=3, ky=3, s=0):
x, y = ravel(x), ravel(y)
if not all(diff(x) > 0.0):
raise TypeError('x must be strictly increasing')
if not all(diff(y) > 0.0):
raise TypeError('y must be strictly increasing')
if not ((x.min() == x[0]) and (x.max() == x[-1])):
raise TypeError('x must be strictly ascending')
if not ((y.min() == y[0]) and (y.max() == y[-1])):
raise TypeError('y must be strictly ascending')
if not x.size == z.shape[0]:
raise TypeError('x dimension of z must have same number of '
'elements as x')
if not y.size == z.shape[1]:
raise TypeError('y dimension of z must have same number of '
'elements as y')
z = ravel(z)
xb, xe, yb, ye = bbox
nx, tx, ny, ty, c, fp, ier = dfitpack.regrid_smth(x, y, z, xb, xe, yb,
ye, kx, ky, s)
if not ier in [0, -1, -2]:
msg = _surfit_messages.get(ier, 'ier=%s' % (ier))
raise ValueError(msg)
self.fp = fp
self.tck = tx[:nx], ty[:ny], c[:(nx - kx - 1) * (ny - ky - 1)]
self.degrees = kx, ky
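# Illustrative usage sketch, not part of the original module (the data
# values are made up): given samples on a rectangular grid,
#     x = np.linspace(0., 4., 5)
#     y = np.linspace(0., 5., 6)
#     z = np.random.rand(5, 6)              # shape (x.size, y.size)
#     spl = RectBivariateSpline(x, y, z)    # s=0 -> pure interpolation
#     spl(2.5, 3.5)                         # evaluate on a 1x1 grid
#     spl.ev(2.5, 3.5)                      # pointwise evaluation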
_spherefit_messages = _surfit_messages.copy()
_spherefit_messages[10] = """
ERROR. On entry, the input data are controlled on validity. The following
restrictions must be satisfied:
-1<=iopt<=1, m>=2, ntest>=8 ,npest >=8, 0<eps<1,
0<=teta(i)<=pi, 0<=phi(i)<=2*pi, w(i)>0, i=1,...,m
lwrk1 >= 185+52*v+10*u+14*u*v+8*(u-1)*v**2+8*m
kwrk >= m+(ntest-7)*(npest-7)
if iopt=-1: 8<=nt<=ntest , 9<=np<=npest
0<tt(5)<tt(6)<...<tt(nt-4)<pi
0<tp(5)<tp(6)<...<tp(np-4)<2*pi
if iopt>=0: s>=0
if one of these conditions is found to be violated,control
is immediately repassed to the calling program. in that
case there is no approximation returned."""
_spherefit_messages[-3] = """
WARNING. The coefficients of the spline returned have been computed as the
minimal norm least-squares solution of a (numerically) rank
deficient system (deficiency=%i, rank=%i). Especially if the rank
deficiency, which is computed by 6+(nt-8)*(np-7)+ier, is large,
the results may be inaccurate. They could also seriously depend on
the value of eps."""
class SphereBivariateSpline(_BivariateSplineBase):
"""
Bivariate spline s(x,y) of degrees 3 on a sphere, calculated from a
given set of data points (theta,phi,r).
.. versionadded:: 0.11.0
See Also
--------
bisplrep, bisplev : an older wrapping of FITPACK
UnivariateSpline : a similar class for univariate spline interpolation
SmoothUnivariateSpline :
to create a BivariateSpline through the given points
LSQUnivariateSpline :
to create a BivariateSpline using weighted least-squares fitting
"""
def __call__(self, theta, phi, dtheta=0, dphi=0, grid=True):
"""
Evaluate the spline or its derivatives at given positions.
Parameters
----------
theta, phi : array-like
Input coordinates.
If `grid` is False, evaluate the spline at points
``(theta[i], phi[i]), i=0, ..., len(x)-1``. Standard
Numpy broadcasting is obeyed.
If `grid` is True: evaluate spline at the grid points
defined by the coordinate arrays theta, phi. The arrays
must be sorted to increasing order.
dtheta : int
Order of theta-derivative
.. versionadded:: 0.14.0
dphi : int
Order of phi-derivative
.. versionadded:: 0.14.0
grid : bool
Whether to evaluate the results on a grid spanned by the
input arrays, or at points specified by the input arrays.
.. versionadded:: 0.14.0
"""
theta = np.asarray(theta)
phi = np.asarray(phi)
if theta.size > 0 and (theta.min() < 0. or theta.max() > np.pi):
raise ValueError("requested theta out of bounds.")
if phi.size > 0 and (phi.min() < 0. or phi.max() > 2. * np.pi):
raise ValueError("requested phi out of bounds.")
return _BivariateSplineBase.__call__(self, theta, phi,
dx=dtheta, dy=dphi, grid=grid)
def ev(self, theta, phi, dtheta=0, dphi=0):
"""
Evaluate the spline at points
Returns the interpolated value at ``(theta[i], phi[i]),
i=0,...,len(theta)-1``.
Parameters
----------
theta, phi : array-like
Input coordinates. Standard Numpy broadcasting is obeyed.
dtheta : int
Order of theta-derivative
.. versionadded:: 0.14.0
dphi : int
Order of phi-derivative
.. versionadded:: 0.14.0
"""
return self.__call__(theta, phi, dtheta=dtheta, dphi=dphi, grid=False)
class SmoothSphereBivariateSpline(SphereBivariateSpline):
"""
Smooth bivariate spline approximation in spherical coordinates.
.. versionadded:: 0.11.0
Parameters
----------
theta, phi, r : array_like
1-D sequences of data points (order is not important). Coordinates
must be given in radians. Theta must lie within the interval (0, pi),
and phi must lie within the interval (0, 2pi).
w : array_like, optional
Positive 1-D sequence of weights.
s : float, optional
Positive smoothing factor defined for estimation condition:
``sum((w(i)*(r(i) - s(theta(i), phi(i))))**2, axis=0) <= s``
Default ``s=len(w)`` which should be a good value if 1/w[i] is an
estimate of the standard deviation of r[i].
eps : float, optional
A threshold for determining the effective rank of an over-determined
linear system of equations. `eps` should have a value between 0 and 1,
the default is 1e-16.
Notes
-----
For more information, see the FITPACK_ site about this function.
.. _FITPACK: http://www.netlib.org/dierckx/sphere.f
Examples
--------
Suppose we have global data on a coarse grid (the input data does not
have to be on a grid):
>>> theta = np.linspace(0., np.pi, 7)
>>> phi = np.linspace(0., 2*np.pi, 9)
>>> data = np.empty((theta.shape[0], phi.shape[0]))
>>> data[:,0], data[0,:], data[-1,:] = 0., 0., 0.
>>> data[1:-1,1], data[1:-1,-1] = 1., 1.
>>> data[1,1:-1], data[-2,1:-1] = 1., 1.
>>> data[2:-2,2], data[2:-2,-2] = 2., 2.
>>> data[2,2:-2], data[-3,2:-2] = 2., 2.
>>> data[3,3:-2] = 3.
>>> data = np.roll(data, 4, 1)
We need to set up the interpolator object
>>> lats, lons = np.meshgrid(theta, phi)
>>> from scipy.interpolate import SmoothSphereBivariateSpline
>>> lut = SmoothSphereBivariateSpline(lats.ravel(), lons.ravel(),
data.T.ravel(),s=3.5)
As a first test, we'll see what the algorithm returns when run on the
input coordinates
>>> data_orig = lut(theta, phi)
Finally we interpolate the data to a finer grid
>>> fine_lats = np.linspace(0., np.pi, 70)
>>> fine_lons = np.linspace(0., 2 * np.pi, 90)
>>> data_smth = lut(fine_lats, fine_lons)
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(131)
>>> ax1.imshow(data, interpolation='nearest')
>>> ax2 = fig.add_subplot(132)
>>> ax2.imshow(data_orig, interpolation='nearest')
>>> ax3 = fig.add_subplot(133)
>>> ax3.imshow(data_smth, interpolation='nearest')
>>> plt.show()
"""
def __init__(self, theta, phi, r, w=None, s=0., eps=1E-16):
if np.issubclass_(w, float):
w = ones(len(theta)) * w
nt_, tt_, np_, tp_, c, fp, ier = dfitpack.spherfit_smth(theta, phi,
r, w=w, s=s,
eps=eps)
if not ier in [0, -1, -2]:
message = _spherefit_messages.get(ier, 'ier=%s' % (ier))
raise ValueError(message)
self.fp = fp
self.tck = tt_[:nt_], tp_[:np_], c[:(nt_ - 4) * (np_ - 4)]
self.degrees = (3, 3)
class LSQSphereBivariateSpline(SphereBivariateSpline):
"""
Weighted least-squares bivariate spline approximation in spherical
coordinates.
.. versionadded:: 0.11.0
Parameters
----------
theta, phi, r : array_like
1-D sequences of data points (order is not important). Coordinates
must be given in radians. Theta must lie within the interval (0, pi),
and phi must lie within the interval (0, 2pi).
tt, tp : array_like
Strictly ordered 1-D sequences of knots coordinates.
Coordinates must satisfy ``0 < tt[i] < pi``, ``0 < tp[i] < 2*pi``.
w : array_like, optional
Positive 1-D sequence of weights, of the same length as `theta`, `phi`
and `r`.
eps : float, optional
A threshold for determining the effective rank of an over-determined
linear system of equations. `eps` should have a value between 0 and 1,
the default is 1e-16.
Notes
-----
For more information, see the FITPACK_ site about this function.
.. _FITPACK: http://www.netlib.org/dierckx/sphere.f
Examples
--------
Suppose we have global data on a coarse grid (the input data does not
have to be on a grid):
>>> theta = np.linspace(0., np.pi, 7)
>>> phi = np.linspace(0., 2*np.pi, 9)
>>> data = np.empty((theta.shape[0], phi.shape[0]))
>>> data[:,0], data[0,:], data[-1,:] = 0., 0., 0.
>>> data[1:-1,1], data[1:-1,-1] = 1., 1.
>>> data[1,1:-1], data[-2,1:-1] = 1., 1.
>>> data[2:-2,2], data[2:-2,-2] = 2., 2.
>>> data[2,2:-2], data[-3,2:-2] = 2., 2.
>>> data[3,3:-2] = 3.
>>> data = np.roll(data, 4, 1)
We need to set up the interpolator object. Here, we must also specify the
coordinates of the knots to use.
>>> lats, lons = np.meshgrid(theta, phi)
>>> knotst, knotsp = theta.copy(), phi.copy()
>>> knotst[0] += .0001
>>> knotst[-1] -= .0001
>>> knotsp[0] += .0001
>>> knotsp[-1] -= .0001
>>> from scipy.interpolate import LSQSphereBivariateSpline
>>> lut = LSQSphereBivariateSpline(lats.ravel(), lons.ravel(),
data.T.ravel(),knotst,knotsp)
As a first test, we'll see what the algorithm returns when run on the
input coordinates
>>> data_orig = lut(theta, phi)
Finally we interpolate the data to a finer grid
>>> fine_lats = np.linspace(0., np.pi, 70)
>>> fine_lons = np.linspace(0., 2*np.pi, 90)
>>> data_lsq = lut(fine_lats, fine_lons)
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(131)
>>> ax1.imshow(data, interpolation='nearest')
>>> ax2 = fig.add_subplot(132)
>>> ax2.imshow(data_orig, interpolation='nearest')
>>> ax3 = fig.add_subplot(133)
>>> ax3.imshow(data_lsq, interpolation='nearest')
>>> plt.show()
"""
def __init__(self, theta, phi, r, tt, tp, w=None, eps=1E-16):
if np.issubclass_(w, float):
w = ones(len(theta)) * w
nt_, np_ = 8 + len(tt), 8 + len(tp)
tt_, tp_ = zeros((nt_,), float), zeros((np_,), float)
tt_[4:-4], tp_[4:-4] = tt, tp
tt_[-4:], tp_[-4:] = np.pi, 2. * np.pi
tt_, tp_, c, fp, ier = dfitpack.spherfit_lsq(theta, phi, r, tt_, tp_,
w=w, eps=eps)
if ier < -2:
deficiency = 6 + (nt_ - 8) * (np_ - 7) + ier
message = _spherefit_messages.get(-3) % (deficiency, -ier)
warnings.warn(message)
elif not ier in [0, -1, -2]:
message = _spherefit_messages.get(ier, 'ier=%s' % (ier))
raise ValueError(message)
self.fp = fp
self.tck = tt_, tp_, c
self.degrees = (3, 3)
_spfit_messages = _surfit_messages.copy()
_spfit_messages[10] = """
ERROR: on entry, the input data are controlled on validity
the following restrictions must be satisfied.
-1<=iopt(1)<=1, 0<=iopt(2)<=1, 0<=iopt(3)<=1,
-1<=ider(1)<=1, 0<=ider(2)<=1, ider(2)=0 if iopt(2)=0.
-1<=ider(3)<=1, 0<=ider(4)<=1, ider(4)=0 if iopt(3)=0.
mu >= mumin (see above), mv >= 4, nuest >=8, nvest >= 8,
kwrk>=5+mu+mv+nuest+nvest,
lwrk >= 12+nuest*(mv+nvest+3)+nvest*24+4*mu+8*mv+max(nuest,mv+nvest)
0< u(i-1)<u(i)< pi,i=2,..,mu,
-pi<=v(1)< pi, v(1)<v(i-1)<v(i)<v(1)+2*pi, i=3,...,mv
if iopt(1)=-1: 8<=nu<=min(nuest,mu+6+iopt(2)+iopt(3))
0<tu(5)<tu(6)<...<tu(nu-4)< pi
8<=nv<=min(nvest,mv+7)
v(1)<tv(5)<tv(6)<...<tv(nv-4)<v(1)+2*pi
the schoenberg-whitney conditions, i.e. there must be
subset of grid co-ordinates uu(p) and vv(q) such that
tu(p) < uu(p) < tu(p+4) ,p=1,...,nu-4
(iopt(2)=1 and iopt(3)=1 also count for a uu-value
tv(q) < vv(q) < tv(q+4) ,q=1,...,nv-4
(vv(q) is either a value v(j) or v(j)+2*pi)
if iopt(1)>=0: s>=0
if s=0: nuest>=mu+6+iopt(2)+iopt(3), nvest>=mv+7
if one of these conditions is found to be violated,control is
immediately repassed to the calling program. in that case there is no
approximation returned."""
class RectSphereBivariateSpline(SphereBivariateSpline):
"""
Bivariate spline approximation over a rectangular mesh on a sphere.
Can be used for smoothing data.
.. versionadded:: 0.11.0
Parameters
----------
u : array_like
1-D array of latitude coordinates in strictly ascending order.
Coordinates must be given in radians and lie within the interval
(0, pi).
v : array_like
1-D array of longitude coordinates in strictly ascending order.
Coordinates must be given in radians, and must lie within (0, 2pi).
r : array_like
2-D array of data with shape ``(u.size, v.size)``.
s : float, optional
Positive smoothing factor defined for estimation condition
(``s=0`` is for interpolation).
pole_continuity : bool or (bool, bool), optional
Order of continuity at the poles ``u=0`` (``pole_continuity[0]``) and
``u=pi`` (``pole_continuity[1]``). The order of continuity at the pole
will be 1 or 0 when this is True or False, respectively.
Defaults to False.
pole_values : float or (float, float), optional
Data values at the poles ``u=0`` and ``u=pi``. Either the whole
parameter or each individual element can be None. Defaults to None.
pole_exact : bool or (bool, bool), optional
Data value exactness at the poles ``u=0`` and ``u=pi``. If True, the
value is considered to be the right function value, and it will be
fitted exactly. If False, the value will be considered to be a data
value just like the other data values. Defaults to False.
pole_flat : bool or (bool, bool), optional
For the poles at ``u=0`` and ``u=pi``, specify whether or not the
approximation has vanishing derivatives. Defaults to False.
See Also
--------
RectBivariateSpline : bivariate spline approximation over a rectangular
mesh
Notes
-----
Currently, only the smoothing spline approximation (``iopt[0] = 0`` and
``iopt[0] = 1`` in the FITPACK routine) is supported. The exact
least-squares spline approximation is not implemented yet.
When actually performing the interpolation, the requested `v` values must
lie within the same length 2pi interval that the original `v` values were
chosen from.
For more information, see the FITPACK_ site about this function.
.. _FITPACK: http://www.netlib.org/dierckx/spgrid.f
Examples
--------
Suppose we have global data on a coarse grid
>>> lats = np.linspace(10, 170, 9) * np.pi / 180.
>>> lons = np.linspace(0, 350, 18) * np.pi / 180.
>>> data = np.dot(np.atleast_2d(90. - np.linspace(-80., 80., 18)).T,
...               np.atleast_2d(180. - np.abs(np.linspace(0., 350., 9)))).T
We want to interpolate it to a global one-degree grid
>>> new_lats = np.linspace(1, 180, 180) * np.pi / 180
>>> new_lons = np.linspace(1, 360, 360) * np.pi / 180
>>> new_lats, new_lons = np.meshgrid(new_lats, new_lons)
We need to set up the interpolator object
>>> from scipy.interpolate import RectSphereBivariateSpline
>>> lut = RectSphereBivariateSpline(lats, lons, data)
Finally we interpolate the data. The `RectSphereBivariateSpline` object
only takes 1-D arrays as input, therefore we need to do some reshaping.
>>> data_interp = lut.ev(new_lats.ravel(),
... new_lons.ravel()).reshape((360, 180)).T
Looking at the original and the interpolated data, one can see that the
interpolant reproduces the original data very well:
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(211)
>>> ax1.imshow(data, interpolation='nearest')
>>> ax2 = fig.add_subplot(212)
>>> ax2.imshow(data_interp, interpolation='nearest')
>>> plt.show()
Choosing the optimal value of ``s`` can be a delicate task. Recommended
values for ``s`` depend on the accuracy of the data values. If the user
has an idea of the statistical errors on the data, she can also find a
proper estimate for ``s``. By assuming that, if she specifies the
right ``s``, the interpolator will use a spline ``f(u,v)`` which exactly
reproduces the function underlying the data, she can evaluate
``sum((r(i,j)-s(u(i),v(j)))**2)`` to find a good estimate for this ``s``.
For example, if she knows that the statistical errors on her
``r(i,j)``-values are not greater than 0.1, she may expect that a good
``s`` should have a value not larger than ``u.size * v.size * (0.1)**2``.
If nothing is known about the statistical error in ``r(i,j)``, ``s`` must
be determined by trial and error. The best is then to start with a very
large value of ``s`` (to determine the least-squares polynomial and the
corresponding upper bound ``fp0`` for ``s``) and then to progressively
decrease the value of ``s`` (say by a factor 10 in the beginning, i.e.
``s = fp0 / 10, fp0 / 100, ...`` and more carefully as the approximation
shows more detail) to obtain closer fits.
The interpolation results for different values of ``s`` give some insight
into this process:
>>> fig2 = plt.figure()
>>> s = [3e9, 2e9, 1e9, 1e8]
>>> for ii in range(len(s)):
...     lut = RectSphereBivariateSpline(lats, lons, data, s=s[ii])
...     data_interp = lut.ev(new_lats.ravel(),
...                          new_lons.ravel()).reshape((360, 180)).T
...     ax = fig2.add_subplot(2, 2, ii+1)
...     ax.imshow(data_interp, interpolation='nearest')
...     ax.set_title("s = %g" % s[ii])
>>> plt.show()
"""
def __init__(self, u, v, r, s=0., pole_continuity=False, pole_values=None,
pole_exact=False, pole_flat=False):
iopt = np.array([0, 0, 0], dtype=int)
ider = np.array([-1, 0, -1, 0], dtype=int)
if pole_values is None:
pole_values = (None, None)
elif isinstance(pole_values, (float, np.float32, np.float64)):
pole_values = (pole_values, pole_values)
if isinstance(pole_continuity, bool):
pole_continuity = (pole_continuity, pole_continuity)
if isinstance(pole_exact, bool):
pole_exact = (pole_exact, pole_exact)
if isinstance(pole_flat, bool):
pole_flat = (pole_flat, pole_flat)
r0, r1 = pole_values
iopt[1:] = pole_continuity
if r0 is None:
ider[0] = -1
else:
ider[0] = pole_exact[0]
if r1 is None:
ider[2] = -1
else:
ider[2] = pole_exact[1]
ider[1], ider[3] = pole_flat
u, v = np.ravel(u), np.ravel(v)
if not np.all(np.diff(u) > 0.0):
raise TypeError('u must be strictly increasing')
if not np.all(np.diff(v) > 0.0):
raise TypeError('v must be strictly increasing')
if not u.size == r.shape[0]:
raise TypeError('u dimension of r must have same number of '
'elements as u')
if not v.size == r.shape[1]:
raise TypeError('v dimension of r must have same number of '
'elements as v')
if pole_continuity[1] is False and pole_flat[1] is True:
raise TypeError('if pole_continuity is False, so must be '
'pole_flat')
if pole_continuity[0] is False and pole_flat[0] is True:
raise TypeError('if pole_continuity is False, so must be '
'pole_flat')
r = np.ravel(r)
nu, tu, nv, tv, c, fp, ier = dfitpack.regrid_smth_spher(iopt, ider,
u.copy(), v.copy(), r.copy(), r0, r1, s)
if not ier in [0, -1, -2]:
msg = _spfit_messages.get(ier, 'ier=%s' % (ier))
raise ValueError(msg)
self.fp = fp
self.tck = tu[:nu], tv[:nv], c[:(nu - 4) * (nv-4)]
self.degrees = (3, 3)
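# Illustrative evaluation sketch (added for clarity; not part of the original
# module). The array arguments are hypothetical 1-D coordinate/data arrays in
# radians, matching the docstring examples above.
def _example_evaluate_sphere_spline(lats, lons, data, new_lats, new_lons):
    lut = RectSphereBivariateSpline(lats, lons, data, s=3e9)
    gridded = lut(new_lats, new_lons)       # evaluate on the outer-product grid
    pointwise = lut.ev(new_lats, new_lons)  # evaluate point-by-point
    return gridded, pointwise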
|
bsd-3-clause
| -6,407,795,155,517,847,000
| 35.845485
| 81
| 0.563176
| false
| 3.57851
| false
| false
| false
|
apanda/modeling
|
tests/examples/lsrr_example.py
|
1
|
2652
|
import components
def LSRRExample ():
ctx = components.Context(['e0' , 'e1', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'], \
['ip_e0', 'ip_e1', 'ip_a', 'ip_b', 'ip_c', 'ip_d', 'ip_e', 'ip_f', 'ip_g', 'ip_h'])
net = components.Network(ctx)
# Register something that tells us about LSR
ip_lsr_field = components.LSRROption ('ip_lsr', ctx)
ctx.AddPolicy (ip_lsr_field)
e0 = components.EndHost(ctx.e0, net, ctx)
e1 = components.EndHost(ctx.e1, net, ctx)
# Yeah I can put this in a list etc., doing it this way mostly for no good reason.
a = components.LSRRRouter (ctx.a, ip_lsr_field, net, ctx)
b = components.LSRRRouter (ctx.b, ip_lsr_field, net, ctx)
c = components.LSRRRouter (ctx.c, ip_lsr_field, net, ctx)
d = components.LSRRRouter (ctx.d, ip_lsr_field, net, ctx)
e = components.LSRRRouter (ctx.e, ip_lsr_field, net, ctx)
f = components.LSRRRouter (ctx.f, ip_lsr_field, net, ctx)
g = components.LSRRRouter (ctx.g, ip_lsr_field, net, ctx)
h = components.LSRRRouter (ctx.h, ip_lsr_field, net, ctx)
net.setAddressMappings([(e0, ctx.ip_e0), \
(e1, ctx.ip_e1), \
(a, ctx.ip_a), \
(b, ctx.ip_b), \
(c, ctx.ip_c), \
(d, ctx.ip_d), \
(e, ctx.ip_e), \
(f, ctx.ip_f), \
(g, ctx.ip_g), \
(h, ctx.ip_h)])
routing_table = [(ctx.ip_e0, e0), \
(ctx.ip_e1, e1), \
(ctx.ip_a, a), \
(ctx.ip_b, b), \
(ctx.ip_c, c), \
(ctx.ip_d, d), \
(ctx.ip_e, e), \
(ctx.ip_f, f), \
(ctx.ip_g, g), \
(ctx.ip_h, h)]
nodes = [e0, e1, a, b, c, d, e, f, g, h]
node_dict = {'a': a, \
'b': b, \
'c': c, \
'd': d, \
'e': e, \
'f': f, \
'g': g, \
'h': h}
for n in nodes:
net.RoutingTable(n, routing_table)
net.Attach(*nodes)
class LSRRReturn (object):
def __init__ (self, net, ctx, e0, e1, **nodes):
self.net = net
self.ctx = ctx
self.e0 = e0
self.e1 = e1
for k, v in nodes.iteritems():
setattr(self, k, v)
self.check = components.PropertyChecker (ctx, net)
return LSRRReturn (net, ctx, e0, e1, **node_dict)
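# Illustrative usage sketch (added for clarity; not part of the original
# example). The property-checking call is commented out because the exact
# PropertyChecker method name is an assumption, not taken from this file.
def _example_lsrr_usage():
    topo = LSRRExample()
    # The returned object exposes the network, context, end hosts and routers:
    print(topo.net, topo.ctx, topo.e0, topo.e1, topo.a)
    # A verification query would then go through topo.check, e.g.
    # topo.check.CheckIsolationProperty(topo.e0, topo.e1)  # hypothetical name
    return topo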
|
bsd-3-clause
| -5,174,499,600,245,879,000
| 41.774194
| 111
| 0.424962
| false
| 3.090909
| false
| false
| false
|
hjanime/VisTrails
|
vistrails/db/versions/v1_0_2/domain/vistrail.py
|
1
|
9546
|
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
import copy
import hashlib
from auto_gen import DBVistrail as _DBVistrail
from auto_gen import DBAdd, DBChange, DBDelete, DBAbstraction, DBGroup, \
DBModule, DBAnnotation, DBActionAnnotation
from id_scope import IdScope
class DBVistrail(_DBVistrail):
def __init__(self, *args, **kwargs):
_DBVistrail.__init__(self, *args, **kwargs)
self.idScope = IdScope(remap={DBAdd.vtType: 'operation',
DBChange.vtType: 'operation',
DBDelete.vtType: 'operation',
DBAbstraction.vtType: DBModule.vtType,
DBGroup.vtType: DBModule.vtType,
DBActionAnnotation.vtType: \
DBAnnotation.vtType})
self.idScope.setBeginId('action', 1)
self.db_objects = {}
# keep a reference to the current logging information here
self.db_log_filename = None
self.log = None
def __copy__(self):
return DBVistrail.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = _DBVistrail.do_copy(self, new_ids, id_scope, id_remap)
cp.__class__ = DBVistrail
cp.idScope = copy.copy(self.idScope)
cp.db_objects = copy.copy(self.db_objects)
cp.db_log_filename = self.db_log_filename
if self.log is not None:
cp.log = copy.copy(self.log)
else:
cp.log = None
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBVistrail()
new_obj = _DBVistrail.update_version(old_obj, trans_dict, new_obj)
new_obj.update_id_scope()
if hasattr(old_obj, 'db_log_filename'):
new_obj.db_log_filename = old_obj.db_log_filename
if hasattr(old_obj, 'log'):
new_obj.log = old_obj.log
return new_obj
def update_id_scope(self):
def getOldObjId(operation):
if operation.vtType == 'change':
return operation.db_oldObjId
return operation.db_objectId
def getNewObjId(operation):
if operation.vtType == 'change':
return operation.db_newObjId
return operation.db_objectId
for action in self.db_actions:
self.idScope.updateBeginId('action', action.db_id+1)
if action.db_session is not None:
self.idScope.updateBeginId('session', action.db_session + 1)
for operation in action.db_operations:
self.idScope.updateBeginId('operation', operation.db_id+1)
if operation.vtType == 'add' or operation.vtType == 'change':
# update ids of data
self.idScope.updateBeginId(operation.db_what,
getNewObjId(operation)+1)
if operation.db_data is None:
if operation.vtType == 'change':
operation.db_objectId = operation.db_oldObjId
self.db_add_object(operation.db_data)
for annotation in action.db_annotations:
self.idScope.updateBeginId('annotation', annotation.db_id+1)
for annotation in self.db_annotations:
self.idScope.updateBeginId('annotation', annotation.db_id+1)
for annotation in self.db_actionAnnotations:
self.idScope.updateBeginId('annotation', annotation.db_id+1)
def db_add_object(self, obj):
self.db_objects[(obj.vtType, obj.db_id)] = obj
def db_get_object(self, type, id):
return self.db_objects.get((type, id), None)
def db_update_object(self, obj, **kwargs):
# want to swap out old object with a new version
# need this for updating aliases...
# hack it using setattr...
real_obj = self.db_objects[(obj.vtType, obj.db_id)]
for (k, v) in kwargs.iteritems():
if hasattr(real_obj, k):
setattr(real_obj, k, v)
def update_checkout_version(self, app=''):
checkout_key = "__checkout_version_"
action_key = checkout_key + app
annotation_key = action_key + '_annotationhash'
action_annotation_key = action_key + '_actionannotationhash'
# delete previous checkout annotations
deletekeys = [action_key,annotation_key,action_annotation_key]
for key in deletekeys:
while self.db_has_annotation_with_key(key):
a = self.db_get_annotation_by_key(key)
self.db_delete_annotation(a)
# annotation hash - requires annotations to be clean
value = self.hashAnnotations()
if self.db_has_annotation_with_key(annotation_key):
annotation = self.db_get_annotation_by_key(annotation_key)
annotation.db_value = value
else:
annotation=DBAnnotation(self.idScope.getNewId(DBAnnotation.vtType),
annotation_key, value)
self.db_add_annotation(annotation)
# action annotation hash
value = self.hashActionAnnotations()
if self.db_has_annotation_with_key(action_annotation_key):
annotation = self.db_get_annotation_by_key(action_annotation_key)
annotation.db_value = value
else:
annotation=DBAnnotation(self.idScope.getNewId(DBAnnotation.vtType),
action_annotation_key, value)
self.db_add_annotation(annotation)
# last action id hash
if len(self.db_actions) == 0:
value = 0
else:
value = max(v.db_id for v in self.db_actions)
if self.db_has_annotation_with_key(action_key):
annotation = self.db_get_annotation_by_key(action_key)
annotation.db_value = str(value)
else:
annotation=DBAnnotation(self.idScope.getNewId(DBAnnotation.vtType),
action_key, str(value))
self.db_add_annotation(annotation)
def hashAnnotations(self):
annotations = {}
for annotation in self.db_annotations:
if annotation._db_key not in annotations:
annotations[annotation._db_key] = []
if annotation._db_value not in annotations[annotation._db_key]:
annotations[annotation._db_key].append(annotation._db_value)
keys = annotations.keys()
keys.sort()
m = hashlib.md5()
for k in keys:
m.update(str(k))
annotations[k].sort()
for v in annotations[k]:
m.update(str(v))
return m.hexdigest()
def hashActionAnnotations(self):
action_annotations = {}
for action_id, key, value in [[aa.db_action_id, aa.db_key,
aa.db_value] for aa in self.db_actionAnnotations]:
index = (str(action_id), key)
if index not in action_annotations:
action_annotations[index] = []
if value not in action_annotations[index]:
action_annotations[index].append(value)
keys = action_annotations.keys()
keys.sort()
m = hashlib.md5()
for k in keys:
m.update(k[0] + k[1])
action_annotations[k].sort()
for v in action_annotations[k]:
m.update(str(v))
return m.hexdigest()
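# Illustrative usage sketch (added for clarity; not part of the original
# module). A persistence layer would typically stamp a vistrail before
# serializing it; the "git" app label here is hypothetical.
def _example_stamp_checkout(vistrail, app="git"):
    # Stores hashes of the annotations, action annotations and the highest
    # action id under "__checkout_version_<app>"-prefixed annotation keys.
    vistrail.update_checkout_version(app)
    return vistrail.db_annotations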
|
bsd-3-clause
| -2,098,377,584,519,645,000
| 42.589041
| 80
| 0.593233
| false
| 4.154047
| false
| false
| false
|
fake-name/ChromeController
|
ChromeController/manager.py
|
1
|
35830
|
import distutils.spawn
import os.path
import sys
import subprocess
import pprint
import types
import json
import base64
import signal
import pprint
import time
import http.cookiejar
import urllib.parse
import ChromeController.filter_funcs as filter_funcs
from ChromeController.cr_exceptions import ChromeResponseNotReceived
from ChromeController.cr_exceptions import ChromeNavigateTimedOut
from ChromeController.cr_exceptions import ChromeError
from ChromeController.resources import js
# We use the generated wrapper. If you want a different version, use the CLI interface to update.
from ChromeController.Generator.Generated import ChromeRemoteDebugInterface as ChromeRemoteDebugInterface_base
DEFAULT_TIMEOUT_SECS = 10
class RemoteObject():
def __init__(self, object_meta):
self.object_meta = object_meta
# TODO: Allow retrieving/interacting with these.
def __repr__(self):
return "<(Unimplemented) RemoteObject for JS object: '%s'>" % (self.object_meta, )
class ChromeRemoteDebugInterface(ChromeRemoteDebugInterface_base):
'''
Remote control class for Chromium.
'''
def __init__(self,
binary = None,
dbg_port = None,
use_execution_manager = None,
additional_options = [],
visible_size = None,
disable_page = False,
disable_dom = False,
disable_network = False,
*args,
**kwargs):
super().__init__(
binary = binary,
dbg_port = dbg_port,
use_execution_manager = use_execution_manager,
additional_options = additional_options,
*args, **kwargs)
if disable_page:
self.log.debug("Not enabling page debug interface")
else:
self.Page_enable()
if disable_dom:
self.log.debug("Not enabling DOM debug interface")
else:
self.DOM_enable()
if disable_network:
self.log.debug("Not enabling Network debug interface")
else:
self.Network_enable()
if visible_size:
assert isinstance(visible_size, tuple), "visible_size must be a 2-tuple containing 2 integers"
assert len(visible_size) == 2, "visible_size must be a 2-tuple containing 2 integers"
assert all([isinstance(val, int) for val in visible_size]), "visible_size must be a 2-tuple containing 2 integers"
self.log.debug("Visible size overridden to %sx%s" % visible_size)
self.Emulation_setVisibleSize(*visible_size)
else:
self.Emulation_setVisibleSize(1024, 1366)
self.__new_tab_scripts = []
# cr_ver = self.Browser_getVersion()
# self.log.debug("Remote browser version info:")
# self.log.debug(str(cr_ver))
# 'protocolVersion'
# 'product'
# 'revision'
# 'userAgent'
# 'jsVersion'
def update_headers(self, header_args):
'''
Given a set of headers, update both the user-agent
and additional headers for the remote browser.
header_args must be a dict. Keys are the names of
the corresponding HTTP header.
return value is a 2-tuple of the results of the user-agent
update, as well as the extra headers update.
If no 'User-Agent' key is present in the new headers,
the first item in the tuple will be None
'''
assert isinstance(header_args, dict), "header_args must be a dict, passed type was %s" \
% (type(header_args), )
ua = header_args.pop('User-Agent', None)
ret_1 = None
if ua:
ret_1 = self.Network_setUserAgentOverride(userAgent=ua)
ret_2 = self.Network_setExtraHTTPHeaders(headers = header_args)
return (ret_1, ret_2)
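# Illustrative usage sketch (added for clarity; not part of the original
# class). `cr` is assumed to be an already-connected
# ChromeRemoteDebugInterface instance; the header values are hypothetical.
def _example_update_headers(cr):
    ua_ret, hdr_ret = cr.update_headers({
        'User-Agent'      : 'Mozilla/5.0 (X11; Linux x86_64)',
        'Accept-Language' : 'en-US,en;q=0.9',
    })
    # ua_ret is None when the passed dict carries no 'User-Agent' key.
    return ua_ret, hdr_ret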
def __remove_default_members(self, js_object):
ret = []
# This is kind of horrible
for item in js_object:
if 'name' in item:
if item['name'] == '__defineGetter__':
continue
if item['name'] == '__defineSetter__':
continue
if item['name'] == '__lookupGetter__':
continue
if item['name'] == '__lookupSetter__':
continue
if item['name'] == '__proto__':
continue
if item['name'] == 'constructor':
continue
if item['name'] == 'hasOwnProperty':
continue
if item['name'] == 'isPrototypeOf':
continue
if item['name'] == 'propertyIsEnumerable':
continue
if item['name'] == 'toLocaleString':
continue
if item['name'] == 'toString':
continue
if item['name'] == 'valueOf':
continue
if item['name'] == 'ABORT_ERR':
continue
if item['name'] == 'DATA_CLONE_ERR':
continue
if item['name'] == 'INUSE_ATTRIBUTE_ERR':
continue
if item['name'] == 'INVALID_ACCESS_ERR':
continue
if item['name'] == 'INVALID_CHARACTER_ERR':
continue
if item['name'] == 'INVALID_MODIFICATION_ERR':
continue
if item['name'] == 'INVALID_NODE_TYPE_ERR':
continue
if item['name'] == 'INVALID_STATE_ERR':
continue
if item['name'] == 'NAMESPACE_ERR':
continue
if item['name'] == 'NETWORK_ERR':
continue
if item['name'] == 'NO_DATA_ALLOWED_ERR':
continue
if item['name'] == 'NO_MODIFICATION_ALLOWED_ERR':
continue
if item['name'] == 'NOT_FOUND_ERR':
continue
if item['name'] == 'NOT_SUPPORTED_ERR':
continue
if item['name'] == 'QUOTA_EXCEEDED_ERR':
continue
if item['name'] == 'SECURITY_ERR':
continue
if item['name'] == 'SYNTAX_ERR':
continue
if item['name'] == 'TIMEOUT_ERR':
continue
if item['name'] == 'TYPE_MISMATCH_ERR':
continue
if item['name'] == 'URL_MISMATCH_ERR':
continue
if item['name'] == 'VALIDATION_ERR':
continue
if item['name'] == 'WRONG_DOCUMENT_ERR':
continue
if item['name'] == 'DOMSTRING_SIZE_ERR':
continue
if item['name'] == 'HIERARCHY_REQUEST_ERR':
continue
if item['name'] == 'INDEX_SIZE_ERR':
continue
ret.append(item)
return ret
def __unpack_object(self, object):
assert isinstance(object, dict), "Object values must be a dict! Passed %s (%s)" % (type(object), object)
ret = {}
for key, value in object.items():
assert isinstance(key, str)
if isinstance(value, str):
ret[key] = value
elif isinstance(value, int):
ret[key] = value
elif isinstance(value, float):
ret[key] = value
elif value is None: # Dammit, NoneType isn't exposed
ret[key] = value
elif value in (True, False):
ret[key] = value
elif isinstance(value, dict):
ret[key] = self.__unpack_object(value)
else:
raise ValueError("Unknown type in object: %s (%s)" % (type(value), value))
return ret
def __decode_serialized_value(self, value):
assert 'type' in value, "Missing 'type' key from value: '%s'" % (value, )
if 'get' in value and 'set' in value:
self.log.debug("Unserializable remote script object")
return RemoteObject(value['objectId'])
if value['type'] == 'object' and 'objectId' in value:
self.log.debug("Unserializable remote script object")
return RemoteObject(value['objectId'])
assert 'value' in value, "Missing 'value' key from value: '%s'" % (value, )
if value['type'] == 'number':
return float(value['value'])
if value['type'] == 'string':
return value['value']
if value['type'] == 'object':
return self.__unpack_object(value['value'])
# Special case for null/none objects
if (
'subtype' in value
and
value['subtype'] == 'null'
and
value['type'] == 'object'
and
value['value'] is None):
return None
self.log.warning("Unknown serialized javascript value of type %s", value['type'])
self.log.warning("Complete value: %s", value)
return value
def _unpack_xhr_resp(self, values):
ret = {}
# Handle single objects without all the XHR stuff.
# This seems to be a chrome 84 change.
if set(values.keys()) == set(['type', 'value']):
if values['type'] == 'object':
return self.__decode_serialized_value(values)
for entry in values:
# assert 'configurable' in entry, "'configurable' missing from entry (%s, %s)" % (entry, values)
# assert 'enumerable' in entry, "'enumerable' missing from entry (%s, %s)" % (entry, values)
# assert 'isOwn' in entry, "'isOwn' missing from entry (%s, %s)" % (entry, values)
assert 'name' in entry, "'name' missing from entry (%s, %s)" % (entry, values)
assert 'value' in entry, "'value' missing from entry (%s, %s)" % (entry, values)
# assert 'writable' in entry, "'writable' missing from entry (%s, %s)" % (entry, values)
if 'isOwn' in entry and entry['isOwn'] is False:
continue
assert entry['name'] not in ret
ret[entry['name']] = self.__decode_serialized_value(entry['value'])
return ret
def xhr_fetch(self, url, headers=None, post_data=None, post_type=None):
'''
Execute an XMLHttpRequest() for content at `url`. If
`headers` are specified, they must be a dict of string:string
header:values. post_data must also be pre-encoded.
Note that this will be affected by the same-origin policy of the current
page, so it can fail if you are requesting content from another domain and
the current site has restrictive same-origin policies (which is very common).
'''
'''
If you're thinking this is kind of a hack, well, it is.
We also cheat a bunch and use synchronous XMLHttpRequest()s, because it is
SO much easier.
'''
js_script = '''
function (url, headers, post_data, post_type){
var req = new XMLHttpRequest();
// We use sync calls, since we want to wait until the call completes
// This will probably be deprecated at some point.
if (post_data)
{
req.open("POST", url, false);
if (post_type)
req.setRequestHeader("Content-Type", post_type);
}
else
req.open("GET", url, false);
if (headers)
{
let entries = Object.entries(headers);
for (let idx = 0; idx < entries.length; idx += 1)
{
req.setRequestHeader(entries[idx][0], entries[idx][1]);
}
}
if (post_data)
req.send(post_data);
else
req.send();
return {
url : url,
headers : headers,
resp_headers : req.getAllResponseHeaders(),
post : post_data,
response : req.responseText,
mimetype : req.getResponseHeader("Content-Type"),
code : req.status
};
}
'''
ret = self.execute_javascript_function(js_script, [url, headers, post_data, post_type])
# print()
# print()
# print("XHR Response")
# pprint.pprint(ret)
# print()
# print()
ret = self._unpack_xhr_resp(ret)
return ret
# if
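# Illustrative usage sketch (added for clarity; not part of the original
# class). The URL and header are hypothetical; the fetch is subject to the
# same-origin caveat described in the docstring above.
def _example_xhr_fetch(cr):
    resp = cr.xhr_fetch("https://example.com/api/item.json",
                        headers={"Accept": "application/json"})
    # The unpacked result carries the body text, mimetype and HTTP status.
    return resp['response'], resp['mimetype'], resp['code']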
def __unwrap_object_return(self, ret):
if "result" in ret and 'result' in ret['result']:
res = ret['result']['result']
if 'objectId' in res:
resp4 = self.Runtime_getProperties(res['objectId'])
if "result" in resp4 and 'result' in resp4['result']:
res_full = resp4['result']['result']
return self.__remove_default_members(res_full)
# Direct POD type return, just use it directly.
if "type" in res and "value" in res:
return res
self.log.error("Failed fetching results from call!")
return ret
def __exec_js(self, script, should_call=False, args=None):
'''
Execute the passed javascript function/statement, optionally with passed
arguments.
Note that if args is not False, or should_call is True the passed script
will be treated as a function definition and called via
`(script).apply(null, args)`. Otherwise, the passed script will simply
be evaluated.
Note that if `script` is not a function, it must be a single statement.
The presence of semicolons not enclosed in a bracket scope will produce
an error.
'''
if args is None:
args = {}
# How chromedriver does this:
# std::unique_ptr<base::Value>* result) {
# std::string json;
# base::JSONWriter::Write(args, &json);
# // TODO(zachconrad): Second null should be array of shadow host ids.
# std::string expression = base::StringPrintf(
# "(%s).apply(null, [null, %s, %s])",
# kCallFunctionScript,
# function.c_str(),
# json.c_str());
if args or should_call:
expression = "({script}).apply(null, JSON.parse({args}))".format(
script=script,
args=repr(json.dumps(args))
)
else:
expression = "({script})".format(
script=script,
)
resp3 = self.Runtime_evaluate(expression=expression, returnByValue=True)
resp4 = self.__unwrap_object_return(resp3)
return resp4
# Interact with http.cookiejar.Cookie() instances
def get_cookies(self):
'''
Retrieve the cookies from the remote browser.
Return value is a list of http.cookiejar.Cookie() instances.
These can be directly used with the various http.cookiejar.XXXCookieJar
cookie management classes.
'''
ret = self.Network_getAllCookies()
assert 'result' in ret, "No return value in function response!"
assert 'cookies' in ret['result'], "No 'cookies' key in function response"
cookies = []
for raw_cookie in ret['result']['cookies']:
# Chromium seems to support the following key values for the cookie dict:
# "name"
# "value"
# "domain"
# "path"
# "expires"
# "httpOnly"
# "session"
# "secure"
#
# This seems supported by the fact that the underlying chromium cookie implementation has
# the following members:
# std::string name_;
# std::string value_;
# std::string domain_;
# std::string path_;
# base::Time creation_date_;
# base::Time expiry_date_;
# base::Time last_access_date_;
# bool secure_;
# bool httponly_;
# CookieSameSite same_site_;
# CookiePriority priority_;
#
# See chromium/net/cookies/canonical_cookie.h for more.
#
# I suspect the python cookie implementation is derived exactly from the standard, while the
# chromium implementation is more of a practically derived structure.
# Network.setCookie
baked_cookie = http.cookiejar.Cookie(
# We assume V0 cookies, principally because I don't think I've /ever/ actually encountered a V1 cookie.
# Chromium doesn't seem to specify it.
version = 0,
name = raw_cookie['name'],
value = raw_cookie['value'],
port = None,
port_specified = False,
domain = raw_cookie['domain'],
domain_specified = True,
domain_initial_dot = False,
path = raw_cookie['path'],
path_specified = False,
secure = raw_cookie['secure'],
expires = raw_cookie['expires'],
discard = raw_cookie['session'],
comment = None,
comment_url = None,
rest = {"httponly":"%s" % raw_cookie['httpOnly']},
rfc2109 = False
)
cookies.append(baked_cookie)
return cookies
def set_cookie(self, cookie):
'''
Add a cookie to the remote chromium instance.
Passed value `cookie` must be an instance of `http.cookiejar.Cookie()`.
'''
# Function path: Network.setCookie
# Domain: Network
# Method name: setCookie
# WARNING: This function is marked 'Experimental'!
# Parameters:
# Required arguments:
# 'url' (type: string) -> The request-URI to associate with the setting of the cookie. This value can affect the default domain and path values of the created cookie.
# 'name' (type: string) -> The name of the cookie.
# 'value' (type: string) -> The value of the cookie.
# Optional arguments:
# 'domain' (type: string) -> If omitted, the cookie becomes a host-only cookie.
# 'path' (type: string) -> Defaults to the path portion of the url parameter.
# 'secure' (type: boolean) -> Defaults to false.
# 'httpOnly' (type: boolean) -> Defaults to false.
# 'sameSite' (type: CookieSameSite) -> Defaults to browser default behavior.
# 'expirationDate' (type: Timestamp) -> If omitted, the cookie becomes a session cookie.
# Returns:
# 'success' (type: boolean) -> True if successfully set cookie.
# Description: Sets a cookie with the given cookie data; may overwrite equivalent cookies if they exist.
assert isinstance(cookie, http.cookiejar.Cookie), 'The value passed to `set_cookie` must be an instance of http.cookiejar.Cookie().' + \
' Passed: %s ("%s").' % (type(cookie), cookie)
# Yeah, the cookielib stores this attribute as a string, despite it containing a
# boolean value. No idea why.
is_http_only = str(cookie.get_nonstandard_attr('httponly', 'False')).lower() == "true"
# I'm unclear what the "url" field is actually for. A cookie only needs the domain and
# path component to be fully defined. Considering the API apparently allows the domain and
# path parameters to be unset, I think the "url" field is partially redundant, with some
# strange interactions with mode-changing between host-only and more general
# cookies depending on what's set where.
# Anyways, given we need a URL for the API to work properly, we produce a fake
# host url by building it out of the relevant cookie properties.
fake_url = urllib.parse.urlunsplit((
"http" if is_http_only else "https", # Scheme
cookie.domain, # netloc
cookie.path, # path
'', # query
'', # fragment
))
params = {
'url' : fake_url,
'name' : cookie.name,
'value' : cookie.value if cookie.value else "",
'domain' : cookie.domain,
'path' : cookie.path,
'secure' : cookie.secure,
'expires' : float(cookie.expires) if cookie.expires else float(2**32),
'httpOnly' : is_http_only,
# The "sameSite" flag appears to be a chromium-only extension for controlling
# cookie sending in non-first-party contexts. See:
# https://bugs.chromium.org/p/chromium/issues/detail?id=459154
# Anyways, we just use the default here, whatever that is.
# sameSite = cookie.xxx
}
ret = self.Network_setCookie(**params)
return ret
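# Illustrative usage sketch (added for clarity; not part of the original
# class). Because get_cookies()/set_cookie() speak http.cookiejar.Cookie,
# persisting a session to disk is straightforward; the filename is hypothetical.
def _example_persist_cookies(cr, path="cookies.lwp"):
    jar = http.cookiejar.LWPCookieJar(path)
    for cookie in cr.get_cookies():
        jar.set_cookie(cookie)
    jar.save(ignore_discard=True)
    # Restoring is the reverse: jar.load(), then cr.set_cookie(c) for each c.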
def clear_cookies(self):
'''
At this point, this is just a thin shim around the Network_clearBrowserCookies() operation.
That function postdates the clear_cookies() call here.
'''
self.Network_clearBrowserCookies()
def navigate_to(self, url):
'''
Trigger a page navigation to url `url`.
Note that this is done via javascript injection, and as such results in
the `referer` header being sent with the url of the network location.
This is useful when a page's navigation is stateful, or for simple
cases of referrer spoofing.
'''
assert "'" not in url
return self.__exec_js("window.location.href = '{}'".format(url))
def get_current_url(self):
'''
Probe the remote session for the current window URL.
This is primarily used to do things like unwrap redirects,
or circumvent outbound url wrappers.
'''
res = self.Page_getNavigationHistory()
assert 'result' in res
assert 'currentIndex' in res['result']
assert 'entries' in res['result']
return res['result']['entries'][res['result']['currentIndex']]['url']
def get_page_url_title(self):
'''
Get the title and current url from the remote session.
Return is a 2-tuple: (page_title, page_url).
'''
cr_tab_id = self.transport._get_cr_tab_meta_for_key(self.tab_id)['id']
targets = self.Target_getTargets()
assert 'result' in targets
assert 'targetInfos' in targets['result']
for tgt in targets['result']['targetInfos']:
if tgt['targetId'] == cr_tab_id:
# {
# 'title': 'Page Title 1',
# 'targetId': '9d2c503c-e39e-42cc-b950-96db073918ee',
# 'attached': True,
# 'url': 'http://localhost:47181/with_title_1',
# 'type': 'page'
# }
title = tgt['title']
cur_url = tgt['url']
return title, cur_url
def click_link_containing_url(self, url):
'''
TODO
'''
# exec_func =
self.__exec_js("window.location.href = '/test'")
# js.kCallFunctionScript
# "window.history.back();"
# elem = self.find_element("//a".format(url))
# print(elem)
def execute_javascript_statement(self, script):
'''
Execute a javascript string in the context of the browser tab.
This only works for simple JS statements. More complex usage should
be via execute_javascript_function().
This can also be used to interrogate the JS interpreter, as simply passing
variable names of interest will return the variable value.
'''
ret = self.__exec_js(script=script)
return ret
def execute_javascript_function(self, script, args=None):
'''
Execute a javascript function in the context of the browser tab.
The passed script must be a single function definition, which will
be called via ({script}).apply(null, {args}).
'''
ret = self.__exec_js(script=script, should_call=True, args=args)
return ret
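# Illustrative usage sketch (added for clarity; not part of the original
# class). The script must be a bare function definition, as noted above; the
# prefix argument is just an example payload.
def _example_get_title(cr):
    js = "function(prefix){ return prefix + document.title; }"
    return cr.execute_javascript_function(js, ["title: "])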
def find_element(self, search):
'''
DOM_performSearch(self, query, includeUserAgentShadowDOM)
Python Function: DOM_performSearch
Domain: DOM
Method name: performSearch
WARNING: This function is marked 'Experimental'!
Parameters:
'query' (type: string) -> Plain text or query selector or XPath search query.
'includeUserAgentShadowDOM' (type: boolean) -> True to search in user agent shadow DOM.
Returns:
'searchId' (type: string) -> Unique search session identifier.
'resultCount' (type: integer) -> Number of search results.
Description: Searches for a given string in the DOM tree. Use <code>getSearchResults</code> to access search results or <code>cancelSearch</code> to end this search session.
Python Function: DOM_getSearchResults
Domain: DOM
Method name: getSearchResults
WARNING: This function is marked 'Experimental'!
Parameters:
'searchId' (type: string) -> Unique search session identifier.
'fromIndex' (type: integer) -> Start index of the search result to be returned.
'toIndex' (type: integer) -> End index of the search result to be returned.
Returns:
'nodeIds' (type: array) -> Ids of the search result nodes.
Description: Returns search results from given <code>fromIndex</code> to given <code>toIndex</code> from the search with the given identifier.
DOM_discardSearchResults(self, searchId)
Python Function: DOM_discardSearchResults
Domain: DOM
Method name: discardSearchResults
WARNING: This function is marked 'Experimental'!
Parameters:
'searchId' (type: string) -> Unique search session identifier.
No return value.
Description: Discards search results from the session with the given id. <code>getSearchResults</code> should no longer be called for that search.
'''
res = self.DOM_performSearch(search, includeUserAgentShadowDOM=False)
assert 'result' in res
assert 'searchId' in res['result']
searchid = res['result']['searchId']
res_cnt = res['result']['resultCount']
self.log.debug("%s", res)
self.log.debug("%s", searchid)
if res_cnt == 0:
return None
items = self.DOM_getSearchResults(searchId=searchid, fromIndex=0, toIndex=res_cnt)
self.log.debug("Results:")
self.log.debug("%s", items)
# DOM_getSearchResults
def click_element(self, contains_url):
'''
TODO
ChromeDriver source for how to click an element:
Status ExecuteClickElement(Session* session,
WebView* web_view,
const std::string& element_id,
const base::DictionaryValue& params,
std::unique_ptr<base::Value>* value) {
std::string tag_name;
Status status = GetElementTagName(session, web_view, element_id, &tag_name);
if (status.IsError())
return status;
if (tag_name == "option") {
bool is_toggleable;
status = IsOptionElementTogglable(
session, web_view, element_id, &is_toggleable);
if (status.IsError())
return status;
if (is_toggleable)
return ToggleOptionElement(session, web_view, element_id);
else
return SetOptionElementSelected(session, web_view, element_id, true);
} else {
WebPoint location;
status = GetElementClickableLocation(
session, web_view, element_id, &location);
if (status.IsError())
return status;
std::list<MouseEvent> events;
events.push_back(
MouseEvent(kMovedMouseEventType, kNoneMouseButton,
location.x, location.y, session->sticky_modifiers, 0));
events.push_back(
MouseEvent(kPressedMouseEventType, kLeftMouseButton,
location.x, location.y, session->sticky_modifiers, 1));
events.push_back(
MouseEvent(kReleasedMouseEventType, kLeftMouseButton,
location.x, location.y, session->sticky_modifiers, 1));
status =
web_view->DispatchMouseEvents(events, session->GetCurrentFrameId());
if (status.IsOk())
session->mouse_position = location;
return status;
}
}
'''
pass
def get_unpacked_response_body(self, requestId, mimetype="application/unknown"):
'''
Return an unpacked, decoded response body from Network_getResponseBody()
'''
content = self.Network_getResponseBody(requestId)
assert 'result' in content
result = content['result']
assert 'base64Encoded' in result
assert 'body' in result
if result['base64Encoded']:
content = base64.b64decode(result['body'])
else:
content = result['body']
self.log.info("Navigate complete. Received %s byte response with type %s.", len(content), mimetype)
return {'binary' : result['base64Encoded'], 'mimetype' : mimetype, 'content' : content}
def handle_page_location_changed(self, timeout=None):
'''
If the chrome tab has internally redirected (generally because jerberscript), this
will walk the page navigation responses and attempt to fetch the response body for
the tab's latest location.
'''
# In general, this is often called after other mechanisms have confirmed
# that the tab has already navigated. As such, we want to not wait a while
# to discover something went wrong, so use a timeout that basically just
# results in checking the available buffer, and nothing else.
if not timeout:
timeout = 0.1
self.log.debug("We may have redirected. Checking.")
messages = self.transport.recv_all_filtered(filter_funcs.capture_loading_events, tab_key=self.tab_id)
if not messages:
raise ChromeError("Couldn't track redirect! No idea what to do!")
last_message = messages[-1]
self.log.info("Probably a redirect! New content url: '%s'", last_message['params']['documentURL'])
resp = self.transport.recv_filtered(filter_funcs.network_response_recieved_for_url(last_message['params']['documentURL'], last_message['params']['frameId']), tab_key=self.tab_id)
resp = resp['params']
ctype = 'application/unknown'
resp_response = resp['response']
if 'mimeType' in resp_response:
ctype = resp_response['mimeType']
if 'headers' in resp_response and 'content-type' in resp_response['headers']:
ctype = resp_response['headers']['content-type'].split(";")[0]
# We assume the last document request was the redirect.
# This is /probably/ kind of a poor practice, but what the hell.
# I have no idea what this would do if there are non-html documents (or if that can even happen.)
return self.get_unpacked_response_body(last_message['params']['requestId'], mimetype=ctype)
def blocking_navigate_and_get_source(self, url, timeout=DEFAULT_TIMEOUT_SECS):
'''
Do a blocking navigate to url `url`, and then extract the
response body and return that.
This effectively returns the *unrendered* page content that's sent over the wire. As such,
if the page does any modification of the contained markup during rendering (via javascript), this
function will not reflect the changes made by the javascript.
The rendered page content can be retrieved by calling `get_rendered_page_source()`.
Due to the remote api structure, accessing the raw content after the content has been loaded
is not possible, so any task requiring the raw content must be careful to request it
before it actually navigates to said content.
Return value is a dictionary with two keys:
{
'binary' : (boolean, true if content is binary, false if not)
'content' : (string or bytestring, depending on whether `binary` is true or not)
}
'''
resp = self.blocking_navigate(url, timeout)
assert 'requestId' in resp
assert 'response' in resp
# self.log.debug('blocking_navigate Response %s', pprint.pformat(resp))
ctype = 'application/unknown'
resp_response = resp['response']
if 'mimeType' in resp_response:
ctype = resp_response['mimeType']
if 'headers' in resp_response and 'content-type' in resp_response['headers']:
ctype = resp_response['headers']['content-type'].split(";")[0]
self.log.debug("Trying to get response body")
try:
ret = self.get_unpacked_response_body(resp['requestId'], mimetype=ctype)
except ChromeError:
ret = self.handle_page_location_changed(timeout)
return ret
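# Illustrative usage sketch (added for clarity; not part of the original
# class). The URL is hypothetical; it contrasts the raw over-the-wire body
# with the rendered DOM.
def _example_fetch_page(cr):
    raw = cr.blocking_navigate_and_get_source("https://example.com/")
    rendered = cr.get_rendered_page_source()
    # raw['content'] is the unrendered body; `rendered` reflects any
    # javascript-driven DOM changes applied after load.
    return raw, rendered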
def get_rendered_page_source(self, dom_idle_requirement_secs=3, max_wait_timeout=30):
'''
Get the HTML markup for the current page.
This is done by looking up the root DOM node, and then requesting the outer HTML
for that node ID.
This call's return value will reflect any modifications made by javascript to the
page. For unmodified content, use `blocking_navigate_and_get_source()`
dom_idle_requirement_secs specifies the period of time for which there must have been no
DOM modifications before treating the rendered output as "final". This call will therefore block for
at least dom_idle_requirement_secs seconds.
'''
# There are a bunch of events which generally indicate a page is still doing *things*.
# I have some concern about how this will handle things like advertisements, which
# basically load crap forever. That's why we have the max_wait_timeout.
target_events = [
"Page.frameResized",
"Page.frameStartedLoading",
"Page.frameNavigated",
"Page.frameAttached",
"Page.frameStoppedLoading",
"Page.frameScheduledNavigation",
"Page.domContentEventFired",
"Page.frameClearedScheduledNavigation",
"Page.loadEventFired",
"DOM.documentUpdated",
"DOM.childNodeInserted",
"DOM.childNodeRemoved",
"DOM.childNodeCountUpdated",
]
start_time = time.time()
try:
while 1:
if time.time() - start_time > max_wait_timeout:
self.log.debug("Page was not idle after waiting %s seconds. Giving up and extracting content now.", max_wait_timeout)
self.transport.recv_filtered(
filter_funcs.wait_for_methods(target_events),
tab_key = self.tab_id,
timeout = dom_idle_requirement_secs
)
except ChromeResponseNotReceived:
# We timed out, the DOM is probably idle.
pass
# We have to find the DOM root node ID
dom_attr = self.DOM_getDocument(depth=-1, pierce=False)
assert 'result' in dom_attr
assert 'root' in dom_attr['result']
assert 'nodeId' in dom_attr['result']['root']
# Now, we have the root node ID.
root_node_id = dom_attr['result']['root']['nodeId']
# Use that to get the HTML for the specified node
response = self.DOM_getOuterHTML(nodeId=root_node_id)
assert 'result' in response
assert 'outerHTML' in response['result']
return response['result']['outerHTML']
def take_screeshot(self):
'''
Take a screenshot of the virtual viewport content.
Return value is a png image as a bytestring.
'''
resp = self.Page_captureScreenshot()
assert 'result' in resp
assert 'data' in resp['result']
imgdat = base64.b64decode(resp['result']['data'])
return imgdat
def blocking_navigate(self, url, timeout=DEFAULT_TIMEOUT_SECS):
'''
Do a blocking navigate to url `url`.
This function triggers a navigation, and then waits for the browser
to claim the page has finished loading.
Roughly, this corresponds to the javascript `DOMContentLoaded` event,
meaning the dom for the page is ready.
Internals:
A navigation command results in a sequence of events:
- Page.frameStartedLoading" (with frameid)
- Page.frameStoppedLoading" (with frameid)
- Page.loadEventFired" (not attached to an ID)
Therefore, this call triggers a navigation operation,
and then waits for the expected set of response event messages.
'''
self.transport.flush(tab_key=self.tab_id)
self.log.debug("Blocking navigate to URL: '%s'", url)
ret = self.Page_navigate(url = url)
assert("result" in ret), "Missing return content"
assert("frameId" in ret['result']), "Missing 'frameId' in return content"
assert("loaderId" in ret['result']), "Missing 'loaderId' in return content"
expected_id = ret['result']['frameId']
loader_id = ret['result']['loaderId']
try:
self.log.debug("Waiting for frame navigated command response.")
self.transport.recv_filtered(filter_funcs.check_frame_navigated_command(expected_id), tab_key=self.tab_id, timeout=timeout)
self.log.debug("Waiting for frameStartedLoading response.")
self.transport.recv_filtered(filter_funcs.check_frame_load_command("Page.frameStartedLoading"), tab_key=self.tab_id, timeout=timeout)
self.log.debug("Waiting for frameStoppedLoading response.")
self.transport.recv_filtered(filter_funcs.check_frame_load_command("Page.frameStoppedLoading"), tab_key=self.tab_id, timeout=timeout)
# self.transport.recv_filtered(check_load_event_fired, tab_key=self.tab_id, timeout=timeout)
self.log.debug("Waiting for responseReceived response.")
resp = self.transport.recv_filtered(filter_funcs.network_response_recieved_for_url(url=None, expected_id=expected_id), tab_key=self.tab_id, timeout=timeout)
if resp is None:
raise ChromeNavigateTimedOut("Blocking navigate timed out!")
return resp['params']
# The `Page.frameNavigated ` event does not get fired for non-markup responses.
# Therefore, if we timeout on waiting for that, check to see if we received a binary response.
except ChromeResponseNotReceived:
# So this is basically broken, fix is https://bugs.chromium.org/p/chromium/issues/detail?id=831887
# but that bug report isn't fixed yet.
# Siiiigh.
self.log.warning("Failed to receive expected response to navigate command. Checking if response is a binary object.")
resp = self.transport.recv_filtered(
keycheck = filter_funcs.check_frame_loader_command(
method_name = "Network.responseReceived",
loader_id = loader_id
),
tab_key = self.tab_id,
timeout = timeout)
return resp['params']
def new_tab(self, *args, **kwargs):
tab = super().new_tab(*args, **kwargs)
for script in self.__new_tab_scripts:
tab.Page_addScriptToEvaluateOnNewDocument(script)
return tab
def install_evasions(self):
'''
Load headless detection evasions from the puppeteer-extra repository (
https://github.com/berstend/puppeteer-extra/tree/master/packages/puppeteer-extra-plugin-stealth/evasions).
'''
from ChromeController.resources import evasions
scripts = evasions.load_evasions()
self.__new_tab_scripts.extend(scripts.values())
for script, contents in scripts.items():
print("Loading '%s'" % script)
ret = self.Page_addScriptToEvaluateOnNewDocument(contents)
pprint.pprint(ret)
ret2 = self.execute_javascript_function("function()" + contents)
pprint.pprint(ret2)
# ret3 = self.execute_javascript_statement(contents)
# pprint.pprint(ret3)
|
bsd-3-clause
| -1,907,269,585,833,294,600
| 31.221223
| 184
| 0.661792
| false
| 3.491522
| false
| false
| false
|
bdang2012/taiga-back-casting
|
taiga/projects/signals.py
|
1
|
3787
|
# Copyright (C) 2014-2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014-2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import apps
from django.conf import settings
from taiga.projects.services.tags_colors import update_project_tags_colors_handler, remove_unused_tags
from taiga.projects.notifications.services import create_notify_policy_if_not_exists
from taiga.base.utils.db import get_typename_for_model_class
####################################
# Signals over project items
####################################
## TAGS
def tags_normalization(sender, instance, **kwargs):
if isinstance(instance.tags, (list, tuple)):
instance.tags = list(map(str.lower, instance.tags))
def update_project_tags_when_create_or_edit_taggable_item(sender, instance, **kwargs):
update_project_tags_colors_handler(instance)
def update_project_tags_when_delete_taggable_item(sender, instance, **kwargs):
remove_unused_tags(instance.project)
instance.project.save()
def membership_post_delete(sender, instance, using, **kwargs):
instance.project.update_role_points()
def create_notify_policy(sender, instance, using, **kwargs):
if instance.user:
create_notify_policy_if_not_exists(instance.project, instance.user)
def project_post_save(sender, instance, created, **kwargs):
"""
Populate new project dependent default data
"""
if not created:
return
if instance._importing:
return
template = getattr(instance, "creation_template", None)
if template is None:
ProjectTemplate = apps.get_model("projects", "ProjectTemplate")
template = ProjectTemplate.objects.get(slug=settings.DEFAULT_PROJECT_TEMPLATE)
template.apply_to_project(instance)
instance.save()
Role = apps.get_model("users", "Role")
try:
owner_role = instance.roles.get(slug=template.default_owner_role)
except Role.DoesNotExist:
owner_role = instance.roles.first()
if owner_role:
Membership = apps.get_model("projects", "Membership")
Membership.objects.create(user=instance.owner, project=instance, role=owner_role,
is_owner=True, email=instance.owner.email)
def try_to_close_or_open_user_stories_when_edit_us_status(sender, instance, created, **kwargs):
from taiga.projects.userstories import services
for user_story in instance.user_stories.all():
if services.calculate_userstory_is_closed(user_story):
services.close_userstory(user_story)
else:
services.open_userstory(user_story)
def try_to_close_or_open_user_stories_when_edit_task_status(sender, instance, created, **kwargs):
from taiga.projects.userstories import services
UserStory = apps.get_model("userstories", "UserStory")
for user_story in UserStory.objects.filter(tasks__status=instance).distinct():
if services.calculate_userstory_is_closed(user_story):
services.close_userstory(user_story)
else:
services.open_userstory(user_story)
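# Illustrative wiring sketch (added for clarity; not part of the original
# module). Taiga connects these receivers in its AppConfig; the dispatch_uid
# values below are hypothetical.
def _example_connect_signals():
    from django.db.models import signals as dj_signals
    Project = apps.get_model("projects", "Project")
    Membership = apps.get_model("projects", "Membership")
    dj_signals.post_save.connect(project_post_save, sender=Project,
                                 dispatch_uid="project_post_save")
    dj_signals.post_delete.connect(membership_post_delete, sender=Membership,
                                   dispatch_uid="membership_post_delete")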
|
agpl-3.0
| -6,576,361,779,421,681,000
| 35.747573
| 102
| 0.705416
| false
| 3.773679
| false
| false
| false
|
reinvantveer/edna-ld
|
etl/lib/MIFparser.py
|
1
|
2473
|
from osgeo import gdal
import signal
from osgeo import ogr
import json
gdal.UseExceptions()
"""
This module has a heavy dependency on the python GDAL package, which can be
a total pain in the ass to install, depending on your platform. But it is needed
for parsing the Mapinfo Interchange Format (MIF) files...
Support for Windows is easiest through the OSGeo4W installer
"""
class MIFparser:
"""
This class is responsible for reading MapInfo Interchange Format files.
They are recognizable by the .mif (upper or lowercase) file extension.
"""
# Catch segmentation faults
@staticmethod
def _sig_handler(signum, frame):
raise ValueError("segfault")
@staticmethod
def to_dict(file_path):
# TODO: write code to actually handle the error!
# signal.signal(signal.SIGSEGV, MIFparser._sig_handler)
wkt_features = [] # Initialize empty array of target features
try:
data_source = ogr.Open(file_path, 0)
except Exception as e:
raise ValueError(e)
data_source = gdal.ogr.Open(file_path, 0)
err = gdal.GetLastErrorMsg()
if err:
raise ValueError(err + ' on ' + file_path)
if not data_source:
raise ValueError('Unable to read data from file %s' % file_path)
layer = data_source.GetLayer()
err = gdal.GetLastErrorMsg()
if err:
raise ValueError(err + ' on ' + file_path)
for feature in layer:
# shortcut to dumping non-geometry attributes from feature to our dictionary
try:
geojson = feature.ExportToJson()
except Exception as e:
raise ValueError('Unable to extract features from file %s due to %s' % (file_path, e))
geojson_as_dict = json.loads(geojson)
wkt_feature = geojson_as_dict['properties']
# tack on the geometry as well-known text
geom = feature.GetGeometryRef()
err = gdal.GetLastErrorMsg()
if err:
raise ValueError(err + ' on ' + file_path)
if not geom:
raise ValueError('Unable to extract geometries from %s' % file_path)
wkt_feature['WKT'] = geom.ExportToWkt()
wkt_features.append(wkt_feature)
if not wkt_features:
raise ValueError('Unable to extract features from %s' % file_path)
return wkt_features
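# Illustrative usage sketch (added for clarity; not part of the original
# module). The .mif path is hypothetical.
def _example_mif_to_dict():
    features = MIFparser.to_dict('example.mif')
    for feature in features:
        # Each feature is a plain dict of attributes plus a 'WKT' geometry string.
        print(feature['WKT'])
    return features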
|
mit
| 209,170,776,351,899,870
| 29.9125
| 102
| 0.615042
| false
| 4.220137
| false
| false
| false
|
stephanie-wang/ray
|
python/ray/tune/schedulers/async_hyperband.py
|
1
|
7105
|
import logging
import numpy as np
from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler
logger = logging.getLogger(__name__)
class AsyncHyperBandScheduler(FIFOScheduler):
"""Implements the Async Successive Halving.
This should provide similar theoretical performance as HyperBand but
avoid straggler issues that HyperBand faces. One implementation detail
is when using multiple brackets, trial allocation to bracket is done
randomly with over a softmax probability.
See https://arxiv.org/abs/1810.05934
Args:
time_attr (str): A training result attr to use for comparing time.
Note that you can pass in something non-temporal such as
`training_iteration` as a measure of progress, the only requirement
is that the attribute should increase monotonically.
metric (str): The training result objective value attribute. Stopping
procedures will use this attribute.
mode (str): One of {min, max}. Determines whether objective is
minimizing or maximizing the metric attribute.
max_t (float): max time units per trial. Trials will be stopped after
max_t time units (determined by time_attr) have passed.
grace_period (float): Only stop trials at least this old in time.
The units are the same as the attribute named by `time_attr`.
reduction_factor (float): Used to set halving rate and amount. This
is simply a unit-less scalar.
brackets (int): Number of brackets. Each bracket has a different
halving rate, specified by the reduction factor.
"""
def __init__(self,
time_attr="training_iteration",
reward_attr=None,
metric="episode_reward_mean",
mode="max",
max_t=100,
grace_period=1,
reduction_factor=4,
brackets=1):
assert max_t > 0, "Max (time_attr) not valid!"
assert max_t >= grace_period, "grace_period must be <= max_t!"
assert grace_period > 0, "grace_period must be positive!"
assert reduction_factor > 1, "Reduction Factor not valid!"
assert brackets > 0, "brackets must be positive!"
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"
if reward_attr is not None:
mode = "max"
metric = reward_attr
logger.warning(
"`reward_attr` is deprecated and will be removed in a future "
"version of Tune. "
"Setting `metric={}` and `mode=max`.".format(reward_attr))
FIFOScheduler.__init__(self)
self._reduction_factor = reduction_factor
self._max_t = max_t
self._trial_info = {} # Stores Trial -> Bracket
# Tracks state for new trial add
self._brackets = [
_Bracket(grace_period, max_t, reduction_factor, s)
for s in range(brackets)
]
self._counter = 0 # for
self._num_stopped = 0
self._metric = metric
if mode == "max":
self._metric_op = 1.
elif mode == "min":
self._metric_op = -1.
self._time_attr = time_attr
def on_trial_add(self, trial_runner, trial):
sizes = np.array([len(b._rungs) for b in self._brackets])
probs = np.e**(sizes - sizes.max())
normalized = probs / probs.sum()
idx = np.random.choice(len(self._brackets), p=normalized)
self._trial_info[trial.trial_id] = self._brackets[idx]
def on_trial_result(self, trial_runner, trial, result):
action = TrialScheduler.CONTINUE
if self._time_attr not in result or self._metric not in result:
return action
if result[self._time_attr] >= self._max_t:
action = TrialScheduler.STOP
else:
bracket = self._trial_info[trial.trial_id]
action = bracket.on_result(trial, result[self._time_attr],
self._metric_op * result[self._metric])
if action == TrialScheduler.STOP:
self._num_stopped += 1
return action
def on_trial_complete(self, trial_runner, trial, result):
if self._time_attr not in result or self._metric not in result:
return
bracket = self._trial_info[trial.trial_id]
bracket.on_result(trial, result[self._time_attr],
self._metric_op * result[self._metric])
del self._trial_info[trial.trial_id]
def on_trial_remove(self, trial_runner, trial):
del self._trial_info[trial.trial_id]
def debug_string(self):
out = "Using AsyncHyperBand: num_stopped={}".format(self._num_stopped)
out += "\n" + "\n".join([b.debug_str() for b in self._brackets])
return out
class _Bracket():
"""Bookkeeping system to track the cutoffs.
Rungs are created in reversed order so that we can more easily find
the correct rung corresponding to the current iteration of the result.
Example:
>>> b = _Bracket(1, 10, 2, 3)
>>> b.on_result(trial1, 1, 2) # CONTINUE
>>> b.on_result(trial2, 1, 4) # CONTINUE
>>> b.cutoff(b._rungs[-1][1]) == 3.0 # rungs are reversed
>>> b.on_result(trial3, 1, 1) # STOP
>>> b.cutoff(b._rungs[0][1]) == 2.0
"""
def __init__(self, min_t, max_t, reduction_factor, s):
self.rf = reduction_factor
MAX_RUNGS = int(np.log(max_t / min_t) / np.log(self.rf) - s + 1)
self._rungs = [(min_t * self.rf**(k + s), {})
for k in reversed(range(MAX_RUNGS))]
def cutoff(self, recorded):
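        # Cutoff for continuing past a rung: the (1 - 1/rf) quantile of the
        # rewards recorded there so far, or None until a result is recorded.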
if not recorded:
return None
return np.nanpercentile(
list(recorded.values()), (1 - 1 / self.rf) * 100)
def on_result(self, trial, cur_iter, cur_rew):
action = TrialScheduler.CONTINUE
for milestone, recorded in self._rungs:
if cur_iter < milestone or trial.trial_id in recorded:
continue
else:
cutoff = self.cutoff(recorded)
if cutoff is not None and cur_rew < cutoff:
action = TrialScheduler.STOP
if cur_rew is None:
logger.warning("Reward attribute is None! Consider"
" reporting using a different field.")
else:
recorded[trial.trial_id] = cur_rew
break
return action
def debug_str(self):
iters = " | ".join([
"Iter {:.3f}: {}".format(milestone, self.cutoff(recorded))
for milestone, recorded in self._rungs
])
return "Bracket: " + iters
ASHAScheduler = AsyncHyperBandScheduler
if __name__ == "__main__":
sched = AsyncHyperBandScheduler(
grace_period=1, max_t=10, reduction_factor=2)
print(sched.debug_string())
bracket = sched._brackets[0]
print(bracket.cutoff({str(i): i for i in range(20)}))
|
apache-2.0
| -8,235,873,777,404,223,000
| 38.692737
| 79
| 0.580859
| false
| 3.982623
| false
| false
| false
|
Bjwebb/detecting-clouds
|
test.py
|
1
|
1519
|
from utils import get_sidereal_time
from process import open_fits, flatten_max, DataProcessor
import dateutil.parser
import os, shutil
dp = DataProcessor()
dp.outdir = 'test/out'
dp.verbose = 1
#date_obs = '2011-05-25T06:00:10'
date_obs = '2012-02-29T10:37:12'
name = date_obs + '.fits'
path = os.path.join('sym', name[0:4], name[5:7], name[8:10])
dp.process_file(os.path.join(path, name))
"""
dt = dateutil.parser.parse(name.split('.')[0])
s = get_sidereal_time(dt).seconds
path_end = os.path.join(*[ unicode(x).zfill(2) for x in [ s/3600, (s/60)%60 ] ])
fname = os.path.join('out', 'fits', 'sid', path_end, 'total.fits')
tdata = open_fits(fname)
night = os.listdir(os.path.join('sid', path_end))
for i in [100, 250, 500, 1000, 3000, 4000, 5000, 2000]:
dp.output('total', tdata, image_filter=flatten_max(i*len(night)))
shutil.copyfile(os.path.join('test','out','png','total.png'),
os.path.join('test', 'total{0}.png').format(i))
"""
from django.template import Context, Template
t = Template(open(os.path.join('clouds','templates','clouds','image.html')).read())
from catlib import parse_cat
point_list = map(lambda (i,row):row, parse_cat(os.path.join('test','out','cat',path,date_obs+'.cat')).iterrows())
with open(os.path.join('test',date_obs+'.html'), 'w') as out:
out.write(t.render(Context({'point_list': point_list,
'object': {'get_url': 'sym/'+date_obs[:4]+'/'+date_obs[5:7]+'/'+date_obs[8:10]+'/'+date_obs }
})))
|
mit
| -2,582,954,058,109,025,000
| 36.04878
| 125
| 0.626728
| false
| 2.828678
| false
| false
| false
|
sauloal/cnidaria
|
scripts/venv/lib/python2.7/site-packages/pandas/core/reshape.py
|
1
|
38451
|
# pylint: disable=E1101,E1103
# pylint: disable=W0703,W0622,W0613,W0201
from pandas.compat import range, zip
from pandas import compat
import itertools
import numpy as np
from pandas.core.series import Series
from pandas.core.frame import DataFrame
from pandas.core.sparse import SparseDataFrame, SparseSeries
from pandas.sparse.array import SparseArray
from pandas._sparse import IntIndex
from pandas.core.categorical import Categorical
from pandas.core.common import (notnull, _ensure_platform_int, _maybe_promote,
isnull)
from pandas.core.groupby import get_group_index, _compress_group_index
import pandas.core.common as com
import pandas.algos as algos
from pandas.core.index import MultiIndex, _get_na_value
class _Unstacker(object):
"""
Helper class to unstack data / pivot with multi-level index
Parameters
----------
level : int or str, default last level
Level to "unstack". Accepts a name for the level.
Examples
--------
>>> import pandas as pd
>>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'),
... ('two', 'a'), ('two', 'b')])
>>> s = pd.Series(np.arange(1.0, 5.0), index=index)
>>> s
one a 1
b 2
two a 3
b 4
dtype: float64
>>> s.unstack(level=-1)
a b
one 1 2
two 3 4
>>> s.unstack(level=0)
one two
a 1 2
b 3 4
Returns
-------
unstacked : DataFrame
"""
def __init__(self, values, index, level=-1, value_columns=None):
self.is_categorical = None
if values.ndim == 1:
if isinstance(values, Categorical):
self.is_categorical = values
values = np.array(values)
values = values[:, np.newaxis]
self.values = values
self.value_columns = value_columns
if value_columns is None and values.shape[1] != 1: # pragma: no cover
raise ValueError('must pass column labels for multi-column data')
self.index = index
if isinstance(self.index, MultiIndex):
if index._reference_duplicate_name(level):
msg = ("Ambiguous reference to {0}. The index "
"names are not unique.".format(level))
raise ValueError(msg)
self.level = self.index._get_level_number(level)
# when index includes `nan`, need to lift levels/strides by 1
self.lift = 1 if -1 in self.index.labels[self.level] else 0
self.new_index_levels = list(index.levels)
self.new_index_names = list(index.names)
self.removed_name = self.new_index_names.pop(self.level)
self.removed_level = self.new_index_levels.pop(self.level)
self._make_sorted_values_labels()
self._make_selectors()
def _make_sorted_values_labels(self):
v = self.level
labs = list(self.index.labels)
levs = list(self.index.levels)
to_sort = labs[:v] + labs[v + 1:] + [labs[v]]
sizes = [len(x) for x in levs[:v] + levs[v + 1:] + [levs[v]]]
comp_index, obs_ids = get_compressed_ids(to_sort, sizes)
ngroups = len(obs_ids)
indexer = algos.groupsort_indexer(comp_index, ngroups)[0]
indexer = _ensure_platform_int(indexer)
self.sorted_values = com.take_nd(self.values, indexer, axis=0)
self.sorted_labels = [l.take(indexer) for l in to_sort]
def _make_selectors(self):
new_levels = self.new_index_levels
# make the mask
remaining_labels = self.sorted_labels[:-1]
level_sizes = [len(x) for x in new_levels]
comp_index, obs_ids = get_compressed_ids(remaining_labels, level_sizes)
ngroups = len(obs_ids)
comp_index = _ensure_platform_int(comp_index)
stride = self.index.levshape[self.level] + self.lift
self.full_shape = ngroups, stride
selector = self.sorted_labels[-1] + stride * comp_index + self.lift
mask = np.zeros(np.prod(self.full_shape), dtype=bool)
mask.put(selector, True)
if mask.sum() < len(self.index):
raise ValueError('Index contains duplicate entries, '
'cannot reshape')
self.group_index = comp_index
self.mask = mask
self.unique_groups = obs_ids
self.compressor = comp_index.searchsorted(np.arange(ngroups))
def get_result(self):
# TODO: find a better way than this masking business
values, value_mask = self.get_new_values()
columns = self.get_new_columns()
index = self.get_new_index()
# filter out missing levels
if values.shape[1] > 0:
col_inds, obs_ids = _compress_group_index(self.sorted_labels[-1])
# rare case, level values not observed
if len(obs_ids) < self.full_shape[1]:
inds = (value_mask.sum(0) > 0).nonzero()[0]
values = com.take_nd(values, inds, axis=1)
columns = columns[inds]
# may need to coerce categoricals here
if self.is_categorical is not None:
values = [ Categorical.from_array(values[:,i],
categories=self.is_categorical.categories,
ordered=True)
for i in range(values.shape[-1]) ]
return DataFrame(values, index=index, columns=columns)
def get_new_values(self):
values = self.values
# place the values
length, width = self.full_shape
stride = values.shape[1]
result_width = width * stride
result_shape = (length, result_width)
# if our mask is all True, then we can use our existing dtype
if self.mask.all():
dtype = values.dtype
new_values = np.empty(result_shape, dtype=dtype)
else:
dtype, fill_value = _maybe_promote(values.dtype)
new_values = np.empty(result_shape, dtype=dtype)
new_values.fill(fill_value)
new_mask = np.zeros(result_shape, dtype=bool)
# is there a simpler / faster way of doing this?
for i in range(values.shape[1]):
chunk = new_values[:, i * width: (i + 1) * width]
mask_chunk = new_mask[:, i * width: (i + 1) * width]
chunk.flat[self.mask] = self.sorted_values[:, i]
mask_chunk.flat[self.mask] = True
return new_values, new_mask
def get_new_columns(self):
if self.value_columns is None:
if self.lift == 0:
return self.removed_level
lev = self.removed_level
return lev.insert(0, _get_na_value(lev.dtype.type))
stride = len(self.removed_level) + self.lift
width = len(self.value_columns)
propagator = np.repeat(np.arange(width), stride)
if isinstance(self.value_columns, MultiIndex):
new_levels = self.value_columns.levels + (self.removed_level,)
new_names = self.value_columns.names + (self.removed_name,)
new_labels = [lab.take(propagator)
for lab in self.value_columns.labels]
else:
new_levels = [self.value_columns, self.removed_level]
new_names = [self.value_columns.name, self.removed_name]
new_labels = [propagator]
new_labels.append(np.tile(np.arange(stride) - self.lift, width))
return MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
def get_new_index(self):
result_labels = [lab.take(self.compressor)
for lab in self.sorted_labels[:-1]]
# construct the new index
if len(self.new_index_levels) == 1:
lev, lab = self.new_index_levels[0], result_labels[0]
if (lab == -1).any():
lev = lev.insert(len(lev), _get_na_value(lev.dtype.type))
return lev.take(lab)
return MultiIndex(levels=self.new_index_levels,
labels=result_labels,
names=self.new_index_names,
verify_integrity=False)
def _unstack_multiple(data, clocs):
from pandas.core.groupby import decons_obs_group_ids
if len(clocs) == 0:
return data
# NOTE: This doesn't deal with hierarchical columns yet
index = data.index
clocs = [index._get_level_number(i) for i in clocs]
rlocs = [i for i in range(index.nlevels) if i not in clocs]
clevels = [index.levels[i] for i in clocs]
clabels = [index.labels[i] for i in clocs]
cnames = [index.names[i] for i in clocs]
rlevels = [index.levels[i] for i in rlocs]
rlabels = [index.labels[i] for i in rlocs]
rnames = [index.names[i] for i in rlocs]
shape = [len(x) for x in clevels]
group_index = get_group_index(clabels, shape, sort=False, xnull=False)
comp_ids, obs_ids = _compress_group_index(group_index, sort=False)
recons_labels = decons_obs_group_ids(comp_ids,
obs_ids, shape, clabels, xnull=False)
dummy_index = MultiIndex(levels=rlevels + [obs_ids],
labels=rlabels + [comp_ids],
names=rnames + ['__placeholder__'],
verify_integrity=False)
if isinstance(data, Series):
dummy = Series(data.values, index=dummy_index)
unstacked = dummy.unstack('__placeholder__')
new_levels = clevels
new_names = cnames
new_labels = recons_labels
else:
if isinstance(data.columns, MultiIndex):
result = data
for i in range(len(clocs)):
val = clocs[i]
result = result.unstack(val)
clocs = [val if i > val else val - 1 for val in clocs]
return result
dummy = DataFrame(data.values, index=dummy_index,
columns=data.columns)
unstacked = dummy.unstack('__placeholder__')
if isinstance(unstacked, Series):
unstcols = unstacked.index
else:
unstcols = unstacked.columns
new_levels = [unstcols.levels[0]] + clevels
new_names = [data.columns.name] + cnames
new_labels = [unstcols.labels[0]]
for rec in recons_labels:
new_labels.append(rec.take(unstcols.labels[-1]))
new_columns = MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
if isinstance(unstacked, Series):
unstacked.index = new_columns
else:
unstacked.columns = new_columns
return unstacked
def pivot(self, index=None, columns=None, values=None):
"""
See DataFrame.pivot
"""
if values is None:
indexed = self.set_index([index, columns])
return indexed.unstack(columns)
else:
indexed = Series(self[values].values,
index=MultiIndex.from_arrays([self[index],
self[columns]]))
return indexed.unstack(columns)
def pivot_simple(index, columns, values):
"""
Produce 'pivot' table based on 3 columns of this DataFrame.
Uses unique values from index / columns and fills with values.
Parameters
----------
index : ndarray
Labels to use to make new frame's index
columns : ndarray
Labels to use to make new frame's columns
values : ndarray
Values to use for populating new frame's values
Notes
-----
Obviously, all 3 of the input arguments must have the same length
Returns
-------
DataFrame
"""
if (len(index) != len(columns)) or (len(columns) != len(values)):
raise AssertionError('Length of index, columns, and values must be the'
' same')
if len(index) == 0:
return DataFrame(index=[])
hindex = MultiIndex.from_arrays([index, columns])
series = Series(values.ravel(), index=hindex)
series = series.sortlevel(0)
return series.unstack()
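# A small, hand-checked illustration of pivot_simple (values are made up,
# output shown approximately):
#
#     >>> index = np.array(['one', 'one', 'two', 'two'])
#     >>> columns = np.array(['a', 'b', 'a', 'b'])
#     >>> values = np.array([1., 2., 3., 4.])
#     >>> pivot_simple(index, columns, values)
#            a    b
#     one  1.0  2.0
#     two  3.0  4.0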
def _slow_pivot(index, columns, values):
"""
Produce 'pivot' table based on 3 columns of this DataFrame.
Uses unique values from index / columns and fills with values.
Parameters
----------
index : string or object
Column name to use to make new frame's index
columns : string or object
Column name to use to make new frame's columns
values : string or object
Column name to use for populating new frame's values
Could benefit from some Cython here.
"""
tree = {}
for i, (idx, col) in enumerate(zip(index, columns)):
if col not in tree:
tree[col] = {}
branch = tree[col]
branch[idx] = values[i]
return DataFrame(tree)
def unstack(obj, level):
if isinstance(level, (tuple, list)):
return _unstack_multiple(obj, level)
if isinstance(obj, DataFrame):
if isinstance(obj.index, MultiIndex):
return _unstack_frame(obj, level)
else:
return obj.T.stack(dropna=False)
else:
unstacker = _Unstacker(obj.values, obj.index, level=level)
return unstacker.get_result()
def _unstack_frame(obj, level):
from pandas.core.internals import BlockManager, make_block
if obj._is_mixed_type:
unstacker = _Unstacker(np.empty(obj.shape, dtype=bool), # dummy
obj.index, level=level,
value_columns=obj.columns)
new_columns = unstacker.get_new_columns()
new_index = unstacker.get_new_index()
new_axes = [new_columns, new_index]
new_blocks = []
mask_blocks = []
for blk in obj._data.blocks:
blk_items = obj._data.items[blk.mgr_locs.indexer]
bunstacker = _Unstacker(blk.values.T, obj.index, level=level,
value_columns=blk_items)
new_items = bunstacker.get_new_columns()
new_placement = new_columns.get_indexer(new_items)
new_values, mask = bunstacker.get_new_values()
mblk = make_block(mask.T, placement=new_placement)
mask_blocks.append(mblk)
newb = make_block(new_values.T, placement=new_placement)
new_blocks.append(newb)
result = DataFrame(BlockManager(new_blocks, new_axes))
mask_frame = DataFrame(BlockManager(mask_blocks, new_axes))
return result.ix[:, mask_frame.sum(0) > 0]
else:
unstacker = _Unstacker(obj.values, obj.index, level=level,
value_columns=obj.columns)
return unstacker.get_result()
def get_compressed_ids(labels, sizes):
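    # Compress the observed combinations of the label arrays into dense group
    # ids; returns (comp_ids, obs_group_ids) and is used by _Unstacker when
    # sorting values and building its selectors.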
from pandas.core.groupby import get_group_index
ids = get_group_index(labels, sizes, sort=True, xnull=False)
return _compress_group_index(ids, sort=True)
def stack(frame, level=-1, dropna=True):
"""
Convert DataFrame to Series with multi-level Index. Columns become the
second level of the resulting hierarchical index
Returns
-------
stacked : Series
"""
N, K = frame.shape
if isinstance(frame.columns, MultiIndex):
if frame.columns._reference_duplicate_name(level):
msg = ("Ambiguous reference to {0}. The column "
"names are not unique.".format(level))
raise ValueError(msg)
# Will also convert negative level numbers and check if out of bounds.
level_num = frame.columns._get_level_number(level)
if isinstance(frame.columns, MultiIndex):
return _stack_multi_columns(frame, level_num=level_num, dropna=dropna)
elif isinstance(frame.index, MultiIndex):
new_levels = list(frame.index.levels)
new_levels.append(frame.columns)
new_labels = [lab.repeat(K) for lab in frame.index.labels]
new_labels.append(np.tile(np.arange(K), N).ravel())
new_names = list(frame.index.names)
new_names.append(frame.columns.name)
new_index = MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
else:
ilabels = np.arange(N).repeat(K)
clabels = np.tile(np.arange(K), N).ravel()
new_index = MultiIndex(levels=[frame.index, frame.columns],
labels=[ilabels, clabels],
names=[frame.index.name, frame.columns.name],
verify_integrity=False)
new_values = frame.values.ravel()
if dropna:
mask = notnull(new_values)
new_values = new_values[mask]
new_index = new_index[mask]
return Series(new_values, index=new_index)
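# Sketch of stack() on a plain frame (illustrative values, output abridged):
#
#     >>> df = DataFrame([[1, 2], [3, 4]], index=['x', 'y'], columns=['a', 'b'])
#     >>> stack(df)
#     x  a    1
#        b    2
#     y  a    3
#        b    4
#     dtype: int64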
def stack_multiple(frame, level, dropna=True):
# If all passed levels match up to column names, no
# ambiguity about what to do
if all(lev in frame.columns.names for lev in level):
result = frame
for lev in level:
result = stack(result, lev, dropna=dropna)
# Otherwise, level numbers may change as each successive level is stacked
elif all(isinstance(lev, int) for lev in level):
# As each stack is done, the level numbers decrease, so we need
# to account for that when level is a sequence of ints
result = frame
# _get_level_number() checks level numbers are in range and converts
# negative numbers to positive
level = [frame.columns._get_level_number(lev) for lev in level]
# Can't iterate directly through level as we might need to change
# values as we go
for index in range(len(level)):
lev = level[index]
result = stack(result, lev, dropna=dropna)
# Decrement all level numbers greater than current, as these
# have now shifted down by one
updated_level = []
for other in level:
if other > lev:
updated_level.append(other - 1)
else:
updated_level.append(other)
level = updated_level
else:
raise ValueError("level should contain all level names or all level numbers, "
"not a mixture of the two.")
return result
def _stack_multi_columns(frame, level_num=-1, dropna=True):
def _convert_level_number(level_num, columns):
"""
Logic for converting the level number to something
we can safely pass to swaplevel:
We generally want to convert the level number into
a level name, except when columns do not have names,
in which case we must leave as a level number
"""
if level_num in columns.names:
return columns.names[level_num]
else:
if columns.names[level_num] is None:
return level_num
else:
return columns.names[level_num]
this = frame.copy()
# this makes life much simpler
if level_num != frame.columns.nlevels - 1:
# roll levels to put selected level at end
roll_columns = this.columns
for i in range(level_num, frame.columns.nlevels - 1):
# Need to check if the ints conflict with level names
lev1 = _convert_level_number(i, roll_columns)
lev2 = _convert_level_number(i + 1, roll_columns)
roll_columns = roll_columns.swaplevel(lev1, lev2)
this.columns = roll_columns
if not this.columns.is_lexsorted():
# Workaround the edge case where 0 is one of the column names,
# which interferes with trying to sort based on the first
# level
level_to_sort = _convert_level_number(0, this.columns)
this = this.sortlevel(level_to_sort, axis=1)
# tuple list excluding level for grouping columns
if len(frame.columns.levels) > 2:
tuples = list(zip(*[
lev.take(lab) for lev, lab in
zip(this.columns.levels[:-1], this.columns.labels[:-1])
]))
unique_groups = [key for key, _ in itertools.groupby(tuples)]
new_names = this.columns.names[:-1]
new_columns = MultiIndex.from_tuples(unique_groups, names=new_names)
else:
new_columns = unique_groups = this.columns.levels[0]
# time to ravel the values
new_data = {}
level_vals = this.columns.levels[-1]
level_labels = sorted(set(this.columns.labels[-1]))
level_vals_used = level_vals[level_labels]
levsize = len(level_labels)
drop_cols = []
for key in unique_groups:
loc = this.columns.get_loc(key)
slice_len = loc.stop - loc.start
# can make more efficient?
if slice_len == 0:
drop_cols.append(key)
continue
elif slice_len != levsize:
chunk = this.ix[:, this.columns[loc]]
chunk.columns = level_vals.take(chunk.columns.labels[-1])
value_slice = chunk.reindex(columns=level_vals_used).values
else:
if frame._is_mixed_type:
value_slice = this.ix[:, this.columns[loc]].values
else:
value_slice = this.values[:, loc]
new_data[key] = value_slice.ravel()
if len(drop_cols) > 0:
new_columns = new_columns.difference(drop_cols)
N = len(this)
if isinstance(this.index, MultiIndex):
new_levels = list(this.index.levels)
new_names = list(this.index.names)
new_labels = [lab.repeat(levsize) for lab in this.index.labels]
else:
new_levels = [this.index]
new_labels = [np.arange(N).repeat(levsize)]
new_names = [this.index.name] # something better?
new_levels.append(frame.columns.levels[level_num])
new_labels.append(np.tile(level_labels, N))
new_names.append(frame.columns.names[level_num])
new_index = MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
result = DataFrame(new_data, index=new_index, columns=new_columns)
# more efficient way to go about this? can do the whole masking biz but
# will only save a small amount of time...
if dropna:
result = result.dropna(axis=0, how='all')
return result
def melt(frame, id_vars=None, value_vars=None,
var_name=None, value_name='value', col_level=None):
"""
"Unpivots" a DataFrame from wide format to long format, optionally leaving
identifier variables set.
This function is useful to massage a DataFrame into a format where one
or more columns are identifier variables (`id_vars`), while all other
columns, considered measured variables (`value_vars`), are "unpivoted" to
the row axis, leaving just two non-identifier columns, 'variable' and
'value'.
Parameters
----------
frame : DataFrame
id_vars : tuple, list, or ndarray, optional
Column(s) to use as identifier variables.
value_vars : tuple, list, or ndarray, optional
Column(s) to unpivot. If not specified, uses all columns that
are not set as `id_vars`.
var_name : scalar
Name to use for the 'variable' column. If None it uses
``frame.columns.name`` or 'variable'.
value_name : scalar, default 'value'
Name to use for the 'value' column.
col_level : int or string, optional
If columns are a MultiIndex then use this level to melt.
See also
--------
pivot_table
DataFrame.pivot
Examples
--------
>>> import pandas as pd
>>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'},
... 'B': {0: 1, 1: 3, 2: 5},
... 'C': {0: 2, 1: 4, 2: 6}})
>>> df
A B C
0 a 1 2
1 b 3 4
2 c 5 6
>>> pd.melt(df, id_vars=['A'], value_vars=['B'])
A variable value
0 a B 1
1 b B 3
2 c B 5
>>> pd.melt(df, id_vars=['A'], value_vars=['B', 'C'])
A variable value
0 a B 1
1 b B 3
2 c B 5
3 a C 2
4 b C 4
5 c C 6
The names of 'variable' and 'value' columns can be customized:
>>> pd.melt(df, id_vars=['A'], value_vars=['B'],
... var_name='myVarname', value_name='myValname')
A myVarname myValname
0 a B 1
1 b B 3
2 c B 5
If you have multi-index columns:
>>> df.columns = [list('ABC'), list('DEF')]
>>> df
A B C
D E F
0 a 1 2
1 b 3 4
2 c 5 6
>>> pd.melt(df, col_level=0, id_vars=['A'], value_vars=['B'])
A variable value
0 a B 1
1 b B 3
2 c B 5
>>> pd.melt(df, id_vars=[('A', 'D')], value_vars=[('B', 'E')])
(A, D) variable_0 variable_1 value
0 a B E 1
1 b B E 3
2 c B E 5
"""
# TODO: what about the existing index?
if id_vars is not None:
if not isinstance(id_vars, (tuple, list, np.ndarray)):
id_vars = [id_vars]
else:
id_vars = list(id_vars)
else:
id_vars = []
if value_vars is not None:
if not isinstance(value_vars, (tuple, list, np.ndarray)):
value_vars = [value_vars]
frame = frame.ix[:, id_vars + value_vars]
else:
frame = frame.copy()
if col_level is not None: # allow list or other?
# frame is a copy
frame.columns = frame.columns.get_level_values(col_level)
if var_name is None:
if isinstance(frame.columns, MultiIndex):
if len(frame.columns.names) == len(set(frame.columns.names)):
var_name = frame.columns.names
else:
var_name = ['variable_%s' % i for i in
range(len(frame.columns.names))]
else:
var_name = [frame.columns.name if frame.columns.name is not None
else 'variable']
if isinstance(var_name, compat.string_types):
var_name = [var_name]
N, K = frame.shape
K -= len(id_vars)
mdata = {}
for col in id_vars:
mdata[col] = np.tile(frame.pop(col).values, K)
mcolumns = id_vars + var_name + [value_name]
mdata[value_name] = frame.values.ravel('F')
for i, col in enumerate(var_name):
# asanyarray will keep the columns as an Index
mdata[col] = np.asanyarray(frame.columns.get_level_values(i)).repeat(N)
return DataFrame(mdata, columns=mcolumns)
def lreshape(data, groups, dropna=True, label=None):
"""
Reshape long-format data to wide. Generalized inverse of DataFrame.pivot
Parameters
----------
data : DataFrame
groups : dict
{new_name : list_of_columns}
dropna : boolean, default True
Examples
--------
>>> import pandas as pd
>>> data = pd.DataFrame({'hr1': [514, 573], 'hr2': [545, 526],
... 'team': ['Red Sox', 'Yankees'],
... 'year1': [2007, 2008], 'year2': [2008, 2008]})
>>> data
hr1 hr2 team year1 year2
0 514 545 Red Sox 2007 2008
1 573 526 Yankees 2007 2008
>>> pd.lreshape(data, {'year': ['year1', 'year2'], 'hr': ['hr1', 'hr2']})
team hr year
0 Red Sox 514 2007
1 Yankees 573 2007
2 Red Sox 545 2008
3 Yankees 526 2008
Returns
-------
reshaped : DataFrame
"""
if isinstance(groups, dict):
keys = list(groups.keys())
values = list(groups.values())
else:
keys, values = zip(*groups)
all_cols = list(set.union(*[set(x) for x in values]))
id_cols = list(data.columns.difference(all_cols))
K = len(values[0])
for seq in values:
if len(seq) != K:
raise ValueError('All column lists must be same length')
mdata = {}
pivot_cols = []
for target, names in zip(keys, values):
mdata[target] = com._concat_compat([data[col].values for col in names])
pivot_cols.append(target)
for col in id_cols:
mdata[col] = np.tile(data[col].values, K)
if dropna:
mask = np.ones(len(mdata[pivot_cols[0]]), dtype=bool)
for c in pivot_cols:
mask &= notnull(mdata[c])
if not mask.all():
mdata = dict((k, v[mask]) for k, v in compat.iteritems(mdata))
return DataFrame(mdata, columns=id_cols + pivot_cols)
def wide_to_long(df, stubnames, i, j):
"""
Wide panel to long format. Less flexible but more user-friendly than melt.
Parameters
----------
df : DataFrame
The wide-format DataFrame
stubnames : list
A list of stub names. The wide format variables are assumed to
start with the stub names.
i : str
The name of the id variable.
j : str
The name of the subobservation variable.
Returns
-------
DataFrame
A DataFrame that contains each stub name as a variable as well as
variables for i and j.
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> np.random.seed(123)
>>> df = pd.DataFrame({"A1970" : {0 : "a", 1 : "b", 2 : "c"},
... "A1980" : {0 : "d", 1 : "e", 2 : "f"},
... "B1970" : {0 : 2.5, 1 : 1.2, 2 : .7},
... "B1980" : {0 : 3.2, 1 : 1.3, 2 : .1},
... "X" : dict(zip(range(3), np.random.randn(3)))
... })
>>> df["id"] = df.index
>>> df
A1970 A1980 B1970 B1980 X id
0 a d 2.5 3.2 -1.085631 0
1 b e 1.2 1.3 0.997345 1
2 c f 0.7 0.1 0.282978 2
>>> wide_to_long(df, ["A", "B"], i="id", j="year")
X A B
id year
0 1970 -1.085631 a 2.5
1 1970 0.997345 b 1.2
2 1970 0.282978 c 0.7
0 1980 -1.085631 d 3.2
1 1980 0.997345 e 1.3
2 1980 0.282978 f 0.1
Notes
-----
All extra variables are treated as extra id variables. This simply uses
`pandas.melt` under the hood, but is hard-coded to "do the right thing"
    in a typical case.
"""
def get_var_names(df, regex):
return df.filter(regex=regex).columns.tolist()
def melt_stub(df, stub, i, j):
varnames = get_var_names(df, "^" + stub)
newdf = melt(df, id_vars=i, value_vars=varnames, value_name=stub,
var_name=j)
newdf_j = newdf[j].str.replace(stub, "")
try:
newdf_j = newdf_j.astype(int)
except ValueError:
pass
newdf[j] = newdf_j
return newdf
id_vars = get_var_names(df, "^(?!%s)" % "|".join(stubnames))
if i not in id_vars:
id_vars += [i]
newdf = melt_stub(df, stubnames[0], id_vars, j)
for stub in stubnames[1:]:
new = melt_stub(df, stub, id_vars, j)
newdf = newdf.merge(new, how="outer", on=id_vars + [j], copy=False)
return newdf.set_index([i, j])
def get_dummies(data, prefix=None, prefix_sep='_', dummy_na=False,
columns=None, sparse=False):
"""
Convert categorical variable into dummy/indicator variables
Parameters
----------
data : array-like, Series, or DataFrame
prefix : string, list of strings, or dict of strings, default None
String to append DataFrame column names
Pass a list with length equal to the number of columns
        when calling get_dummies on a DataFrame. Alternatively, `prefix`
can be a dictionary mapping column names to prefixes.
prefix_sep : string, default '_'
If appending prefix, separator/delimiter to use. Or pass a
list or dictionary as with `prefix.`
dummy_na : bool, default False
Add a column to indicate NaNs, if False NaNs are ignored.
columns : list-like, default None
Column names in the DataFrame to be encoded.
If `columns` is None then all the columns with
`object` or `category` dtype will be converted.
sparse : bool, default False
Whether the returned DataFrame should be sparse or not.
Returns
-------
dummies : DataFrame
Examples
--------
>>> import pandas as pd
>>> s = pd.Series(list('abca'))
>>> get_dummies(s)
a b c
0 1 0 0
1 0 1 0
2 0 0 1
3 1 0 0
>>> s1 = ['a', 'b', np.nan]
>>> get_dummies(s1)
a b
0 1 0
1 0 1
2 0 0
>>> get_dummies(s1, dummy_na=True)
a b NaN
0 1 0 0
1 0 1 0
2 0 0 1
>>> df = DataFrame({'A': ['a', 'b', 'a'], 'B': ['b', 'a', 'c'],
'C': [1, 2, 3]})
    >>> get_dummies(df, prefix=['col1', 'col2'])
C col1_a col1_b col2_a col2_b col2_c
0 1 1 0 0 1 0
1 2 0 1 1 0 0
2 3 1 0 0 0 1
See also ``Series.str.get_dummies``.
"""
from pandas.tools.merge import concat
from itertools import cycle
if isinstance(data, DataFrame):
# determine columns being encoded
if columns is None:
columns_to_encode = data.select_dtypes(include=['object',
'category']).columns
else:
columns_to_encode = columns
# validate prefixes and separator to avoid silently dropping cols
def check_len(item, name):
length_msg = ("Length of '{0}' ({1}) did "
"not match the length of the columns "
"being encoded ({2}).")
if com.is_list_like(item):
if not len(item) == len(columns_to_encode):
raise ValueError(length_msg.format(name, len(item),
len(columns_to_encode)))
check_len(prefix, 'prefix')
check_len(prefix_sep, 'prefix_sep')
if isinstance(prefix, compat.string_types):
prefix = cycle([prefix])
if isinstance(prefix, dict):
prefix = [prefix[col] for col in columns_to_encode]
if prefix is None:
prefix = columns_to_encode
# validate separators
if isinstance(prefix_sep, compat.string_types):
prefix_sep = cycle([prefix_sep])
elif isinstance(prefix_sep, dict):
prefix_sep = [prefix_sep[col] for col in columns_to_encode]
result = data.drop(columns_to_encode, axis=1)
with_dummies = [result]
for (col, pre, sep) in zip(columns_to_encode, prefix, prefix_sep):
dummy = _get_dummies_1d(data[col], prefix=pre, prefix_sep=sep,
dummy_na=dummy_na, sparse=sparse)
with_dummies.append(dummy)
result = concat(with_dummies, axis=1)
else:
result = _get_dummies_1d(data, prefix, prefix_sep, dummy_na,
sparse=sparse)
return result
def _get_dummies_1d(data, prefix, prefix_sep='_', dummy_na=False, sparse=False):
# Series avoids inconsistent NaN handling
cat = Categorical.from_array(Series(data), ordered=True)
levels = cat.categories
# if all NaN
if not dummy_na and len(levels) == 0:
if isinstance(data, Series):
index = data.index
else:
index = np.arange(len(data))
if not sparse:
return DataFrame(index=index)
else:
return SparseDataFrame(index=index)
codes = cat.codes.copy()
if dummy_na:
codes[codes == -1] = len(cat.categories)
levels = np.append(cat.categories, np.nan)
number_of_cols = len(levels)
if prefix is not None:
dummy_cols = ['%s%s%s' % (prefix, prefix_sep, v)
for v in levels]
else:
dummy_cols = levels
if isinstance(data, Series):
index = data.index
else:
index = None
if sparse:
sparse_series = {}
N = len(data)
sp_indices = [ [] for _ in range(len(dummy_cols)) ]
for ndx, code in enumerate(codes):
if code == -1:
# Blank entries if not dummy_na and code == -1, #GH4446
continue
sp_indices[code].append(ndx)
for col, ixs in zip(dummy_cols, sp_indices):
sarr = SparseArray(np.ones(len(ixs)), sparse_index=IntIndex(N, ixs),
fill_value=0)
sparse_series[col] = SparseSeries(data=sarr, index=index)
return SparseDataFrame(sparse_series, index=index, columns=dummy_cols)
else:
dummy_mat = np.eye(number_of_cols).take(codes, axis=0)
if not dummy_na:
# reset NaN GH4446
dummy_mat[codes == -1] = 0
return DataFrame(dummy_mat, index=index, columns=dummy_cols)
def make_axis_dummies(frame, axis='minor', transform=None):
"""
Construct 1-0 dummy variables corresponding to designated axis
labels
Parameters
----------
frame : DataFrame
axis : {'major', 'minor'}, default 'minor'
transform : function, default None
Function to apply to axis labels first. For example, to
get "day of week" dummies in a time series regression
you might call::
make_axis_dummies(panel, axis='major',
transform=lambda d: d.weekday())
Returns
-------
dummies : DataFrame
Column names taken from chosen axis
"""
numbers = {
'major': 0,
'minor': 1
}
num = numbers.get(axis, axis)
items = frame.index.levels[num]
labels = frame.index.labels[num]
if transform is not None:
mapped_items = items.map(transform)
cat = Categorical.from_array(mapped_items.take(labels), ordered=True)
labels = cat.codes
items = cat.categories
values = np.eye(len(items), dtype=float)
values = values.take(labels, axis=0)
return DataFrame(values, columns=items, index=frame.index)
|
mit
| 5,223,546,128,094,792,000
| 32.233362
| 88
| 0.565369
| false
| 3.710054
| false
| false
| false
|
projectatomic/osbs-client
|
tests/cli/test_capture.py
|
1
|
1660
|
"""
Copyright (c) 2015, 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import absolute_import
import json
import os
import pytest
from osbs.constants import DEFAULT_NAMESPACE
from osbs.cli.capture import setup_json_capture
from tests.constants import TEST_BUILD
@pytest.fixture # noqa
def osbs_with_capture(osbs, tmpdir):
setup_json_capture(osbs, osbs.os_conf, str(tmpdir))
return osbs
def test_json_capture_no_watch(osbs_with_capture, tmpdir):
for visit in ["000", "001"]:
osbs_with_capture.list_builds()
filename = "get-build.openshift.io_v1_namespaces_{n}_builds_-{v}.json"
path = os.path.join(str(tmpdir), filename.format(n=DEFAULT_NAMESPACE,
v=visit))
assert os.access(path, os.R_OK)
with open(path) as fp:
obj = json.load(fp)
assert obj
def test_json_capture_watch(osbs_with_capture, tmpdir):
# Take the first two yielded values (fresh object, update)
# PyCQA/pylint#2731 fixed in 2.4.4, so noqa
for _ in zip(range(2), # pylint: disable=W1638
osbs_with_capture.os.watch_resource('builds', TEST_BUILD)):
pass
filename = "get-build.openshift.io_v1_watch_namespaces_{n}_builds_{b}_-000-000.json"
path = os.path.join(str(tmpdir), filename.format(n=DEFAULT_NAMESPACE,
b=TEST_BUILD))
assert os.access(path, os.R_OK)
with open(path) as fp:
obj = json.load(fp)
assert obj
|
bsd-3-clause
| -4,635,834,311,523,012,000
| 30.320755
| 88
| 0.633133
| false
| 3.509514
| true
| false
| false
|
qPCR4vir/orange3
|
Orange/widgets/visualize/owboxplot.py
|
1
|
30964
|
# -*- coding: utf-8 -*-
import sys
import math
import itertools
import numpy as np
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtGui import QSizePolicy
import scipy.special
import Orange.data
from Orange.statistics import contingency, distribution
from Orange.widgets import widget, gui
from Orange.widgets.settings import (Setting, DomainContextHandler,
ContextSetting)
from Orange.widgets.utils import datacaching, vartype
def compute_scale(min_, max_):
if min_ == max_:
return math.floor(min_), 1
magnitude = int(3 * math.log10(abs(max_ - min_)) + 1)
if magnitude % 3 == 0:
first_place = 1
elif magnitude % 3 == 1:
first_place = 2
else:
first_place = 5
magnitude = magnitude // 3 - 1
step = first_place * pow(10, magnitude)
first_val = math.ceil(min_ / step) * step
return first_val, step
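# Rough behaviour, worked by hand and therefore illustrative only:
# compute_scale(0, 7) -> (0, 1) and compute_scale(0, 103) -> (0, 20),
# i.e. a starting tick plus a 1/2/5-style step size for the axis.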
class BoxData:
def __init__(self, dist):
self.dist = dist
self.N = N = np.sum(dist[1])
if N == 0:
return
self.a_min = float(dist[0, 0])
self.a_max = float(dist[0, -1])
self.mean = float(np.sum(dist[0] * dist[1]) / N)
self.var = float(np.sum(dist[1] * (dist[0] - self.mean) ** 2) / N)
self.dev = math.sqrt(self.var)
s = 0
thresholds = [N / 4, N / 2, N / 4 * 3]
thresh_i = 0
q = []
for i, e in enumerate(dist[1]):
s += e
if s >= thresholds[thresh_i]:
if s == thresholds[thresh_i] and i + 1 < dist.shape[1]:
q.append(float((dist[0, i] + dist[0, i + 1]) / 2))
else:
q.append(float(dist[0, i]))
thresh_i += 1
if thresh_i == 3:
break
while len(q) < 3:
q.append(q[-1])
self.q25, self.median, self.q75 = q
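        # Hand-checked example: dist = np.array([[1., 2., 3., 4.],
        #                                        [1., 1., 1., 1.]])
        # yields q25=1.5, median=2.5, q75=3.5 via the midpoint rule above.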
# noinspection PyUnresolvedReferences
class OWBoxPlot(widget.OWWidget):
"""
Here's how the widget's functions call each other:
    - `set_data` is a signal handler that fills the list boxes and calls `attr_changed`.
- `attr_changed` handles changes of attribute or grouping (callbacks for
list boxes). It recomputes box data by calling `compute_box_data`, shows
the appropriate display box (discrete/continuous) and then calls
`layout_changed`
- `layout_changed` constructs all the elements for the scene (as lists of
QGraphicsItemGroup) and calls `display_changed`. It is called when the
      attribute or grouping is changed (by attr_changed) and on a resize event.
- `display_changed` puts the elements corresponding to the current display
settings on the scene. It is called when the elements are reconstructed
(layout is changed due to selection of attributes or resize event), or
when the user changes display settings or colors.
For discrete attributes, the flow is a bit simpler: the elements are not
constructed in advance (by layout_changed). Instead, layout_changed and
    display_changed call display_changed_disc, which draws everything.
"""
name = "Box Plot"
description = "Visualize the distribution of feature values in a box plot."
icon = "icons/BoxPlot.svg"
priority = 100
inputs = [("Data", Orange.data.Table, "set_data")]
#: Comparison types for continuous variables
CompareNone, CompareMedians, CompareMeans = 0, 1, 2
settingsHandler = DomainContextHandler()
attributes_select = ContextSetting([0])
grouping_select = ContextSetting([0])
show_annotations = Setting(True)
compare = Setting(CompareMedians)
stattest = Setting(0)
sig_threshold = Setting(0.05)
stretched = Setting(True)
_sorting_criteria_attrs = {
CompareNone: "", CompareMedians: "median", CompareMeans: "mean"
}
_pen_axis_tick = QtGui.QPen(QtCore.Qt.white, 5)
_pen_axis = QtGui.QPen(QtCore.Qt.darkGray, 3)
_pen_median = QtGui.QPen(QtGui.QBrush(QtGui.QColor(0xff, 0xff, 0x00)), 2)
_pen_paramet = QtGui.QPen(QtGui.QBrush(QtGui.QColor(0x33, 0x00, 0xff)), 2)
_pen_dotted = QtGui.QPen(QtGui.QBrush(QtGui.QColor(0x33, 0x00, 0xff)), 1)
_pen_dotted.setStyle(QtCore.Qt.DotLine)
_post_line_pen = QtGui.QPen(QtCore.Qt.lightGray, 2)
_post_grp_pen = QtGui.QPen(QtCore.Qt.lightGray, 4)
for pen in (_pen_paramet, _pen_median, _pen_dotted,
_pen_axis, _pen_axis_tick, _post_line_pen, _post_grp_pen):
pen.setCosmetic(True)
pen.setCapStyle(QtCore.Qt.RoundCap)
pen.setJoinStyle(QtCore.Qt.RoundJoin)
_pen_axis_tick.setCapStyle(QtCore.Qt.FlatCap)
_box_brush = QtGui.QBrush(QtGui.QColor(0x33, 0x88, 0xff, 0xc0))
_axis_font = QtGui.QFont()
_axis_font.setPixelSize(12)
_label_font = QtGui.QFont()
_label_font.setPixelSize(11)
_attr_brush = QtGui.QBrush(QtGui.QColor(0x33, 0x00, 0xff))
graph_name = "box_scene"
def __init__(self):
super().__init__()
self.grouping = []
self.attributes = []
self.stats = []
self.dataset = None
self.posthoc_lines = []
self.label_txts = self.mean_labels = self.boxes = self.labels = \
self.label_txts_all = self.attr_labels = self.order = []
self.p = -1.0
self.scale_x = self.scene_min_x = self.scene_width = 0
self.label_width = 0
self.attr_list_box = gui.listBox(
self.controlArea, self, "attributes_select", "attributes",
box="Variable", callback=self.attr_changed,
sizeHint=QtCore.QSize(200, 150))
self.attr_list_box.setSizePolicy(QSizePolicy.Fixed,
QSizePolicy.MinimumExpanding)
box = gui.vBox(self.controlArea, "Grouping")
self.group_list_box = gui.listBox(
box, self, 'grouping_select', "grouping",
callback=self.attr_changed,
sizeHint=QtCore.QSize(200, 100))
self.group_list_box.setSizePolicy(QSizePolicy.Fixed,
QSizePolicy.MinimumExpanding)
# TODO: move Compare median/mean to grouping box
self.display_box = gui.vBox(self.controlArea, "Display")
gui.checkBox(self.display_box, self, "show_annotations", "Annotate",
callback=self.display_changed)
self.compare_rb = gui.radioButtonsInBox(
self.display_box, self, 'compare',
btnLabels=["No comparison", "Compare medians", "Compare means"],
callback=self.display_changed)
self.stretching_box = gui.checkBox(
self.controlArea, self, 'stretched', "Stretch bars", box='Display',
callback=self.display_changed).box
gui.vBox(self.mainArea, addSpace=True)
self.box_scene = QtGui.QGraphicsScene()
self.box_view = QtGui.QGraphicsView(self.box_scene)
self.box_view.setRenderHints(QtGui.QPainter.Antialiasing |
QtGui.QPainter.TextAntialiasing |
QtGui.QPainter.SmoothPixmapTransform)
self.box_view.viewport().installEventFilter(self)
self.mainArea.layout().addWidget(self.box_view)
e = gui.hBox(self.mainArea, addSpace=False)
self.infot1 = gui.widgetLabel(e, "<center>No test results.</center>")
self.mainArea.setMinimumWidth(650)
self.stats = self.dist = self.conts = []
self.is_continuous = False
self.update_display_box()
def eventFilter(self, obj, event):
if obj is self.box_view.viewport() and \
event.type() == QtCore.QEvent.Resize:
self.layout_changed()
return super().eventFilter(obj, event)
# noinspection PyTypeChecker
def set_data(self, dataset):
if dataset is not None and (
not bool(dataset) or not len(dataset.domain)):
dataset = None
self.closeContext()
self.dataset = dataset
self.dist = self.stats = self.conts = []
self.grouping_select = []
self.attributes_select = []
self.attr_list_box.clear()
self.group_list_box.clear()
if dataset:
domain = dataset.domain
self.attributes = [(a.name, vartype(a)) for a in domain.variables +
domain.metas if a.is_primitive()]
self.grouping = ["None"] + [(a.name, vartype(a)) for a in
domain.variables + domain.metas
if a.is_discrete]
self.grouping_select = [0]
self.attributes_select = [0]
self.openContext(self.dataset)
self.attr_changed()
else:
self.reset_all_data()
def reset_all_data(self):
self.attr_list_box.clear()
self.group_list_box.clear()
self.clear_scene()
self.infot1.setText("")
def attr_changed(self):
self.compute_box_data()
self.update_display_box()
self.layout_changed()
if self.is_continuous:
heights = 90 if self.show_annotations else 60
self.box_view.centerOn(self.scene_min_x + self.scene_width / 2,
-30 - len(self.stats) * heights / 2 + 45)
else:
self.box_view.centerOn(self.scene_width / 2,
-30 - len(self.boxes) * 40 / 2 + 45)
def compute_box_data(self):
dataset = self.dataset
if dataset is None:
self.stats = self.dist = self.conts = []
return
attr = self.attributes[self.attributes_select[0]][0]
attr = dataset.domain[attr]
self.is_continuous = attr.is_continuous
group_by = self.grouping_select[0]
if group_by:
group = self.grouping[group_by][0]
self.dist = []
self.conts = datacaching.getCached(
dataset, contingency.get_contingency,
(dataset, attr, group))
if self.is_continuous:
self.stats = [BoxData(cont) for cont in self.conts]
self.label_txts_all = dataset.domain[group].values
else:
self.dist = datacaching.getCached(
dataset, distribution.get_distribution, (dataset, attr))
self.conts = []
if self.is_continuous:
self.stats = [BoxData(self.dist)]
self.label_txts_all = [""]
self.label_txts = [txts for stat, txts in zip(self.stats,
self.label_txts_all)
if stat.N > 0]
self.stats = [stat for stat in self.stats if stat.N > 0]
def update_display_box(self):
if self.is_continuous:
self.stretching_box.hide()
self.display_box.show()
group_by = self.grouping_select[0]
self.compare_rb.setEnabled(group_by != 0)
else:
self.stretching_box.show()
self.display_box.hide()
def clear_scene(self):
self.box_scene.clear()
self.attr_labels = []
self.labels = []
self.boxes = []
self.mean_labels = []
self.posthoc_lines = []
def layout_changed(self):
self.clear_scene()
if self.dataset is None or len(self.conts) == len(self.dist) == 0:
return
if not self.is_continuous:
return self.display_changed_disc()
attr = self.attributes[self.attributes_select[0]][0]
attr = self.dataset.domain[attr]
self.mean_labels = [self.mean_label(stat, attr, lab)
for stat, lab in zip(self.stats, self.label_txts)]
self.draw_axis()
self.boxes = [self.box_group(stat) for stat in self.stats]
self.labels = [self.label_group(stat, attr, mean_lab)
for stat, mean_lab in zip(self.stats, self.mean_labels)]
self.attr_labels = [QtGui.QGraphicsSimpleTextItem(lab)
for lab in self.label_txts]
for it in itertools.chain(self.labels, self.boxes, self.attr_labels):
self.box_scene.addItem(it)
self.display_changed()
def display_changed(self):
if self.dataset is None:
return
if not self.is_continuous:
return self.display_changed_disc()
self.order = list(range(len(self.stats)))
criterion = self._sorting_criteria_attrs[self.compare]
if criterion:
self.order = sorted(
self.order, key=lambda i: getattr(self.stats[i], criterion))
heights = 90 if self.show_annotations else 60
for row, box_index in enumerate(self.order):
y = (-len(self.stats) + row) * heights + 10
self.boxes[box_index].setY(y)
labels = self.labels[box_index]
if self.show_annotations:
labels.show()
labels.setY(y)
else:
labels.hide()
label = self.attr_labels[box_index]
label.setY(y - 15 - label.boundingRect().height())
if self.show_annotations:
label.hide()
else:
stat = self.stats[box_index]
if self.compare == OWBoxPlot.CompareMedians:
pos = stat.median + 5 / self.scale_x
elif self.compare == OWBoxPlot.CompareMeans:
pos = stat.mean + 5 / self.scale_x
else:
pos = stat.q25
label.setX(pos * self.scale_x)
label.show()
r = QtCore.QRectF(self.scene_min_x, -30 - len(self.stats) * heights,
self.scene_width, len(self.stats) * heights + 90)
self.box_scene.setSceneRect(r)
self.compute_tests()
self.show_posthoc()
def display_changed_disc(self):
self.clear_scene()
self.attr_labels = [QtGui.QGraphicsSimpleTextItem(lab)
for lab in self.label_txts_all]
if not self.stretched:
if self.grouping_select[0]:
self.labels = [QtGui.QGraphicsTextItem("{}".format(int(sum(cont))))
for cont in self.conts]
else:
self.labels = [QtGui.QGraphicsTextItem(str(int(sum(self.dist))))]
self.draw_axis_disc()
if self.grouping_select[0]:
self.boxes = [self.strudel(cont) for cont in self.conts]
else:
self.boxes = [self.strudel(self.dist)]
selected_grouping = self.grouping[self.grouping_select[0]][0]
selected_attribute = self.attributes[self.attributes_select[0]][0]
for row, box in enumerate(self.boxes):
y = (-len(self.boxes) + row) * 40 + 10
self.box_scene.addItem(box)
box.setPos(0, y)
label = self.attr_labels[row]
b = label.boundingRect()
label.setPos(-b.width() - 10, y - b.height() / 2)
self.box_scene.addItem(label)
if not self.stretched:
label = self.labels[row]
b = label.boundingRect()
if self.grouping_select[0]:
right = self.scale_x * sum(self.conts[row])
else:
right = self.scale_x * sum(self.dist)
label.setPos(right + 10, y - b.height() / 2)
self.box_scene.addItem(label)
if selected_attribute != selected_grouping:
attr = self.attributes[self.attributes_select[0]][0]
selected_attr = self.dataset.domain[attr]
for label_text, bar_part in zip(selected_attr.values,
box.childItems()):
label = QtGui.QGraphicsSimpleTextItem(label_text)
label.setPos(bar_part.boundingRect().x(),
y - label.boundingRect().height() - 8)
self.box_scene.addItem(label)
self.box_scene.setSceneRect(-self.label_width - 5,
-30 - len(self.boxes) * 40,
                                    self.scene_width, len(self.boxes) * 40 + 90)
self.infot1.setText("")
# noinspection PyPep8Naming
def compute_tests(self):
# The t-test and ANOVA are implemented here since they efficiently use
# the widget-specific data in self.stats.
# The non-parametric tests can't do this, so we use statistics.tests
def stat_ttest():
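            # Welch's unequal-variance t-test: pooled_var is the squared
            # standard error of the mean difference, and df follows the
            # Welch-Satterthwaite approximation (BoxData supplies population
            # variances, i.e. divided by N rather than N - 1).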
d1, d2 = self.stats
pooled_var = d1.var / d1.N + d2.var / d2.N
df = pooled_var ** 2 / \
((d1.var / d1.N) ** 2 / (d1.N - 1) +
(d2.var / d2.N) ** 2 / (d2.N - 1))
t = abs(d1.mean - d2.mean) / math.sqrt(pooled_var)
p = 2 * (1 - scipy.special.stdtr(df, t))
return t, p
# TODO: Check this function
# noinspection PyPep8Naming
def stat_ANOVA():
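            # One-way ANOVA: F is the ratio of the between-group to the
            # within-group mean square; p comes from the F distribution CDF.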
N = sum(stat.N for stat in self.stats)
grand_avg = sum(stat.N * stat.mean for stat in self.stats) / N
var_between = sum(stat.N * (stat.mean - grand_avg) ** 2
for stat in self.stats)
df_between = len(self.stats) - 1
var_within = sum(stat.N * stat.var for stat in self.stats)
df_within = N - len(self.stats)
F = (var_between / df_between) / (var_within / df_within)
p = 1 - scipy.special.fdtr(df_between, df_within, F)
return F, p
if self.compare == OWBoxPlot.CompareNone or len(self.stats) < 2:
t = ""
elif any(s.N <= 1 for s in self.stats):
            t = "At least one group has just one instance; " \
                "cannot compute significance"
elif len(self.stats) == 2:
if self.compare == OWBoxPlot.CompareMedians:
t = ""
# z, self.p = tests.wilcoxon_rank_sum(
# self.stats[0].dist, self.stats[1].dist)
# t = "Mann-Whitney's z: %.1f (p=%.3f)" % (z, self.p)
else:
t, self.p = stat_ttest()
t = "Student's t: %.3f (p=%.3f)" % (t, self.p)
else:
if self.compare == OWBoxPlot.CompareMedians:
t = ""
# U, self.p = -1, -1
# t = "Kruskal Wallis's U: %.1f (p=%.3f)" % (U, self.p)
else:
F, self.p = stat_ANOVA()
t = "ANOVA: %.3f (p=%.3f)" % (F, self.p)
self.infot1.setText("<center>%s</center>" % t)
def mean_label(self, stat, attr, val_name):
label = QtGui.QGraphicsItemGroup()
t = QtGui.QGraphicsSimpleTextItem(
"%.*f" % (attr.number_of_decimals + 1, stat.mean), label)
t.setFont(self._label_font)
bbox = t.boundingRect()
w2, h = bbox.width() / 2, bbox.height()
t.setPos(-w2, -h)
tpm = QtGui.QGraphicsSimpleTextItem(
" \u00b1 " + "%.*f" % (attr.number_of_decimals + 1, stat.dev),
label)
tpm.setFont(self._label_font)
tpm.setPos(w2, -h)
if val_name:
vnm = QtGui.QGraphicsSimpleTextItem(val_name + ": ", label)
vnm.setFont(self._label_font)
vnm.setBrush(self._attr_brush)
vb = vnm.boundingRect()
label.min_x = -w2 - vb.width()
vnm.setPos(label.min_x, -h)
else:
label.min_x = -w2
return label
def draw_axis(self):
"""Draw the horizontal axis and sets self.scale_x"""
bottom = min(stat.a_min for stat in self.stats)
top = max(stat.a_max for stat in self.stats)
first_val, step = compute_scale(bottom, top)
while bottom <= first_val:
first_val -= step
bottom = first_val
no_ticks = math.ceil((top - first_val) / step) + 1
top = max(top, first_val + no_ticks * step)
gbottom = min(bottom, min(stat.mean - stat.dev for stat in self.stats))
gtop = max(top, max(stat.mean + stat.dev for stat in self.stats))
bv = self.box_view
viewrect = bv.viewport().rect().adjusted(15, 15, -15, -30)
self.scale_x = scale_x = viewrect.width() / (gtop - gbottom)
# In principle we should repeat this until convergence since the new
# scaling is too conservative. (No chance am I doing this.)
mlb = min(stat.mean + mean_lab.min_x / scale_x
for stat, mean_lab in zip(self.stats, self.mean_labels))
if mlb < gbottom:
gbottom = mlb
self.scale_x = scale_x = viewrect.width() / (gtop - gbottom)
self.scene_min_x = gbottom * scale_x
self.scene_width = (gtop - gbottom) * scale_x
val = first_val
attr = self.attributes[self.attributes_select[0]][0]
attr_desc = self.dataset.domain[attr]
while True:
l = self.box_scene.addLine(val * scale_x, -1, val * scale_x, 1,
self._pen_axis_tick)
l.setZValue(100)
t = self.box_scene.addSimpleText(
attr_desc.repr_val(val), self._axis_font)
t.setFlags(t.flags() |
QtGui.QGraphicsItem.ItemIgnoresTransformations)
r = t.boundingRect()
t.setPos(val * scale_x - r.width() / 2, 8)
if val >= top:
break
val += step
self.box_scene.addLine(bottom * scale_x - 4, 0,
top * scale_x + 4, 0, self._pen_axis)
def draw_axis_disc(self):
"""
Draw the horizontal axis and sets self.scale_x for discrete attributes
"""
if self.stretched:
step = steps = 10
else:
if self.grouping_select[0]:
max_box = max(float(np.sum(dist)) for dist in self.conts)
else:
max_box = float(np.sum(self.dist))
if max_box == 0:
self.scale_x = 1
return
_, step = compute_scale(0, max_box)
step = int(step) if step > 1 else 1
steps = int(math.ceil(max_box / step))
max_box = step * steps
bv = self.box_view
viewrect = bv.viewport().rect().adjusted(15, 15, -15, -30)
self.scene_width = viewrect.width()
lab_width = max(lab.boundingRect().width() for lab in self.attr_labels)
lab_width = max(lab_width, 40)
lab_width = min(lab_width, self.scene_width / 3)
self.label_width = lab_width
right_offset = 0 # offset for the right label
if not self.stretched and self.labels:
if self.grouping_select[0]:
rows = list(zip(self.conts, self.labels))
else:
rows = [(self.dist, self.labels[0])]
# available space left of the 'group labels'
available = self.scene_width - lab_width - 10
scale_x = (available - right_offset) / max_box
max_right = max(sum(dist) * scale_x + 10 +
lbl.boundingRect().width()
for dist, lbl in rows)
right_offset = max(0, max_right - max_box * scale_x)
self.scale_x = scale_x = (self.scene_width - lab_width - 10 - right_offset) / max_box
self.box_scene.addLine(0, 0, max_box * scale_x, 0, self._pen_axis)
for val in range(0, step * steps + 1, step):
l = self.box_scene.addLine(val * scale_x, -1, val * scale_x, 1,
self._pen_axis_tick)
l.setZValue(100)
t = self.box_scene.addSimpleText(str(val), self._axis_font)
t.setPos(val * scale_x - t.boundingRect().width() / 2, 8)
if self.stretched:
self.scale_x *= 100
def label_group(self, stat, attr, mean_lab):
def centered_text(val, pos):
t = QtGui.QGraphicsSimpleTextItem(
"%.*f" % (attr.number_of_decimals + 1, val), labels)
t.setFont(self._label_font)
bbox = t.boundingRect()
t.setPos(pos - bbox.width() / 2, 22)
return t
def line(x, down=1):
QtGui.QGraphicsLineItem(x, 12 * down, x, 20 * down, labels)
def move_label(label, frm, to):
label.setX(to)
to += t_box.width() / 2
path = QtGui.QPainterPath()
path.lineTo(0, 4)
path.lineTo(to - frm, 4)
path.lineTo(to - frm, 8)
p = QtGui.QGraphicsPathItem(path)
p.setPos(frm, 12)
labels.addToGroup(p)
labels = QtGui.QGraphicsItemGroup()
labels.addToGroup(mean_lab)
m = stat.mean * self.scale_x
mean_lab.setPos(m, -22)
line(m, -1)
msc = stat.median * self.scale_x
med_t = centered_text(stat.median, msc)
med_box_width2 = med_t.boundingRect().width()
line(msc)
x = stat.q25 * self.scale_x
t = centered_text(stat.q25, x)
t_box = t.boundingRect()
med_left = msc - med_box_width2
if x + t_box.width() / 2 >= med_left - 5:
move_label(t, x, med_left - t_box.width() - 5)
else:
line(x)
x = stat.q75 * self.scale_x
t = centered_text(stat.q75, x)
t_box = t.boundingRect()
med_right = msc + med_box_width2
if x - t_box.width() / 2 <= med_right + 5:
move_label(t, x, med_right + 5)
else:
line(x)
return labels
def box_group(self, stat, height=20):
def line(x0, y0, x1, y1, *args):
return QtGui.QGraphicsLineItem(x0 * scale_x, y0, x1 * scale_x, y1,
*args)
scale_x = self.scale_x
box = QtGui.QGraphicsItemGroup()
whisker1 = line(stat.a_min, -1.5, stat.a_min, 1.5, box)
whisker2 = line(stat.a_max, -1.5, stat.a_max, 1.5, box)
vert_line = line(stat.a_min, 0, stat.a_max, 0, box)
mean_line = line(stat.mean, -height / 3, stat.mean, height / 3, box)
for it in (whisker1, whisker2, mean_line):
it.setPen(self._pen_paramet)
vert_line.setPen(self._pen_dotted)
var_line = line(stat.mean - stat.dev, 0, stat.mean + stat.dev, 0, box)
var_line.setPen(self._pen_paramet)
mbox = QtGui.QGraphicsRectItem(stat.q25 * scale_x, -height / 2,
(stat.q75 - stat.q25) * scale_x, height,
box)
mbox.setBrush(self._box_brush)
mbox.setPen(QtGui.QPen(QtCore.Qt.NoPen))
mbox.setZValue(-200)
median_line = line(stat.median, -height / 2,
stat.median, height / 2, box)
median_line.setPen(self._pen_median)
median_line.setZValue(-150)
return box
def strudel(self, dist):
attr = self.attributes[self.attributes_select[0]][0]
attr = self.dataset.domain[attr]
ss = np.sum(dist)
box = QtGui.QGraphicsItemGroup()
if ss < 1e-6:
QtGui.QGraphicsRectItem(0, -10, 1, 10, box)
cum = 0
for i, v in enumerate(dist):
if v < 1e-6:
continue
if self.stretched:
v /= ss
v *= self.scale_x
rect = QtGui.QGraphicsRectItem(cum + 1, -6, v - 2, 12, box)
rect.setBrush(QtGui.QBrush(QtGui.QColor(*attr.colors[i])))
rect.setPen(QtGui.QPen(QtCore.Qt.NoPen))
if self.stretched:
tooltip = "{}: {:.2f}%".format(attr.values[i],
100 * dist[i] / sum(dist))
else:
tooltip = "{}: {}".format(attr.values[i], int(dist[i]))
rect.setToolTip(tooltip)
cum += v
return box
def show_posthoc(self):
def line(y0, y1):
it = self.box_scene.addLine(x, y0, x, y1, self._post_line_pen)
it.setZValue(-100)
self.posthoc_lines.append(it)
while self.posthoc_lines:
self.box_scene.removeItem(self.posthoc_lines.pop())
if self.compare == OWBoxPlot.CompareNone or len(self.stats) < 2:
return
if self.compare == OWBoxPlot.CompareMedians:
crit_line = "median"
elif self.compare == OWBoxPlot.CompareMeans:
crit_line = "mean"
else:
assert False
xs = []
height = 90 if self.show_annotations else 60
y_up = -len(self.stats) * height + 10
for pos, box_index in enumerate(self.order):
stat = self.stats[box_index]
x = getattr(stat, crit_line) * self.scale_x
xs.append(x)
by = y_up + pos * height
line(by + 12, 3)
line(by - 12, by - 25)
used_to = []
last_to = 0
for frm, frm_x in enumerate(xs[:-1]):
for to in range(frm + 1, len(xs)):
if xs[to] - frm_x > 1.5:
to -= 1
break
if last_to == to or frm == to:
continue
for rowi, used in enumerate(used_to):
if used < frm:
used_to[rowi] = to
break
else:
rowi = len(used_to)
used_to.append(to)
y = - 6 - rowi * 6
it = self.box_scene.addLine(frm_x - 2, y, xs[to] + 2, y,
self._post_grp_pen)
self.posthoc_lines.append(it)
last_to = to
def get_widget_name_extension(self):
if self.attributes_select and len(self.attributes):
return self.attributes[self.attributes_select[0]][0]
def send_report(self):
self.report_plot()
text = ""
if self.attributes_select and len(self.attributes):
text += "Box plot for attribute '{}' ".format(
self.attributes[self.attributes_select[0]][0])
if self.grouping_select and len(self.grouping):
text += "grouped by '{}'".format(
self.grouping[self.grouping_select[0]][0])
if text:
self.report_caption(text)
def main(argv=None):
if argv is None:
argv = sys.argv
argv = list(argv)
app = QtGui.QApplication(argv)
if len(argv) > 1:
filename = argv[1]
else:
filename = "brown-selected"
data = Orange.data.Table(filename)
w = OWBoxPlot()
w.show()
w.raise_()
w.set_data(data)
w.handleNewSignals()
rval = app.exec_()
w.set_data(None)
w.handleNewSignals()
w.saveSettings()
return rval
if __name__ == "__main__":
sys.exit(main())
|
bsd-2-clause
| 7,862,687,610,765,192,000
| 36.807082
| 93
| 0.536171
| false
| 3.590445
| false
| false
| false
|
beewizzard/discord-dicebot
|
diceroll_bot.py
|
1
|
5786
|
import discord
import asyncio
from discord.errors import HTTPException
import os
import random
import re
# Use these variable to limit overloading of the dice roller
MAXIMUM_DICE_ARGS = 10
MAXIMUM_DICE_COUNT = 1000
MAXIMUM_DICE_SIDES = 1000
MAX_MESSAGE_LENGTH = 2000
class Error(Exception):
"""Base class for exceptions"""
pass
class DiceFormatError(Error):
"""Exception raised for errors in dice string format.
Attributes:
invalid_dice_str -- the invalid dice string which caused this exception to be raised
"""
error_format = "__Error__: {0.mention} provided invalid dice [{1}].\n" \
"Valid format is <x>d<y>[(+|-)<z>].\n" \
"All values must be positive integers."
def __init__(self, invalid_dice_str):
self.invalid_dice_str = invalid_dice_str
def get_error_string(self, author):
return self.error_format.format(author, self.invalid_dice_str)
class DiceValueError(Error):
"""Exception raised for errors in dice values
Attributes:
invalid_dice_str -- the invalid dice string which caused this exception to be raised
"""
error_format = "__Error__: {0.mention} gave a bad value for [{1}].\n" \
"Dice count maximum: {2}\n" \
"Dice sides maximum: {3}"
def __init__(self, invalid_dice_str):
self.invalid_dice_str = invalid_dice_str
def get_error_string(self, author):
return self.error_format.format(
author,
self.invalid_dice_str,
MAXIMUM_DICE_COUNT,
MAXIMUM_DICE_SIDES,
)
def get_roll(dice_str, rng=random.SystemRandom()):
"""
Simulates the effect of rolling one or more dice.
    :param dice_str: A dice string with the following format (invalid format raises a DiceFormatError):
<x>d<y>[(+|-)<z>]
where x, y, and z are all positive integers.
        x and y may be no greater than 1000, else a DiceValueError is raised.
:param rng: A random number generator. Defaults to random.SystemRandom()
:return: An int list of all dice rolled
"""
match = re.match(r'^(\d+)d(\d+)([+-]\d+)?$', dice_str)
if match:
result = []
add = 0
num_dice = int(match.group(1))
num_sides = int(match.group(2))
if match.group(3):
add = int(match.group(3))
# Check for valid dice count and sides
if num_dice > MAXIMUM_DICE_COUNT or num_sides > MAXIMUM_DICE_SIDES:
raise DiceValueError(dice_str)
for x in range(0, num_dice):
roll = rng.randint(1, num_sides) + add
result.append(roll)
return result
else:
raise DiceFormatError(dice_str)
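# A minimal usage sketch for get_roll (illustrative only, not part of the bot's
# runtime path). "3d6+1" rolls three six-sided dice and adds 1 to each die, so
# every result falls in the range 2..7:
#
#   >>> rolls = get_roll("3d6+1")
#   >>> len(rolls)
#   3
#   >>> all(2 <= r <= 7 for r in rolls)
#   True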
client = discord.Client()
@client.event
async def on_ready():
print("Logged in as")
print(client.user.name)
print(client.user.id)
print("------")
@client.event
async def on_message(message):
try:
command, *args = message.content.split()
if command == '!roll-help':
#
# !help
#
pass
elif command == '!roll':
#
# !roll
#
rng = random.SystemRandom()
if len(args) > MAXIMUM_DICE_ARGS:
# Let the author know that only the first MAXIMUM_DICE_ARGS dice were considered
output = "Warning {0.mention}: maximum dice arguments is {1}. Proceeding with first {1} " \
"arguments...".format(message.author, MAXIMUM_DICE_ARGS)
await client.send_message(message.channel, output)
dice_list = args[:MAXIMUM_DICE_ARGS]
response_format = "{0.mention} rolled:\n{1}"
roll_format = "**{0}**: {1}"
if len(dice_list) == 0:
output = roll_format.format("1d20", str(rng.randint(1, 20)))
await client.send_message(message.channel, response_format.format(message.author, output))
else:
try:
rolls = [roll_format.format(dice_str, " ".join([str(x) for x in get_roll(dice_str, rng)]))
for dice_str in dice_list]
output = "\n".join(rolls)
# Check to make sure the message isn't too long
if len(output) > MAX_MESSAGE_LENGTH:
# TODO: split up the message and deliver in pieces
await client.send_message(message.channel, "__Error__: {0.mention} The response was too long "
"for the server to handle. Try fewer/smaller dice.".
format(message.author))
else:
await client.send_message(message.channel, response_format.format(message.author, output))
except DiceFormatError as e:
await client.send_message(message.channel, e.get_error_string(message.author))
except DiceValueError as e:
await client.send_message(message.channel, e.get_error_string(message.author))
except HTTPException:
await client.send_message(message.channel, "__Error__: {0.mention} An error occurred while "
"attempting to communicate with the server.".
format(message.author))
# TODO: catch all other exceptions and log to file
# TODO: Add "try !roll-help" to end of every error message
except ValueError:
# Empty message. Do nothing
pass
client.run(os.environ['DISCORD_DICEROLL_TOKEN'])
|
gpl-3.0
| 5,443,215,532,587,629,000
| 34.496933
| 119
| 0.55859
| false
| 3.995856
| false
| false
| false
|
sachingupta006/Mezzanine
|
mezzanine/generic/forms.py
|
1
|
5182
|
from django import forms
from django.contrib.comments.forms import CommentSecurityForm, CommentForm
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.forms import Html5Mixin
from mezzanine.generic.models import Keyword, ThreadedComment, RATING_RANGE
class KeywordsWidget(forms.MultiWidget):
"""
Form field for the ``KeywordsField`` generic relation field. Since
the admin with model forms has no form field for generic
relations, this form field provides a single field for managing
the keywords. It contains two actual widgets, a text input for
entering keywords, and a hidden input that stores the ID of each
``Keyword`` instance.
The attached JavaScript adds behaviour so that when the form is
submitted, an AJAX post is made that passes the list of keywords
in the text input, and returns a list of keyword IDs which are
then entered into the hidden input before the form submits. The
list of IDs in the hidden input is what is used when retrieving
an actual value from the field for the form.
"""
class Media:
js = ("mezzanine/js/%s" % settings.JQUERY_FILENAME,
"mezzanine/js/admin/keywords_field.js",)
def __init__(self, attrs=None):
"""
Setup the text and hidden form field widgets.
"""
widgets = (forms.HiddenInput,
forms.TextInput(attrs={"class": "vTextField"}))
super(KeywordsWidget, self).__init__(widgets, attrs)
self._ids = []
def decompress(self, value):
"""
Takes the sequence of ``AssignedKeyword`` instances and splits
them into lists of keyword IDs and titles each mapping to one
of the form field widgets.
"""
if hasattr(value, "select_related"):
keywords = [a.keyword for a in value.select_related("keyword")]
if keywords:
keywords = [(str(k.id), k.title) for k in keywords]
self._ids, words = zip(*keywords)
return (",".join(self._ids), ", ".join(words))
return ("", "")
def format_output(self, rendered_widgets):
"""
Wraps the output HTML with a list of all available ``Keyword``
instances that can be clicked on to toggle a keyword.
"""
rendered = super(KeywordsWidget, self).format_output(rendered_widgets)
links = ""
for keyword in Keyword.objects.all().order_by("title"):
prefix = "+" if str(keyword.id) not in self._ids else "-"
links += ("<a href='#'>%s%s</a>" % (prefix, unicode(keyword)))
rendered += mark_safe("<p class='keywords-field'>%s</p>" % links)
return rendered
def value_from_datadict(self, data, files, name):
"""
Return the comma separated list of keyword IDs for use in
``KeywordsField.save_form_data()``.
"""
return data.get("%s_0" % name, "")
class ThreadedCommentForm(CommentForm, Html5Mixin):
name = forms.CharField(label=_("Name"), help_text=_("required"),
max_length=50)
email = forms.EmailField(label=_("Email"),
help_text=_("required (not published)"))
url = forms.URLField(label=_("Website"), help_text=_("optional"),
required=False)
# These are used to get/set prepopulated fields via cookies.
cookie_fields = ("name", "email", "url")
cookie_prefix = "mezzanine-comment-"
def __init__(self, request, *args, **kwargs):
"""
Set some initial field values from cookies or the logged in
user, and apply some HTML5 attributes to the fields if the
``FORMS_USE_HTML5`` setting is ``True``.
        The default values that are filled into the CommentForm have been
        changed such that preference is given to user values over cookie
        values.
"""
kwargs.setdefault("initial", {})
user = request.user
for field in ThreadedCommentForm.cookie_fields:
cookie_name = ThreadedCommentForm.cookie_prefix + field
if user.is_authenticated():
if field == "name":
value = user.get_full_name()
if not value:
value = user.username
elif field == "email":
value = user.email
else:
value=""
else:
value = request.COOKIES.get(cookie_name, "")
kwargs["initial"][field] = value
super(ThreadedCommentForm, self).__init__(*args, **kwargs)
def get_comment_model(self):
"""
Use the custom comment model instead of the built-in one.
"""
return ThreadedComment
class RatingForm(CommentSecurityForm):
"""
Form for a rating. Subclasses ``CommentSecurityForm`` to make use
of its easy setup for generic relations.
"""
value = forms.ChoiceField(label="", widget=forms.RadioSelect,
choices=zip(RATING_RANGE, RATING_RANGE))
|
bsd-2-clause
| -7,536,275,535,220,474,000
| 38.557252
| 78
| 0.606716
| false
| 4.455718
| false
| false
| false
|
Sabayon/entropy
|
client/solo/commands/help.py
|
1
|
3023
|
# -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <lxnay@sabayon.org>
@contact: lxnay@sabayon.org
@copyright: Fabio Erculiani
@license: GPL-2
B{Entropy Command Line Client}.
"""
import argparse
from entropy.i18n import _
from entropy.output import teal, purple, darkgreen
from _entropy.solo.colorful import ColorfulFormatter
from _entropy.solo.commands.descriptor import SoloCommandDescriptor
from _entropy.solo.commands.command import SoloCommand
class SoloHelp(SoloCommand):
"""
Main Solo help command.
"""
NAME = "help"
ALIASES = ["-h", "--help"]
CATCH_ALL = True
def parse(self):
"""
Parse help command
"""
return self._show_help, []
def bashcomp(self, last_arg):
"""
Overridden from SoloCommand
"""
import sys
descriptors = SoloCommandDescriptor.obtain()
descriptors.sort(key = lambda x: x.get_name())
outcome = []
for descriptor in descriptors:
name = descriptor.get_name()
if name == SoloHelp.NAME:
# do not add self
continue
outcome.append(name)
aliases = descriptor.get_class().ALIASES
outcome.extend(aliases)
def _startswith(string):
if last_arg is not None:
return string.startswith(last_arg)
return True
outcome = sorted(filter(_startswith, outcome))
sys.stdout.write(" ".join(outcome) + "\n")
sys.stdout.flush()
def _show_help(self, *args):
# equo help <foo> <bar>
if len(self._args) > 1:
# syntax error
return -10
parser = argparse.ArgumentParser(
description=_("Entropy Command Line Client, Equo"),
epilog="http://www.sabayon.org",
formatter_class=ColorfulFormatter)
# filtered out in solo.main. Will never get here
parser.add_argument(
"--color", action="store_true",
default=None, help=_("force colored output"))
descriptors = SoloCommandDescriptor.obtain()
descriptors.sort(key = lambda x: x.get_name())
group = parser.add_argument_group("command", "available commands")
for descriptor in descriptors:
if descriptor.get_class().HIDDEN:
continue
aliases = descriptor.get_class().ALIASES
aliases_str = ", ".join([teal(x) for x in aliases])
if aliases_str:
aliases_str = " [%s]" % (aliases_str,)
name = "%s%s" % (purple(descriptor.get_name()),
aliases_str)
desc = descriptor.get_description()
group.add_argument(name, help=darkgreen(desc), action="store_true")
parser.print_help()
if not self._args:
return 1
return 0
SoloCommandDescriptor.register(
SoloCommandDescriptor(
SoloHelp,
SoloHelp.NAME,
_("this help"))
)
|
gpl-2.0
| -6,783,595,746,927,479,000
| 28.349515
| 79
| 0.571948
| false
| 4.052279
| false
| false
| false
|
jriehl/numba
|
numba/typing/collections.py
|
1
|
4091
|
from __future__ import print_function, division, absolute_import
from .. import types, utils, errors
import operator
from .templates import (AttributeTemplate, ConcreteTemplate, AbstractTemplate,
infer_global, infer, infer_getattr,
signature, bound_function, make_callable_template)
from .builtins import normalize_1d_index
@infer_global(operator.contains)
class InContainer(AbstractTemplate):
key = operator.contains
def generic(self, args, kws):
cont, item = args
if isinstance(cont, types.Container):
return signature(types.boolean, cont, cont.dtype)
@infer_global(len)
class ContainerLen(AbstractTemplate):
def generic(self, args, kws):
assert not kws
(val,) = args
if isinstance(val, (types.Container)):
return signature(types.intp, val)
@infer_global(operator.truth)
class SequenceBool(AbstractTemplate):
key = operator.truth
def generic(self, args, kws):
assert not kws
(val,) = args
if isinstance(val, (types.Sequence)):
return signature(types.boolean, val)
@infer_global(operator.getitem)
class GetItemSequence(AbstractTemplate):
key = operator.getitem
def generic(self, args, kws):
seq, idx = args
if isinstance(seq, types.Sequence):
idx = normalize_1d_index(idx)
if isinstance(idx, types.SliceType):
# Slicing a tuple only supported with static_getitem
if not isinstance(seq, types.BaseTuple):
return signature(seq, seq, idx)
elif isinstance(idx, types.Integer):
return signature(seq.dtype, seq, idx)
@infer_global(operator.setitem)
class SetItemSequence(AbstractTemplate):
def generic(self, args, kws):
seq, idx, value = args
if isinstance(seq, types.MutableSequence):
idx = normalize_1d_index(idx)
if isinstance(idx, types.SliceType):
return signature(types.none, seq, idx, seq)
elif isinstance(idx, types.Integer):
if not self.context.can_convert(value, seq.dtype):
msg = "invalid setitem with value of {} to element of {}"
raise errors.TypingError(msg.format(types.unliteral(value), seq.dtype))
return signature(types.none, seq, idx, seq.dtype)
@infer_global(operator.delitem)
class DelItemSequence(AbstractTemplate):
def generic(self, args, kws):
seq, idx = args
if isinstance(seq, types.MutableSequence):
idx = normalize_1d_index(idx)
return signature(types.none, seq, idx)
# --------------------------------------------------------------------------
# named tuples
@infer_getattr
class NamedTupleAttribute(AttributeTemplate):
key = types.BaseNamedTuple
def resolve___class__(self, tup):
return types.NamedTupleClass(tup.instance_class)
def generic_resolve(self, tup, attr):
# Resolution of other attributes
try:
index = tup.fields.index(attr)
except ValueError:
return
return tup[index]
@infer_getattr
class NamedTupleClassAttribute(AttributeTemplate):
key = types.NamedTupleClass
def resolve___call__(self, classty):
"""
Resolve the named tuple constructor, aka the class's __call__ method.
"""
instance_class = classty.instance_class
pysig = utils.pysignature(instance_class)
def typer(*args, **kws):
# Fold keyword args
try:
bound = pysig.bind(*args, **kws)
except TypeError as e:
msg = "In '%s': %s" % (instance_class, e)
e.args = (msg,)
raise
assert not bound.kwargs
return types.BaseTuple.from_types(bound.args, instance_class)
# Override the typer's pysig to match the namedtuple constructor's
typer.pysig = pysig
return types.Function(make_callable_template(self.key, typer))
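# Illustrative sketch of what the named tuple templates above enable in user code
# (assumes a working numba installation; not executed as part of this module):
#
#   from collections import namedtuple
#   from numba import njit
#
#   Point = namedtuple('Point', ['x', 'y'])
#
#   @njit
#   def swap(p):
#       # construction and attribute access are typed by the templates above
#       return Point(p.y, p.x)
#
#   swap(Point(1, 2))   # -> Point(x=2, y=1)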
|
bsd-2-clause
| -9,103,186,875,090,370,000
| 32.260163
| 91
| 0.609875
| false
| 4.248183
| false
| false
| false
|
enixdark/im-r-e-d-i-s
|
flask-cook/migrations/versions/399106d8a6ad_.py
|
1
|
1071
|
"""empty message
Revision ID: 399106d8a6ad
Revises: None
Create Date: 2015-03-06 03:55:19.157958
"""
# revision identifiers, used by Alembic.
revision = '399106d8a6ad'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('category',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('product',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('price', sa.Float(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['category_id'], ['category.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('product')
op.drop_table('category')
### end Alembic commands ###
|
mit
| -4,522,302,353,539,535,000
| 26.461538
| 64
| 0.661998
| false
| 3.488599
| false
| false
| false
|
stonebig/bokeh
|
bokeh/model.py
|
1
|
29925
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide a base class for all objects (called Bokeh Models) that can go in
a Bokeh |Document|.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from json import loads
from operator import itemgetter
# External imports
from six import iteritems, string_types
# Bokeh imports
from .core.json_encoder import serialize_json
from .core.properties import Any, Dict, Instance, List, String
from .core.has_props import HasProps, MetaHasProps
from .core.query import find
from .events import Event
from .themes import default as default_theme
from .util.callback_manager import PropertyCallbackManager, EventCallbackManager
from .util.future import with_metaclass
from .util.serialization import make_id
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'collect_models',
'get_class',
'Model',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
def collect_filtered_models(discard, *input_values):
''' Collect a duplicate-free list of all other Bokeh models referred to by
this model, or by any of its references, etc, unless filtered-out by the
provided callable.
Iterate over ``input_values`` and descend through their structure
collecting all nested ``Models`` on the go.
Args:
        discard (Callable[[Model], bool])
a callable which accepts a *Model* instance as its single argument
and returns a boolean stating whether to discard the instance. The
latter means that the instance will not be added to collected
models nor will its references be explored.
*input_values (Model)
Bokeh models to collect other models from
Returns:
        list[Model] : all models reachable from ``input_values`` that were not discarded
'''
ids = set([])
collected = []
queued = []
def queue_one(obj):
if obj.id not in ids and not (callable(discard) and discard(obj)):
queued.append(obj)
for value in input_values:
_visit_value_and_its_immediate_references(value, queue_one)
while queued:
obj = queued.pop(0)
if obj.id not in ids:
ids.add(obj.id)
collected.append(obj)
_visit_immediate_value_references(obj, queue_one)
return collected
def collect_models(*input_values):
''' Collect a duplicate-free list of all other Bokeh models referred to by
this model, or by any of its references, etc.
Iterate over ``input_values`` and descend through their structure
collecting all nested ``Models`` on the go. The resulting list is
duplicate-free based on objects' identifiers.
Args:
*input_values (Model)
Bokeh models to collect other models from
Returns:
list[Model] : all models reachable from this one.
'''
return collect_filtered_models(None, *input_values)
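# Illustrative usage sketch (not executed here; assumes the rest of Bokeh is
# importable):
#
#   from bokeh.plotting import figure
#   p = figure()
#   p.circle([1, 2, 3], [4, 5, 6])
#   reachable = collect_models(p)   # plot, ranges, axes, glyph renderers, sources, ...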
def get_class(view_model_name):
''' Look up a Bokeh model class, given its view model name.
Args:
view_model_name (str) :
A view model name for a Bokeh model to look up
Returns:
Model: the model class corresponding to ``view_model_name``
Raises:
KeyError, if the model cannot be found
Example:
.. code-block:: python
>>> from bokeh.model import get_class
>>> get_class("Range1d")
<class 'bokeh.models.ranges.Range1d'>
'''
# in order to look up from the model catalog that MetaModel maintains, it
    # has to be created first. These imports ensure that all built-in Bokeh
# models are represented in the catalog.
from . import models; models
from .plotting import Figure; Figure
d = MetaModel.model_class_reverse_map
if view_model_name in d:
return d[view_model_name]
else:
raise KeyError("View model name '%s' not found" % view_model_name)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
class MetaModel(MetaHasProps):
''' Specialize the construction of |Model| classes.
This class is a `metaclass`_ for |Model| that is responsible for
automatically cataloging all Bokeh models that get defined, so that the
serialization machinery between Bokeh and BokehJS can function properly.
.. note::
It is worth pointing out explicitly that this relies on the rules
for Metaclass inheritance in Python.
Bokeh works by replicating Python model objects (e.g. plots, ranges,
data sources, which are all |HasProps| subclasses) into BokehJS. In the
case of using a Bokeh server, the Bokeh model objects can also be
synchronized bidirectionally. This is accomplished by serializing the
models to and from a JSON format, that includes the name of the model type
as part of the payload, as well as a unique ID, and all the attributes:
.. code-block:: javascript
{
type: "Plot",
id: 100032,
attributes: { ... }
}
Typically the type name is inferred automatically from the Python class
name, and is set as the ``__view_model__`` class attribute on the Model
    class that is created. But it is also possible to override this value
explicitly:
.. code-block:: python
class Foo(Model): pass
class Bar(Model):
        __view_model__ = "Quux"
This metaclass will raise an error if two Bokeh models are created that
attempt to have the same view model name. The only exception made is if
one of the models has a custom ``__implementation__`` in its class
definition.
This metaclass also handles subtype relationships between Bokeh models.
Occasionally it may be necessary for multiple class types on the Python
side to resolve to the same type on the BokehJS side. This is called
subtyping, and is expressed through a ``__subtype__`` class attribute on
a model:
.. code-block:: python
class Foo(Model): pass
class Bar(Foo):
__view_model__ = "Foo"
__subtype__ = "Bar"
In this case, python instances of ``Foo`` and ``Bar`` will both resolve to
``Foo`` models in BokehJS. In the context of a Bokeh server application,
the original python types will be faithfully round-tripped. (Without the
``__subtype__`` specified, the above code would raise an error due to
duplicate view model names.)
.. _metaclass: https://docs.python.org/3/reference/datamodel.html#metaclasses
'''
model_class_reverse_map = {}
def __new__(meta_cls, class_name, bases, class_dict):
'''
Raises:
Warning
'''
# use an explicitly provided view model name if there is one
if "__view_model__" not in class_dict:
class_dict["__view_model__"] = class_name
# call the parent metaclass to create the new model type
newcls = super(MetaModel, meta_cls).__new__(meta_cls, class_name, bases, class_dict)
# update the mapping of view model names to classes, checking for any duplicates
# and handling any subtype relationships or custom implementations
entry = class_dict.get("__subtype__", class_dict["__view_model__"])
if entry in MetaModel.model_class_reverse_map and not hasattr(newcls, "__implementation__"):
raise Warning("Duplicate __view_model__ or __subtype__ declaration of '%s' for " \
"class %s. Previous definition: %s" % \
(entry, class_name,
MetaModel.model_class_reverse_map[entry]))
MetaModel.model_class_reverse_map[entry] = newcls
return newcls
_HTML_REPR = """
<script>
(function() {
var expanded = false;
var ellipsis = document.getElementById("%(ellipsis_id)s");
ellipsis.addEventListener("click", function() {
var rows = document.getElementsByClassName("%(cls_name)s");
for (var i = 0; i < rows.length; i++) {
var el = rows[i];
el.style.display = expanded ? "none" : "table-row";
}
ellipsis.innerHTML = expanded ? "…)" : "‹‹‹";
expanded = !expanded;
});
})();
</script>
"""
class Model(with_metaclass(MetaModel, HasProps, PropertyCallbackManager, EventCallbackManager)):
''' Base class for all objects stored in Bokeh |Document| instances.
'''
def __new__(cls, *args, **kwargs):
obj = super(Model, cls).__new__(cls)
obj._id = kwargs.pop("id", make_id())
obj._document = None
obj._temp_document = None
return obj
def __init__(self, **kwargs):
# "id" is popped from **kw in __new__, so in an ideal world I don't
# think it should be here too. But Python does this, so it is:
#
# class Foo(object):
# def __new__(cls, *args, **kw):
# obj = super(Foo, cls).__new__(cls)
# obj.bar = kw.pop("bar", 111)
# print("__new__ :", id(kw), kw)
# return obj
# def __init__(self, **kw):
# print("__init__ :", id(kw), kw)
#
# >>> f = Foo(bar=10)
# __new__ : 4405522296 {}
# __init__ : 4405522296 {'bar': 10}
kwargs.pop("id", None)
super(Model, self).__init__(**kwargs)
default_theme.apply_to_model(self)
def __str__(self):
return "%s(id=%r, ...)" % (self.__class__.__name__, getattr(self, "id", None))
__repr__ = __str__
@property
def id(self):
return self._id
name = String(help="""
An arbitrary, user-supplied name for this model.
This name can be useful when querying the document to retrieve specific
Bokeh models.
.. code:: python
>>> plot.circle([1,2,3], [4,5,6], name="temp")
>>> plot.select(name="temp")
[GlyphRenderer(id='399d53f5-73e9-44d9-9527-544b761c7705', ...)]
.. note::
No uniqueness guarantees or other conditions are enforced on any names
that are provided, nor is the name used directly by Bokeh for any
reason.
""")
tags = List(Any, help="""
An optional list of arbitrary, user-supplied values to attach to this
model.
This data can be useful when querying the document to retrieve specific
Bokeh models:
.. code:: python
>>> r = plot.circle([1,2,3], [4,5,6])
>>> r.tags = ["foo", 10]
>>> plot.select(tags=['foo', 10])
[GlyphRenderer(id='1de4c3df-a83d-480a-899b-fb263d3d5dd9', ...)]
Or simply a convenient way to attach any necessary metadata to a model
that can be accessed by ``CustomJS`` callbacks, etc.
.. note::
No uniqueness guarantees or other conditions are enforced on any tags
that are provided, nor are the tags used directly by Bokeh for any
reason.
""")
js_event_callbacks = Dict(String, List(Instance("bokeh.models.callbacks.CustomJS")),
help="""
A mapping of event names to lists of ``CustomJS`` callbacks.
    Typically, rather than modifying this property directly, callbacks should be
added using the ``Model.js_on_event`` method:
.. code:: python
callback = CustomJS(code="console.log('tap event occurred')")
plot.js_on_event('tap', callback)
""")
subscribed_events = List(String, help="""
List of events that are subscribed to by Python callbacks. This is
the set of events that will be communicated from BokehJS back to
Python for this model.
""")
js_property_callbacks = Dict(String, List(Instance("bokeh.models.callbacks.CustomJS")), help="""
A mapping of attribute names to lists of ``CustomJS`` callbacks, to be set up on
BokehJS side when the document is created.
    Typically, rather than modifying this property directly, callbacks should be
added using the ``Model.js_on_change`` method:
.. code:: python
callback = CustomJS(code="console.log('stuff')")
plot.x_range.js_on_change('start', callback)
""")
# Properties --------------------------------------------------------------
@property
def document(self):
''' The |Document| this model is attached to (can be ``None``)
'''
if self._temp_document is not None:
return self._temp_document
return self._document
@property
def ref(self):
''' A Bokeh protocol "reference" to this model, i.e. a dict of the
form:
.. code-block:: python
{
'type' : << view model name >>
'id' : << unique model id >>
}
Additionally there may be a `subtype` field if this model is a subtype.
'''
if "__subtype__" in self.__class__.__dict__:
return {
'type' : self.__view_model__,
'subtype' : self.__subtype__,
'id' : self.id,
}
else:
return {
'type' : self.__view_model__,
'id' : self.id,
}
# Public methods ----------------------------------------------------------
def js_on_event(self, event, *callbacks):
if not isinstance(event, string_types) and issubclass(event, Event):
event = event.event_name
if event not in self.js_event_callbacks:
self.js_event_callbacks[event] = []
for callback in callbacks:
if callback in self.js_event_callbacks[event]:
continue
self.js_event_callbacks[event].append(callback)
def js_link(self, attr, other, other_attr):
''' Link two Bokeh model properties using JavaScript.
This is a convenience method that simplifies adding a CustomJS callback
to update one Bokeh model property whenever another changes value.
Args:
attr (str) :
The name of a Bokeh property on this model
other (Model):
A Bokeh model to link to self.attr
other_attr (str) :
The property on ``other`` to link together
Added in version 1.1
Raises:
ValueError
Examples:
This code with ``js_link``:
.. code :: python
select.js_link('value', plot, 'sizing_mode')
is equivalent to the following:
.. code:: python
from bokeh.models import CustomJS
select.js_on_change('value',
CustomJS(args=dict(other=plot),
code="other.sizing_mode = this.value"
)
)
'''
if attr not in self.properties():
raise ValueError("%r is not a property of self (%r)" % (attr, self))
if not isinstance(other, Model):
raise ValueError("'other' is not a Bokeh model: %r" % other)
if other_attr not in other.properties():
raise ValueError("%r is not a property of other (%r)" % (other_attr, other))
from bokeh.models.callbacks import CustomJS
cb = CustomJS(args=dict(other=other), code="other.%s = this.%s" % (other_attr, attr))
self.js_on_change(attr, cb)
def js_on_change(self, event, *callbacks):
''' Attach a ``CustomJS`` callback to an arbitrary BokehJS model event.
On the BokehJS side, change events for model properties have the
form ``"change:property_name"``. As a convenience, if the event name
passed to this method is also the name of a property on the model,
then it will be prefixed with ``"change:"`` automatically:
.. code:: python
# these two are equivalent
source.js_on_change('data', callback)
source.js_on_change('change:data', callback)
However, there are other kinds of events that can be useful to respond
to, in addition to property change events. For example to run a
callback whenever data is streamed to a ``ColumnDataSource``, use the
``"stream"`` event on the source:
.. code:: python
source.js_on_change('streaming', callback)
'''
if len(callbacks) == 0:
raise ValueError("js_on_change takes an event name and one or more callbacks, got only one parameter")
# handle any CustomJS callbacks here
from bokeh.models.callbacks import CustomJS
if not all(isinstance(x, CustomJS) for x in callbacks):
raise ValueError("not all callback values are CustomJS instances")
if event in self.properties():
event = "change:%s" % event
if event not in self.js_property_callbacks:
self.js_property_callbacks[event] = []
for callback in callbacks:
if callback in self.js_property_callbacks[event]:
continue
self.js_property_callbacks[event].append(callback)
def layout(self, side, plot):
'''
'''
try:
return self in getattr(plot, side)
except:
return []
def on_change(self, attr, *callbacks):
''' Add a callback on this object to trigger when ``attr`` changes.
Args:
attr (str) : an attribute name on this object
*callbacks (callable) : callback functions to register
Returns:
None
Example:
.. code-block:: python
widget.on_change('value', callback1, callback2, ..., callback_n)
'''
if attr not in self.properties():
raise ValueError("attempted to add a callback on nonexistent %s.%s property" % (self.__class__.__name__, attr))
super(Model, self).on_change(attr, *callbacks)
def references(self):
''' Returns all ``Models`` that this object has references to.
'''
return set(collect_models(self))
def select(self, selector):
''' Query this object and all of its references for objects that
match the given selector.
Args:
selector (JSON-like) :
Returns:
seq[Model]
'''
return find(self.references(), selector)
def select_one(self, selector):
''' Query this object and all of its references for objects that
match the given selector. Raises an error if more than one object
is found. Returns single matching object, or None if nothing is found
Args:
selector (JSON-like) :
Returns:
Model
'''
result = list(self.select(selector))
if len(result) > 1:
raise ValueError("Found more than one object matching %s: %r" % (selector, result))
if len(result) == 0:
return None
return result[0]
def set_select(self, selector, updates):
''' Update objects that match a given selector with the specified
attribute/value updates.
Args:
selector (JSON-like) :
updates (dict) :
Returns:
None
'''
for obj in self.select(selector):
for key, val in updates.items():
setattr(obj, key, val)
def to_json(self, include_defaults):
''' Returns a dictionary of the attributes of this object,
containing only "JSON types" (string, number, boolean,
none, dict, list).
References to other objects are serialized as "refs" (just
the object ID and type info), so the deserializer will
need to separately have the full attributes of those
other objects.
        There's no corresponding ``from_json()`` because deserializing
        an object is normally done in the context of a
Document (since the Document can resolve references).
For most purposes it's best to serialize and deserialize
entire documents.
Args:
include_defaults (bool) : whether to include attributes
that haven't been changed from the default
'''
return loads(self.to_json_string(include_defaults=include_defaults))
def to_json_string(self, include_defaults):
''' Returns a JSON string encoding the attributes of this object.
References to other objects are serialized as references
(just the object ID and type info), so the deserializer
will need to separately have the full attributes of those
other objects.
        There's no corresponding ``from_json_string()`` because deserializing
        an object is normally done in the context of a
Document (since the Document can resolve references).
For most purposes it's best to serialize and deserialize
entire documents.
Args:
include_defaults (bool) : whether to include attributes
that haven't been changed from the default
'''
json_like = self._to_json_like(include_defaults=include_defaults)
json_like['id'] = self.id
# serialize_json "fixes" the JSON from _to_json_like by converting
        # all types into plain JSON types (it converts Model into refs,
# for example).
return serialize_json(json_like)
def trigger(self, attr, old, new, hint=None, setter=None):
'''
'''
# The explicit assumption here is that hinted events do not need to
# go through all the same invalidation steps. Currently this is the
# case for ColumnsStreamedEvent and ColumnsPatchedEvent. However,
# this may need to be further refined in the future, if the
# assumption does not hold for future hinted events (e.g. the hint
# could specify explicitly whether to do normal invalidation or not)
if hint is None:
dirty = { 'count' : 0 }
def mark_dirty(obj):
dirty['count'] += 1
if self._document is not None:
_visit_value_and_its_immediate_references(new, mark_dirty)
_visit_value_and_its_immediate_references(old, mark_dirty)
if dirty['count'] > 0:
self._document._invalidate_all_models()
# chain up to invoke callbacks
super(Model, self).trigger(attr, old, new, hint=hint, setter=setter)
def _attach_document(self, doc):
''' Attach a model to a Bokeh |Document|.
This private interface should only ever called by the Document
implementation to set the private ._document field properly
'''
if self._document is not None and self._document is not doc:
raise RuntimeError("Models must be owned by only a single document, %r is already in a doc" % (self))
doc.theme.apply_to_model(self)
self._document = doc
self._update_event_callbacks()
@staticmethod
def _clear_extensions():
MetaModel.model_class_reverse_map = {
k:v for k,v in MetaModel.model_class_reverse_map.items()
if getattr(v, "__implementation__", None) is None
}
def _detach_document(self):
''' Detach a model from a Bokeh |Document|.
This private interface should only ever called by the Document
implementation to unset the private ._document field properly
'''
self._document = None
default_theme.apply_to_model(self)
def _to_json_like(self, include_defaults):
''' Returns a dictionary of the attributes of this object, in
a layout corresponding to what BokehJS expects at unmarshalling time.
This method does not convert "Bokeh types" into "plain JSON types,"
for example each child Model will still be a Model, rather
than turning into a reference, numpy isn't handled, etc.
That's what "json like" means.
This method should be considered "private" or "protected",
for use internal to Bokeh; use ``to_json()`` instead because
it gives you only plain JSON-compatible types.
Args:
include_defaults (bool) : whether to include attributes
that haven't been changed from the default.
'''
all_attrs = self.properties_with_values(include_defaults=include_defaults)
# If __subtype__ is defined, then this model may introduce properties
# that don't exist on __view_model__ in bokehjs. Don't serialize such
# properties.
subtype = getattr(self.__class__, "__subtype__", None)
if subtype is not None and subtype != self.__class__.__view_model__:
attrs = {}
for attr, value in all_attrs.items():
if attr in self.__class__.__dict__:
continue
else:
attrs[attr] = value
else:
attrs = all_attrs
for (k, v) in attrs.items():
# we can't serialize Infinity, we send it as None and
# the other side has to fix it up. This transformation
# can't be in our json_encoder because the json
# module checks for inf before it calls the custom
# encoder.
if isinstance(v, float) and v == float('inf'):
attrs[k] = None
return attrs
def _repr_html_(self):
'''
'''
module = self.__class__.__module__
name = self.__class__.__name__
_id = getattr(self, "_id", None)
cls_name = make_id()
def row(c):
return '<div style="display: table-row;">' + c + '</div>'
def hidden_row(c):
return '<div class="%s" style="display: none;">%s</div>' % (cls_name, c)
def cell(c):
return '<div style="display: table-cell;">' + c + '</div>'
html = ''
html += '<div style="display: table;">'
ellipsis_id = make_id()
ellipsis = '<span id="%s" style="cursor: pointer;">…)</span>' % ellipsis_id
prefix = cell('<b title="%s.%s">%s</b>(' % (module, name, name))
html += row(prefix + cell('id' + ' = ' + repr(_id) + ', ' + ellipsis))
props = self.properties_with_values().items()
sorted_props = sorted(props, key=itemgetter(0))
all_props = sorted_props
for i, (prop, value) in enumerate(all_props):
end = ')' if i == len(all_props)-1 else ','
html += hidden_row(cell("") + cell(prop + ' = ' + repr(value) + end))
html += '</div>'
html += _HTML_REPR % dict(ellipsis_id=ellipsis_id, cls_name=cls_name)
return html
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _visit_immediate_value_references(value, visitor):
''' Visit all references to another Model without recursing into any
of the child Model; may visit the same Model more than once if
it's referenced more than once. Does not visit the passed-in value.
'''
if isinstance(value, HasProps):
for attr in value.properties_with_refs():
child = getattr(value, attr)
_visit_value_and_its_immediate_references(child, visitor)
else:
_visit_value_and_its_immediate_references(value, visitor)
_common_types = {int, float, str}
def _visit_value_and_its_immediate_references(obj, visitor):
''' Recurse down Models, HasProps, and Python containers
The ordering in this function is to optimize performance. We check the
    most common types (int, float, str) first so that we can quickly return in
the common case. We avoid isinstance and issubclass checks in a couple
places with `type` checks because isinstance checks can be slow.
'''
typ = type(obj)
if typ in _common_types: # short circuit on common base types
return
if typ is list or issubclass(typ, (list, tuple)): # check common containers
for item in obj:
_visit_value_and_its_immediate_references(item, visitor)
elif issubclass(typ, dict):
for key, value in iteritems(obj):
_visit_value_and_its_immediate_references(key, visitor)
_visit_value_and_its_immediate_references(value, visitor)
elif issubclass(typ, HasProps):
if issubclass(typ, Model):
visitor(obj)
else:
# this isn't a Model, so recurse into it
_visit_immediate_value_references(obj, visitor)
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
bsd-3-clause
| 237,578,417,187,544,830
| 33.555427
| 123
| 0.574169
| false
| 4.492569
| false
| false
| false
|
mjamesruggiero/lily
|
lily/apriori.py
|
1
|
4157
|
from collections import defaultdict
import logging
logging.basicConfig(level=logging.INFO, format="%(funcName)s\t%(message)s")
def createC1(dataset):
C1 = []
for transaction in dataset:
for item in transaction:
if not [item] in C1:
C1.append([item])
C1.sort()
return map(frozenset, C1)
def scan_d(dataset, candidate_sets, minimum_support):
    ss_count = count_candidates(candidate_sets, dataset)
num_items = float(len(dataset))
return_list = []
support_data = {}
for key in ss_count:
support = ss_count[key] / num_items
if support >= minimum_support:
return_list.insert(0, key)
support_data[key] = support
return return_list, support_data
def count_candidates(candidate_sets, dataset):
counts = defaultdict(int)
for element in dataset:
for candidate in candidate_sets:
if candidate.issubset(element):
counts[candidate] += 1
return counts
def apriori_generate(Lk, k):
"""
Takes a list of frequent itemsets, Lk
and the size of the sets, to produce
candidate itemsets.
"""
return_list = []
len_Lk = len(Lk)
for i in range(len_Lk):
for j in range(i + 1, len_Lk):
L1 = list(Lk[i])[:k - 2]
L2 = list(Lk[j])[:k - 2]
L1.sort()
L2.sort()
if L1 == L2:
return_list.append(Lk[i] | Lk[j]) # set union
return return_list
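# Worked example (illustrative): with k = 3 and
#   Lk = [frozenset({0, 1}), frozenset({0, 2}), frozenset({1, 2})]
# only the pair ({0, 1}, {0, 2}) shares its first k-2 sorted items ([0]), so the
# single candidate produced is frozenset({0, 1, 2}). Comparing only the k-2 item
# prefix is what keeps the same union from being generated more than once.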
def apriori(dataset, minimum_support=0.5):
C1 = createC1(dataset)
D = map(set, dataset)
L1, support_data = scan_d(D, C1, minimum_support)
L = [L1]
k = 2
while(len(L[k - 2]) > 0):
Ck = apriori_generate(L[k - 2], k)
Lk, support_k = scan_d(D, Ck, minimum_support)
support_data.update(support_k)
L.append(Lk)
k += 1
return L, support_data
def generate_rules(L, support_data, minimum_confidence=0.7):
big_rule_list = []
for i in range(1, len(L)):
for freq_set in L[i]:
h1 = [frozenset([item]) for item in freq_set]
if i > 1:
rules_from_consequent(freq_set,
h1,
support_data,
big_rule_list,
minimum_confidence)
else:
calculate_confidence(freq_set,
h1,
support_data,
big_rule_list,
minimum_confidence)
return big_rule_list
def calculate_confidence(freq_set,
h,
support_data,
big_rule_list,
minimum_confidence):
pruned_h = []
for conseq in h:
conf = support_data[freq_set] / support_data[freq_set - conseq]
if conf >= minimum_confidence:
big_rule_list.append((freq_set - conseq, conseq, conf))
pruned_h.append(conseq)
return pruned_h
def rules_from_consequent(freq_set,
h,
support_data,
big_rule_list,
minimum_confidence=0.7):
"""
TODO: instead of moving large param list around,
use an object
"""
m = len(h[0])
if len(freq_set) > (m + 1): # merge it more
new_candidates = apriori_generate(h, m + 1)
new_candidates = calculate_confidence(freq_set,
new_candidates,
support_data,
big_rule_list,
minimum_confidence)
if len(new_candidates) > 1: # need at least 2 sets to merge
rules_from_consequent(freq_set,
new_candidates,
support_data,
big_rule_list,
minimum_confidence)
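# Minimal usage sketch (illustrative only; this module targets Python 2, where
# map() returns a list that scan_d can iterate more than once).
if __name__ == '__main__':
    transactions = [[1, 3, 4], [2, 3, 5], [1, 2, 3, 5], [2, 5]]
    L, support = apriori(transactions, minimum_support=0.5)
    rules = generate_rules(L, support, minimum_confidence=0.7)
    logging.info("frequent itemsets: %s", L)
    logging.info("association rules: %s", rules)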
|
apache-2.0
| 2,322,242,299,370,424
| 30.976923
| 75
| 0.479192
| false
| 4.035922
| false
| false
| false
|
mementum/backtrader
|
backtrader/utils/flushfile.py
|
1
|
1588
|
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015, 2016, 2017 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
class flushfile(object):
def __init__(self, f):
self.f = f
def write(self, x):
self.f.write(x)
self.f.flush()
def flush(self):
self.f.flush()
if sys.platform == 'win32':
sys.stdout = flushfile(sys.stdout)
sys.stderr = flushfile(sys.stderr)
class StdOutDevNull(object):
def __init__(self):
self.stdout = sys.stdout
sys.stdout = self
def write(self, x):
pass
def flush(self):
pass
def stop(self):
sys.stdout = self.stdout
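# Usage sketch (illustrative only): temporarily silence stdout around noisy code.
#
#   devnull = StdOutDevNull()
#   print('this output is swallowed')
#   devnull.stop()
#   print('stdout is restored')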
|
gpl-3.0
| 289,876,395,016,867,500
| 26.859649
| 79
| 0.588161
| false
| 3.98995
| false
| false
| false
|
exildev/webpage
|
exile/migrations/0007_auto_20170511_2256.py
|
1
|
2816
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-05-11 22:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('exile', '0006_seccion_posicion'),
]
operations = [
migrations.CreateModel(
name='Contacto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=300)),
('email', models.EmailField(max_length=254)),
('asunto', models.CharField(max_length=300)),
('mensaje', models.TextField()),
],
),
migrations.CreateModel(
name='Footer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=300)),
],
),
migrations.CreateModel(
name='FooterPrincipal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fecha', models.DateField(auto_now_add=True)),
('footer', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='exile.Footer')),
],
options={
'verbose_name': 'Footer Principal',
'verbose_name_plural': 'Footer Principal',
},
),
migrations.CreateModel(
name='OrdenFooter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('posicion', models.IntegerField()),
('footer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='exile.Footer')),
('pagina', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='exile.Page')),
],
),
migrations.CreateModel(
name='SeccionFooter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=400)),
],
),
migrations.AddField(
model_name='ordenfooter',
name='seccion',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='exile.SeccionFooter'),
),
migrations.AddField(
model_name='footer',
name='paginas',
field=models.ManyToManyField(through='exile.OrdenFooter', to='exile.Page'),
),
]
|
mit
| -656,454,561,851,177,300
| 38.661972
| 114
| 0.550426
| false
| 4.221889
| false
| false
| false
|
lmregus/Portfolio
|
python/design_patterns/env/lib/python3.7/site-packages/_pytest/reports.py
|
1
|
13737
|
from pprint import pprint
import py
import six
from _pytest._code.code import ExceptionInfo
from _pytest._code.code import ReprEntry
from _pytest._code.code import ReprEntryNative
from _pytest._code.code import ReprExceptionInfo
from _pytest._code.code import ReprFileLocation
from _pytest._code.code import ReprFuncArgs
from _pytest._code.code import ReprLocals
from _pytest._code.code import ReprTraceback
from _pytest._code.code import TerminalRepr
from _pytest.outcomes import skip
from _pytest.pathlib import Path
def getslaveinfoline(node):
try:
return node._slaveinfocache
except AttributeError:
d = node.slaveinfo
ver = "%s.%s.%s" % d["version_info"][:3]
node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
d["id"],
d["sysplatform"],
ver,
d["executable"],
)
return s
class BaseReport(object):
when = None
location = None
def __init__(self, **kw):
self.__dict__.update(kw)
def toterminal(self, out):
if hasattr(self, "node"):
out.line(getslaveinfoline(self.node))
longrepr = self.longrepr
if longrepr is None:
return
if hasattr(longrepr, "toterminal"):
longrepr.toterminal(out)
else:
try:
out.line(longrepr)
except UnicodeEncodeError:
out.line("<unprintable longrepr>")
def get_sections(self, prefix):
for name, content in self.sections:
if name.startswith(prefix):
yield prefix, content
@property
def longreprtext(self):
"""
Read-only property that returns the full string representation
of ``longrepr``.
.. versionadded:: 3.0
"""
tw = py.io.TerminalWriter(stringio=True)
tw.hasmarkup = False
self.toterminal(tw)
exc = tw.stringio.getvalue()
return exc.strip()
@property
def caplog(self):
"""Return captured log lines, if log capturing is enabled
.. versionadded:: 3.5
"""
return "\n".join(
content for (prefix, content) in self.get_sections("Captured log")
)
@property
def capstdout(self):
"""Return captured text from stdout, if capturing is enabled
.. versionadded:: 3.0
"""
return "".join(
content for (prefix, content) in self.get_sections("Captured stdout")
)
@property
def capstderr(self):
"""Return captured text from stderr, if capturing is enabled
.. versionadded:: 3.0
"""
return "".join(
content for (prefix, content) in self.get_sections("Captured stderr")
)
passed = property(lambda x: x.outcome == "passed")
failed = property(lambda x: x.outcome == "failed")
skipped = property(lambda x: x.outcome == "skipped")
@property
def fspath(self):
return self.nodeid.split("::")[0]
@property
def count_towards_summary(self):
"""
**Experimental**
Returns True if this report should be counted towards the totals shown at the end of the
test session: "1 passed, 1 failure, etc".
.. note::
This function is considered **experimental**, so beware that it is subject to changes
even in patch releases.
"""
return True
@property
def head_line(self):
"""
**Experimental**
Returns the head line shown with longrepr output for this report, more commonly during
traceback representation during failures::
________ Test.foo ________
In the example above, the head_line is "Test.foo".
.. note::
This function is considered **experimental**, so beware that it is subject to changes
even in patch releases.
"""
if self.location is not None:
fspath, lineno, domain = self.location
return domain
def _to_json(self):
"""
This was originally the serialize_report() function from xdist (ca03269).
Returns the contents of this report as a dict of builtin entries, suitable for
serialization.
Experimental method.
"""
def disassembled_report(rep):
reprtraceback = rep.longrepr.reprtraceback.__dict__.copy()
reprcrash = rep.longrepr.reprcrash.__dict__.copy()
new_entries = []
for entry in reprtraceback["reprentries"]:
entry_data = {
"type": type(entry).__name__,
"data": entry.__dict__.copy(),
}
for key, value in entry_data["data"].items():
if hasattr(value, "__dict__"):
entry_data["data"][key] = value.__dict__.copy()
new_entries.append(entry_data)
reprtraceback["reprentries"] = new_entries
return {
"reprcrash": reprcrash,
"reprtraceback": reprtraceback,
"sections": rep.longrepr.sections,
}
d = self.__dict__.copy()
if hasattr(self.longrepr, "toterminal"):
if hasattr(self.longrepr, "reprtraceback") and hasattr(
self.longrepr, "reprcrash"
):
d["longrepr"] = disassembled_report(self)
else:
d["longrepr"] = six.text_type(self.longrepr)
else:
d["longrepr"] = self.longrepr
for name in d:
if isinstance(d[name], (py.path.local, Path)):
d[name] = str(d[name])
elif name == "result":
d[name] = None # for now
return d
@classmethod
def _from_json(cls, reportdict):
"""
This was originally the serialize_report() function from xdist (ca03269).
Factory method that returns either a TestReport or CollectReport, depending on the calling
        class. It's the caller's responsibility to know which class to pass here.
Experimental method.
"""
if reportdict["longrepr"]:
if (
"reprcrash" in reportdict["longrepr"]
and "reprtraceback" in reportdict["longrepr"]
):
reprtraceback = reportdict["longrepr"]["reprtraceback"]
reprcrash = reportdict["longrepr"]["reprcrash"]
unserialized_entries = []
reprentry = None
for entry_data in reprtraceback["reprentries"]:
data = entry_data["data"]
entry_type = entry_data["type"]
if entry_type == "ReprEntry":
reprfuncargs = None
reprfileloc = None
reprlocals = None
if data["reprfuncargs"]:
reprfuncargs = ReprFuncArgs(**data["reprfuncargs"])
if data["reprfileloc"]:
reprfileloc = ReprFileLocation(**data["reprfileloc"])
if data["reprlocals"]:
reprlocals = ReprLocals(data["reprlocals"]["lines"])
reprentry = ReprEntry(
lines=data["lines"],
reprfuncargs=reprfuncargs,
reprlocals=reprlocals,
filelocrepr=reprfileloc,
style=data["style"],
)
elif entry_type == "ReprEntryNative":
reprentry = ReprEntryNative(data["lines"])
else:
_report_unserialization_failure(entry_type, cls, reportdict)
unserialized_entries.append(reprentry)
reprtraceback["reprentries"] = unserialized_entries
exception_info = ReprExceptionInfo(
reprtraceback=ReprTraceback(**reprtraceback),
reprcrash=ReprFileLocation(**reprcrash),
)
for section in reportdict["longrepr"]["sections"]:
exception_info.addsection(*section)
reportdict["longrepr"] = exception_info
return cls(**reportdict)
def _report_unserialization_failure(type_name, report_class, reportdict):
url = "https://github.com/pytest-dev/pytest/issues"
stream = py.io.TextIO()
pprint("-" * 100, stream=stream)
pprint("INTERNALERROR: Unknown entry type returned: %s" % type_name, stream=stream)
pprint("report_name: %s" % report_class, stream=stream)
pprint(reportdict, stream=stream)
pprint("Please report this bug at %s" % url, stream=stream)
pprint("-" * 100, stream=stream)
raise RuntimeError(stream.getvalue())
class TestReport(BaseReport):
""" Basic test report object (also used for setup and teardown calls if
they fail).
"""
__test__ = False
def __init__(
self,
nodeid,
location,
keywords,
outcome,
longrepr,
when,
sections=(),
duration=0,
user_properties=None,
**extra
):
#: normalized collection node id
self.nodeid = nodeid
#: a (filesystempath, lineno, domaininfo) tuple indicating the
#: actual location of a test item - it might be different from the
#: collected one e.g. if a method is inherited from a different module.
self.location = location
#: a name -> value dictionary containing all keywords and
#: markers associated with a test invocation.
self.keywords = keywords
#: test outcome, always one of "passed", "failed", "skipped".
self.outcome = outcome
#: None or a failure representation.
self.longrepr = longrepr
#: one of 'setup', 'call', 'teardown' to indicate runtest phase.
self.when = when
#: user properties is a list of tuples (name, value) that holds user
#: defined properties of the test
self.user_properties = list(user_properties or [])
        #: list of pairs ``(str, str)`` of extra information which needs to
        #: be marshallable. Used by pytest to add captured text
#: from ``stdout`` and ``stderr``, but may be used by other plugins
#: to add arbitrary information to reports.
self.sections = list(sections)
#: time it took to run just the test
self.duration = duration
self.__dict__.update(extra)
def __repr__(self):
return "<TestReport %r when=%r outcome=%r>" % (
self.nodeid,
self.when,
self.outcome,
)
@classmethod
def from_item_and_call(cls, item, call):
"""
Factory method to create and fill a TestReport with standard item and call info.
"""
when = call.when
duration = call.stop - call.start
keywords = {x: 1 for x in item.keywords}
excinfo = call.excinfo
sections = []
if not call.excinfo:
outcome = "passed"
longrepr = None
else:
if not isinstance(excinfo, ExceptionInfo):
outcome = "failed"
longrepr = excinfo
elif excinfo.errisinstance(skip.Exception):
outcome = "skipped"
r = excinfo._getreprcrash()
longrepr = (str(r.path), r.lineno, r.message)
else:
outcome = "failed"
if call.when == "call":
longrepr = item.repr_failure(excinfo)
else: # exception in setup or teardown
longrepr = item._repr_failure_py(
excinfo, style=item.config.option.tbstyle
)
for rwhen, key, content in item._report_sections:
sections.append(("Captured %s %s" % (key, rwhen), content))
return cls(
item.nodeid,
item.location,
keywords,
outcome,
longrepr,
when,
sections,
duration,
user_properties=item.user_properties,
)
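# --- Hedged usage sketch (editorial addition, not part of pytest itself) -----
# TestReport objects built by from_item_and_call() above are what plugins
# receive in hooks such as pytest_runtest_logreport.  A user's conftest.py
# could inspect them roughly like this; the print formatting is an illustrative
# assumption, only the attributes used (when, failed, nodeid, duration) come
# from the class above.  Kept as comments so no hook gets registered here.
#
# def pytest_runtest_logreport(report):
#     if report.when == "call" and report.failed:
#         print("FAILED %s in %.2fs" % (report.nodeid, report.duration))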
class CollectReport(BaseReport):
when = "collect"
def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
self.nodeid = nodeid
self.outcome = outcome
self.longrepr = longrepr
self.result = result or []
self.sections = list(sections)
self.__dict__.update(extra)
@property
def location(self):
return (self.fspath, None, self.fspath)
def __repr__(self):
return "<CollectReport %r lenresult=%s outcome=%r>" % (
self.nodeid,
len(self.result),
self.outcome,
)
class CollectErrorRepr(TerminalRepr):
def __init__(self, msg):
self.longrepr = msg
def toterminal(self, out):
out.line(self.longrepr, red=True)
def pytest_report_to_serializable(report):
if isinstance(report, (TestReport, CollectReport)):
data = report._to_json()
data["_report_type"] = report.__class__.__name__
return data
def pytest_report_from_serializable(data):
if "_report_type" in data:
if data["_report_type"] == "TestReport":
return TestReport._from_json(data)
elif data["_report_type"] == "CollectReport":
return CollectReport._from_json(data)
        assert False, "Unknown report_type when unserializing data: {}".format(
            data["_report_type"]
        )
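# --- Hedged usage sketch (editorial addition, not part of pytest itself) -----
# A minimal illustration of how the two serialization hooks above fit together,
# e.g. for shipping reports between worker and master processes the way
# pytest-xdist does.  The json transport and the helper name are assumptions of
# this sketch; only pytest_report_to_serializable and
# pytest_report_from_serializable come from this module.
def _example_report_roundtrip(report):
    import json

    # Reduce the TestReport/CollectReport to builtin types...
    data = pytest_report_to_serializable(report)
    # ...move it across a process boundary as text...
    payload = json.dumps(data)
    # ...and rebuild an equivalent report object on the receiving side.
    return pytest_report_from_serializable(json.loads(payload))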
|
mit
| 2,341,126,185,610,534,400
| 31.17096
| 98
| 0.549538
| false
| 4.490683
| true
| false
| false
|
Jonahss/magnificent
|
neilk/monitor.py
|
1
|
2086
|
#
# Monitor a URL continuously, providing
# reports on its availability in a log file
# and as a web page.
#
import json
import logging
import sys
from twisted.internet import task
from twisted.internet import reactor
from twisted.web import server, resource
import urllib2
config = {}
log = logging.getLogger(__name__)
checks = 0
successes = 0
failures = 0
def log_to_stderr(log):
""" set up logging on standard error """
format_str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
logging.basicConfig(stream=sys.stderr,
format=format_str,
level=logging.DEBUG)
def health_check():
""" perform the health check for a URL """
global config, log, checks, successes, failures
request = urllib2.Request(config["url"])
checks += 1
try:
response = urllib2.urlopen(request)
log.info("%s is okay! (%s)", config["url"], response.getcode())
successes += 1
    except urllib2.URLError as e:
log.info("%s is ERROR! (%s)", config["url"], e)
failures += 1
def generate_report():
""" format a string with current report """
report = "%i checks, %i failures, %.2f%% success rate"
return report % (checks,
failures,
100 * float(successes)/checks)
def log_health_report():
""" log the report """
log.info("REPORT: " + generate_report())
class MonitorSite(resource.Resource):
""" simple twisted site, gives the report out on the web """
isLeaf = True
def render_GET(self, request):
return generate_report()
if __name__ == "__main__":
log_to_stderr(log)
config = json.loads(open("monitor_config.json", "rb").read())
site = server.Site(MonitorSite())
reactor.listenTCP(config["port"], site)
log.info("Started site on port %i", config["port"])
check_loop = task.LoopingCall(health_check)
check_loop.start(config["url_frequency"])
report_loop = task.LoopingCall(log_health_report)
report_loop.start(config["report_frequency"])
reactor.run()
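# --- Hedged example (editorial addition, not part of the original script) ----
# The script above expects a monitor_config.json next to it.  The key names are
# taken from the code; the concrete values below are illustrative assumptions.
EXAMPLE_CONFIG = {
    "url": "http://example.com/",  # URL whose availability is checked
    "port": 8080,                  # port serving the plain-text report page
    "url_frequency": 5,            # seconds between health checks
    "report_frequency": 30,        # seconds between logged REPORT lines
}
# json.dump(EXAMPLE_CONFIG, open("monitor_config.json", "wb")) would produce a
# usable starting configuration.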
|
mit
| -6,028,109,550,642,355,000
| 25.075
| 71
| 0.624161
| false
| 3.827523
| true
| false
| false
|