Dataset columns: repo_name (string, length 5–100), path (string, length 4–231), language (string, 1 class: Python), license (string, 15 classes), size (int64, 6–947k), score (float64, 0–0.34), prefix (string, length 0–8.16k), middle (string, length 3–512), suffix (string, length 0–8.17k).

Each record below lists its metadata row, followed by the file content.

| repo_name | path | language | license | size | score |
|---|---|---|---|---|---|
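The prefix, middle, and suffix columns hold a fill-in-the-middle (FIM) split of each source file: concatenating the three strings restores the original file text. A minimal sketch of reassembly, assuming each row behaves like a plain mapping with the string fields listed above (the dataset name and loading mechanism are not specified here):

```python
def reassemble(row):
    """Rejoin a FIM-split row into the original file text.

    The split points can fall mid-identifier, so the three parts
    are concatenated with no separator between them.
    """
    return row["prefix"] + row["middle"] + row["suffix"]

# Hypothetical row, heavily truncated for illustration:
row = {
    "prefix": "def _update_project",
    "middle": "s(self):\n",
    "suffix": "    '''Check project update'''",
}
print(reassemble(row))
```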
| nicozhang/pyspider | pyspider/scheduler/scheduler.py | Python | apache-2.0 | 46,109 | 0.001431 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-02-07 17:05:11
import itertools
import json
import logging
import os
import time
from collections import deque
from six import iteritems, itervalues
from six.moves import queue as Queue
from pyspider.libs import counter, utils
from pyspider.libs.base_handler import BaseHandler
from .task_queue import TaskQueue
logger = logging.getLogger('scheduler')
class Project(object):
'''
project for scheduler
'''
def __init__(self, scheduler, project_info):
'''
'''
self.scheduler = scheduler
self.active_tasks = deque(maxlen=scheduler.ACTIVE_TASKS)
self.task_queue = TaskQueue()
self.task_loaded = False
self._selected_tasks = False # selected tasks after recent pause
self._send_finished_event_wait = 0 # wait for scheduler.FAIL_PAUSE_NUM loop steps before sending the event
self.md5sum = None
self._send_on_get_info = False
self.waiting_get_info = True
self._paused = False
self._paused_time = 0
self._unpause_last_seen = None
self.update(project_info)
@property
def paused(self):
# unpaused --(last FAIL_PAUSE_NUM task failed)--> paused --(PAUSE_TIME)--> unpause_checking
# unpaused <--(last UNPAUSE_CHECK_NUM task have success)--|
# paused <--(last UNPAUSE_CHECK_NUM task no success)--|
if not self._paused:
fail_cnt = 0
for _, task in self.active_tasks:
# ignore select task
if task.get('type') == self.scheduler.TASK_PACK:
continue
if 'process' not in task['track']:
    logger.error('process not in task, %r', task)
    continue
if task['track']['process']['ok']:
break
else:
fail_cnt += 1
if fail_cnt >= self.scheduler.FAIL_PAUSE_NUM:
break
if fail_cnt >= self.scheduler.FAIL_PAUSE_NUM:
self._paused = True
self._paused_time = time.time()
elif self._paused is True and (self._paused_time + self.scheduler.PAUSE_TIME < time.time()):
self._paused = 'checking'
self._unpause_last_seen = self.active_tasks[0][1] if len(self.active_tasks) else None
elif self._paused == 'checking':
cnt = 0
fail_cnt = 0
for _, task in self.active_tasks:
if task is self._unpause_last_seen:
break
# ignore select task
if task.get('type') == self.scheduler.TASK_PACK:
continue
cnt += 1
if task['track']['process']['ok']:
# break with enough check cnt
cnt = max(cnt, self.scheduler.UNPAUSE_CHECK_NUM)
break
else:
fail_cnt += 1
if cnt >= self.scheduler.UNPAUSE_CHECK_NUM:
if fail_cnt == cnt:
self._paused = True
self._paused_time = time.time()
else:
self._paused = False
return self._paused is True
def update(self, project_info):
self.project_info = project_info
self.name = project_info['name']
self.group = project_info['group']
self.db_status = project_info['status']
self.updatetime = project_info['updatetime']
md5sum = utils.md5string(project_info['script'])
if (self.md5sum != md5sum or self.waiting_get_info) and self.active:
self._send_on_get_info = True
self.waiting_get_info = True
self.md5sum = md5sum
if self.active:
self.task_queue.rate = project_info['rate']
self.task_queue.burst = project_info['burst']
else:
self.task_queue.rate = 0
self.task_queue.burst = 0
logger.info('project %s updated, status:%s, paused:%s, %d tasks',
self.name, self.db_status, self.paused, len(self.task_queue))
def on_get_info(self, info):
self.waiting_get_info = False
self.min_tick = info.get('min_tick', 0)
self.retry_delay = info.get('retry_delay', {})
self.crawl_config = info.get('crawl_config', {})
@property
def active(self):
return self.db_status in ('RUNNING', 'DEBUG')
class Scheduler(object):
UPDATE_PROJECT_INTERVAL = 5 * 60
default_schedule = {
'priority': 0,
'retries': 3,
'exetime': 0,
'age': -1,
'itag': None,
}
LOOP_LIMIT = 1000
LOOP_INTERVAL = 0.1
ACTIVE_TASKS = 100
INQUEUE_LIMIT = 0
EXCEPTION_LIMIT = 3
DELETE_TIME = 24 * 60 * 60
DEFAULT_RETRY_DELAY = {
0: 30,
1: 1*60*60,
2: 6*60*60,
3: 12*60*60,
'': 24*60*60
}
FAIL_PAUSE_NUM = 10
PAUSE_TIME = 5*60
UNPAUSE_CHECK_NUM = 3
TASK_PACK = 1
STATUS_PACK = 2 # currently not used
REQUEST_PACK = 3 # currently not used
def __init__(self, taskdb, projectdb, newtask_queue, status_queue,
out_queue, data_path='./data', resultdb=None):
self.taskdb = taskdb
self.projectdb = projectdb
self.resultdb = resultdb
self.newtask_queue = newtask_queue
self.status_queue = status_queue
self.out_queue = out_queue
self.data_path = data_path
self._send_buffer = deque()
self._quit = False
self._exceptions = 0
self.projects = dict()
self._force_update_project = False
self._last_update_project = 0
self._last_tick = int(time.time())
self._postpone_request = []
self._cnt = {
"5m_time": counter.CounterManager(
lambda: counter.TimebaseAverageEventCounter(30, 10)),
"5m": counter.CounterManager(
lambda: counter.TimebaseAverageWindowCounter(30, 10)),
"1h": counter.CounterManager(
lambda: counter.TimebaseAverageWindowCounter(60, 60)),
"1d": counter.CounterManager(
lambda: counter.TimebaseAverageWindowCounter(10 * 60, 24 * 6)),
"all": counter.CounterManager(
lambda: counter.TotalCounter()),
}
self._cnt['1h'].load(os.path.join(self.data_path, 'scheduler.1h'))
self._cnt['1d'].load(os.path.join(self.data_path, 'scheduler.1d'))
self._cnt['all'].load(os.path.join(self.data_path, 'scheduler.all'))
self._last_dump_cnt = 0
def _update_projects(self):
'''Check project update'''
now = time.time()
if (
not self._force_update_project
and self._last_update_project + self.UPDATE_PROJECT_INTERVAL > now
):
return
for project in self.projectdb.check_update(self._last_update_project):
self._update_project(project)
logger.debug("project: %s updated.", project['name'])
self._force_update_project = False
self._last_update_project = now
get_info_attributes = ['min_tick', 'retry_delay', 'crawl_config']
def _update_project(self, project):
'''update one project'''
if project['name'] not in self.projects:
self.projects[project['name']] = Project(self, project)
else:
self.projects[project['name']].update(project)
project = self.projects[project['name']]
if project._send_on_get_info:
# update project runtime info from processor by sending a _on_get_info
# request, result is in status_page.track.save
project._send_on_get_info = False
self.on_select_task({
'taskid': '_on_get_info',
'project': project.name,
'url': 'data:,_on_get_info',
'status': self.taskdb.SUC
| smathot/qnotero | libqnotero/qt/QtCore.py | Python | gpl-2.0 | 777 | 0.003861 |
#-*- coding:utf-8 -*-
"""
This file is part of OpenSesame.
OpenSesame is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenSesame is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenSesame. If not, see <http://www.gnu.org/licenses/>.
"""
from libqnotero import qt
if qt.pyqt == 5:
from PyQt5.QtCore import *
else:
from PyQt4.QtCore import *
| mozilla/gameon | gameon/submissions/migrations/0005_auto__add_field_category_description.py | Python | bsd-3-clause | 6,583 | 0.007899 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Category.description'
db.add_column('submissions_category', 'description',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Category.description'
db.delete_column('submissions_category', 'description')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'submissions.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'submissions.challenge': {
'Meta': {'object_name': 'Challenge'},
'end_date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {})
},
'submissions.entry': {
'Meta': {'object_name': 'Entry'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Category']", 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.Profile']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'team_desciption': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'team_members': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'team_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'to_market': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255'}),
'video_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '255'})
},
'users.profile': {
'Meta': {'object_name': 'Profile'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['submissions']
| vikramraman/algorithms | python/editdistance.py | Python | mit | 928 | 0.017241 |
# Author: Vikram Raman
# Date: 09-12-2015
import time
# edit distance between two strings
# e(i,j) = min (1 + e(i-1,j) | 1 + e(i,j-1) | diff(i,j) + e(i-1,j-1))
def editdistance(s1, s2):
m = 0 if s1 is None else len(s1)
n = 0 if s2 is None else len(s2)
if m == 0:
return n
elif n == 0:
return m
l = [[i for i in range(0,n+1)]]
for i in range(1,m+1):
l.append([i])
for i in range(1,m+1):
for j in range(1,n+1):
minimum = min(1 + l[i-1][j], 1 + l[i][j-1], diff(s1,s2,i,j) + l[i-1][j-1])
l[i].append(minimum)
return l[m][n]
def diff (s1, s2, i, j):
return s1[i-1] != s2[j-1]
s1 = "exponential"
s2 = "polynomial"
print "s1=%s, s2=%s" % (s1,s2)
start_time = time.clock()
distance=editdistance(s1, s2)
print "distance=%d" % (distance)
print("--- %s seconds ---" % (time.clock() - start_time))
print editdistance("foo", "bar")
| velikodniy/python-fotki | fotki/user.py | Python | lgpl-3.0 | 3,334 | 0.002105 |
#!/usr/bin/env python
# coding:utf-8
# Copyright (c) 2011, Vadim Velikodniy <vadim-velikodniy@yandex.ru>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
import httplib, json
import fotki.collection as collection
from fotki.auth import DEFAULT_AUTH
class UserException(Exception):
"""Класс-исключение для ошибок получения информации о пользователе."""
pass
class UserNotFound(UserException):
"""Исключение 'Пользователь не найден'"""
pass
class User(object):
"""Класс, соответствующий пользователям сервиса. Позволяет получить
списки фотографий, альбомов и тегов для указанного пользователя,
если есть соответствующие права."""
def __init__(self, username, auth=DEFAULT_AUTH):
"""Конструктор. Загружает сервисный документ.
auth - «аутентификационный» объект,
user - строка с именем пользователя."""
self._username = username
self._auth = auth
self._get_info()
def _get_info(self):
"""Загрузить сервисный документ и получить ссылки на коллекции
фотографий, альбомов и тегов."""
connection = httplib.HTTPConnection('api-fotki.yandex.ru')
headers = {'Accept': 'application/json'}
headers.update(self._auth.auth_header)
api_path = '/api/users/%s/' % self._username
connection.request('GET', api_path, headers=headers)
response = connection.getresponse()
data = response.read()
connection.close()
if response.status == 200:
self._info = json.loads(data)
elif response.status == 404:
raise UserNotFound('User %s not found' % self._username)
else:
raise UserException(response.reason)
def _get_title(self):
return self._info['title']
title = property(_get_title, doc='Title')
def photos(self):
"""Возвращает общую коллекцию фотографий."""
url = self._info['collections']['photo-list']['href']
return collection.PhotoCollection(url, self._auth)
def albums(self):
"""Возвращет коллекцию альбомов."""
url = self._info['collections']['album-list']['href']
return collection.AlbumCollection(url, self._auth)
def tags(self):
"""Возвращает коллекцию тегов."""
url = self._info['collections']['tag-list']['href']
return collection.TagCollection(url, self._auth)
| SKA-ScienceDataProcessor/algorithm-reference-library | deprecated_code/workflows/mpi/imaging-pipelines_serial.py | Python | apache-2.0 | 12,979 | 0.018106 |
# coding: utf-8
# # Pipeline processing using serial workflows.
#
# This notebook demonstrates the continuum imaging and ICAL pipelines. These are based on ARL functions wrapped up as SDP workflows using the serial class.
# In[1]:
#get_ipython().run_line_magic('matplotlib', 'inline')
import os
import sys
sys.path.append(os.path.join('..', '..'))
from data_models.parameters import arl_path
#results_dir = arl_path('test_results')
results_dir = './results/orig'
#from matplotlib import pylab
#pylab.rcParams['figure.figsize'] = (12.0, 12.0)
#pylab.rcParams['image.cmap'] = 'rainbow'
import numpy
from astropy.coordinates import SkyCoord
from astropy import units as u
from astropy.wcs.utils import pixel_to_skycoord
#from matplotlib import pyplot as plt
from data_models.polarisation import PolarisationFrame
from wrappers.serial.calibration.calibration import solve_gaintable
from wrappers.serial.calibration.operations import apply_gaintable
from wrappers.serial.calibration.calibration_control import create_calibration_controls
from wrappers.serial.visibility.base import create_blockvisibility
from wrappers.serial.visibility.coalesce import convert_blockvisibility_to_visibility, convert_visibility_to_blockvisibility
from wrappers.serial.skycomponent.operations import create_skycomponent
from wrappers.serial.image.deconvolution import deconvolve_cube
#from wrappers.serial.image.operations import show_image, export_image_to_fits, qa_image
from wrappers.serial.image.operations import export_image_to_fits, qa_image
from wrappers.serial.visibility.iterators import vis_timeslice_iter
from wrappers.serial.simulation.testing_support import create_low_test_image_from_gleam
from processing_components.simulation.configurations import create_named_configuration
from wrappers.serial.imaging.base import predict_2d, create_image_from_visibility, advise_wide_field
from workflows.serial.imaging.imaging_serial import invert_list_serial_workflow, predict_list_serial_workflow, deconvolve_list_serial_workflow
from workflows.serial.simulation.simulation_serial import simulate_list_serial_workflow, corrupt_list_serial_workflow
from workflows.serial.pipelines.pipeline_serial import continuum_imaging_list_serial_workflow, ical_list_serial_workflow
import pprint
import time
pp = pprint.PrettyPrinter()
import logging
def init_logging():
log = logging.getLogger()
logging.basicConfig(filename='%s/imaging-pipeline.log' % results_dir,
filemode='a',
format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
datefmt='%H:%M:%S',
level=logging.INFO)
return log
log = init_logging()
log.info("Starting imaging-pipeline")
# In[2]:
#pylab.rcParams['figure.figsize'] = (12.0, 12.0)
#pylab.rcParams['image.cmap'] = 'Greys'
# We make the visibility. The parameter rmax determines the distance of the furthest antenna/stations used. All other parameters are determined from this number.
# In[3]:
nfreqwin=7
ntimes=5
rmax=300.0
frequency=numpy.linspace(1.0e8,1.2e8,nfreqwin)
#ntimes=11
#rmax=300.0
#frequency=numpy.linspace(0.9e8,1.1e8,nfreqwin)
channel_bandwidth=numpy.array(nfreqwin*[frequency[1]-frequency[0]])
times = numpy.linspace(-numpy.pi/3.0, numpy.pi/3.0, ntimes)
#phasecentre=SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
phasecentre=SkyCoord(ra=+0.0 * u.deg, dec=-40.0 * u.deg, frame='icrs', equinox='J2000')
bvis_list=simulate_list_serial_workflow('LOWBD2',
frequency=frequency,
channel_bandwidth=channel_bandwidth,
times=times,
phasecentre=phasecentre,
order='frequency',
rmax=rmax)
vis_list = [convert_blockvisibility_to_visibility(bv) for bv in bvis_list]
print('%d elements in vis_list' % len(vis_list))
log.debug('%d elements in vis_list' % len(vis_list))
# In[4]:
wprojection_planes=1
advice_low=advise_wide_field(vis_list[0], guard_band_image=8.0, delA=0.02,
wprojection_planes=wprojection_planes)
advice_high=advise_wide_field(vis_list[-1], guard_band_image=8.0, delA=0.02,
wprojection_planes=wprojection_planes)
vis_slices = advice_low['vis_slices']
npixel=advice_high['npixels2']
cellsize=min(advice_low['cellsize'], advice_high['cellsize'])
# Now make a graph to fill with a model drawn from GLEAM
# In[ ]:
gleam_model = [create_low_test_image_from_gleam(npixel=npixel,
frequency=[frequency[f]],
channel_bandwidth=[channel_bandwidth[f]],
cellsize=cellsize,
phasecentre=phasecentre,
polarisation_frame=PolarisationFrame("stokesI"),
flux_limit=1.0,
applybeam=True)
for f, freq in enumerate(frequency)]
log.info('About to make GLEAM model')
# In[ ]:
log.info('About to run predict to get predicted visibility')
start=time.time()
predicted_vislist = predict_list_serial_workflow(vis_list, gleam_model,
context='wstack', vis_slices=vis_slices)
#log.info('About to run corrupt to get corrupted visibility')
#corrupted_vislist = corrupt_list_serial_workflow(predicted_vislist, phase_error=1.0)
end=time.time()
print('predict finished in %f seconds'%(end-start),flush=True)
# Get the LSM. This is currently blank.
# In[ ]:
model_list = [create_image_from_visibility(vis_list[f],
npixel=npixel,
frequency=[frequency[f]],
channel_bandwidth=[channel_bandwidth[f]],
cellsize=cellsize,
phasecentre=phasecentre,
polarisation_frame=PolarisationFrame("stokesI"))
for f, freq in enumerate(frequency)]
# In[ ]:
start=time.time()
print('About to start invert' ,flush=True)
dirty_list = invert_list_serial_workflow(predicted_vislist, model_list,
context='wstack',
vis_slices=vis_slices, dopsf=False)
psf_list = invert_list_serial_workflow(predicted_vislist, model_list,
context='wstack',
vis_slices=vis_slices, dopsf=True)
end=time.time()
print('invert finished in %f seconds'%(end-start),flush=True)
# Create and execute graphs to make the dirty image and PSF
# In[ ]:
log.info('About to run invert to get dirty image')
dirty = dirty_list[0][0]
#show_image(dirty, cm='Greys', vmax=1.0, vmin=-0.1)
#plt.show()
print(qa_image(dirty))
export_image_to_fits(dirty, '%s/imaging-dirty.fits'
%(results_dir))
log.info('About to run invert to get PSF')
psf = psf_list[0][0]
#show_image(psf, cm='Greys', vmax=0.1, vmin=-0.01)
#plt.show()
print(qa_image(psf))
export_image_to_fits(psf, '%s/imaging-psf.fits'
%(results_dir))
# Now deconvolve using msclean
# In[ ]:
log.info('About to run deconvolve')
start=time.time()
deconvolved, _ = deconvolve_list_serial_workflow(dirty_list, psf_list, model_imagelist=model_list,
deconvolve_facets=8, deconvolve_overlap=16, deconvolve_taper='tukey',
scales=[0, 3, 10],
algorithm='msclean', niter=1000,
fractional_thresho
| seldon/django-flexi-auth | flexi_auth/tests/models.py | Python | agpl-3.0 | 4,238 | 0.014158 |
# Copyright (C) 2011 REES Marche <http://www.reesmarche.org>
#
# This file is part of ``django-flexi-auth``.
# ``django-flexi-auth`` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# ``django-flexi-auth`` is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with ``django-flexi-auth``. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from flexi_auth.models import PermissionBase
class Author(models.Model):
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
class Magazine(models.Model):
name = models.CharField(max_length=50)
printing = models.IntegerField()
class Article(models.Model):
title = models.CharField(max_length=50)
body = models.TextField()
author = models.ForeignKey(Author)
published_to = models.ManyToManyField(Magazine)
def __unicode__(self):
return "An article with title '%s'" % self.title
##-------------- authorization API----------------##
# table-level CREATE permission
@classmethod
def can_create(cls, user, context):
# print "Now entering ``can_create`` method of model ``Article``..."
# print "Executing check for permission 'CREATE' on model %(cls)s for user %(user)s wrt context %(ctx)s"\
# % {'cls':cls, 'user':user, 'ctx':context}
if context:
website = context.get('website', None)
edition = context.get('edition', None)
if (website=="BarSite" or (website=="FooSite" and edition=="morning")):
return True
return False
# row-level VIEW permission
def can_view (self, user, context):
# print "Now entering ``can_view`` method of model ``Article``..."
# print "Executing check for permission 'VIEW' on instance %(self)s for user %(user)s wrt context %(ctx)s"\
# % {'self':self, 'user':user, 'ctx':context}
if context:
website = context.get('website', None)
edition = context.get('edition', None)
if (website=="BarSite" or (website=="FooSite" and edition=="morning")):
return True
return False
##-------------------------------------------------##
class Book(models.Model, PermissionBase):
title = models.CharField(max_length=50)
content = models.TextField()
authors = models.ManyToManyField(Author)
def __unicode__(self):
return "A book with title '%s'" % self.title
##-------------- authorization API----------------##
# table-level CREATE permission
@classmethod
def can_create(cls, user, context):
# print "Now entering ``can_create`` method of model ``Book``..."
# print "Executing check for permission 'CREATE' on model %(cls)s for user %(user)s wrt context %(ctx)s"\
# % {'cls':cls, 'user':user, 'ctx':context}
if context:
language = context.get('language', None)
cover = context.get('cover', None)
if (language=="Italian" or (language=="Dutch" and cover=="Paperback")):
return True
return False
# row-level VIEW permission
def can_view (self, user, context):
# print "Now entering ``can_view`` method of model ``Book``..."
# print "Executing check for permission 'VIEW' on instance %(self)s for user %(user)s wrt context %(ctx)s"\
# % {'self':self, 'user':user, 'ctx':context}
if context:
language = context.get('language', None)
cover = context.get('cover', None)
if (language=="Italian" or (language=="Dutch" and cover=="Paperback")):
return True
return False
##-------------------------------------------------##
| py-mina-deploy/py-mina | py_mina/state.py | Python | mit | 845 | 0.008284 |
"""
Deploy state manager
"""
from py_mina.utils import _AttributeDict
################################################################################
# Default state
################################################################################
state = _AttributeDict({
'pre_deploy': None,
'deploy': None,
'post_deploy': None,
'finalize': None,
'success': None,
'on_success': None,
})
################################################################################
# Set state
################################################################################
def set(key, value):
if key in state.keys():
state[key] = value
else:
raise Exception('State "%s" is not defined' % key)
# Alias to prevent conflict when importing "py_mina.config" and "py_mina.state"
set_state = set
| richardliaw/ray | rllib/__init__.py | Python | apache-2.0 | 2,272 | 0 |
import logging
# Note: do not introduce unnecessary library dependencies here, e.g. gym.
# This file is imported from the tune module in order to register RLlib agents.
from ray.rllib.env.base_env import BaseEnv
from ray.rllib.env.external_env import ExternalEnv
from ray.rllib.env.multi_agent_env import MultiAgentEnv
from ray.rllib.env.vector_env import VectorEnv
from ray.rllib.evaluation.rollout_worker import RolloutWorker
from ray.rllib.policy.policy import Policy
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.policy.tf_policy import TFPolicy
from ray.rllib.policy.torch_policy import TorchPolicy
from ray.tune.registry import register_trainable
def _setup_logger():
logger = logging.getLogger("ray.rllib")
handler = logging.StreamHandler()
handler.setFormatter(
logging.Formatter(
"%(asctime)s\t%(levelname)s %(filename)s:%(lineno)s -- %(message)s"
))
logger.addHandler(handler)
logger.propagate = False
def _register_all():
from ray.rllib.agents.trainer import Trainer, with_common_config
from ray.rllib.agents.registry import ALGORITHMS, get_agent_class
from ray.rllib.contrib.registry import CONTRIBUTED_ALGORITHMS
for key in list(ALGORITHMS.keys()) + list(CONTRIBUTED_ALGORITHMS.keys(
)) + ["__fake", "__sigmoid_fake_data", "__parameter_tuning"]:
register_trainable(key, get_agent_class(key))
def _see_contrib(name):
"""Returns dummy agent class warning algo is in contrib/."""
class _SeeContrib(Trainer):
_name = "SeeCo
|
ntrib"
_default_config = with_common_config({})
def setup(self, config):
raise NameError(
"Please run `contrib/{}` instead.".format(name))
return _SeeContrib
# also register the aliases minus contrib/ to give a good error message
for key in list(CONTRIBUTED_ALGORITHMS.keys()):
assert key.startswith("contrib/")
alias = key.split("/", 1)[1]
register_trainable(alias, _see_contrib(alias))
_setup_logger()
_register_all()
__all__ = [
"Policy",
"TFPolicy",
"TorchPolicy",
"RolloutWorker",
"SampleBatch",
"BaseEnv",
"MultiAgentEnv",
"VectorEnv",
"ExternalEnv",
]
| aarestad/gradschool-stuff | crypto/python/factor.py | Python | gpl-2.0 | 217 | 0.009217 |
def factor(n):
    d = 2
    factors = []
    # trial division: d * d <= n avoids a floating-point sqrt and
    # keeps dividing out each factor before moving to the next d
    while n > 1 and d * d <= n:
        if n % d == 0:
            factors.append(d)
            n = n // d
        else:
            d = d + 1
    # any remainder greater than 1 is itself a prime factor
    if n > 1:
        factors.append(n)
    return factors
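A quick sanity check of the routine above (hypothetical usage, not part of the original file):

```python
# 60 = 2 * 2 * 3 * 5, so the expected result is [2, 2, 3, 5]
print(factor(60))
```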
| yasoob/PythonRSSReader | venv/lib/python2.7/dist-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py | Python | mit | 9,122 | 0.00285 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from .base import Client
from ..parameters import prepare_grant_uri
from ..parameters import parse_implicit_response
class MobileApplicationClient(Client):
"""A public client utilizing the implicit code grant workflow.
A user-agent-based application is a public client in which the
client code is downloaded from a web server and executes within a
user-agent (e.g. web browser) on the device used by the resource
owner. Protocol data and credentials are easily accessible (and
often visible) to the resource owner. Since such applications
reside within the user-agent, they can make seamless use of the
user-agent capabilities when requesting authorization.
The implicit grant type is used to obtain access tokens (it does not
support the issuance of refresh tokens) and is optimized for public
clients known to operate a particular redirection URI. These clients
are typically implemented in a browser using a scripting language
such as JavaScript.
As a redirection-based flow, the client must be capable of
interacting with the resource owner's user-agent (typically a web
browser) and capable of receiving incoming requests (via redirection)
from the authorization server.
Unlike the authorization code grant type in which the client makes
separate requests for authorization and access token, the client
receives the access token as the result of the authorization request.
The implicit grant type does not include client authentication, and
relies on the presence of the resource owner and the registration of
the redirection URI. Because the access token is encoded into the
redirection URI, it may be exposed to the resource owner and other
applications residing on the same device.
"""
def prepare_request_uri(self, uri, redirect_uri=None, scope=None,
state=None, **kwargs):
"""Prepare the implicit grant request URI.
The client constructs the request URI by adding the following
parameters to the query component of the authorization endpoint URI
using the "application/x-www-form-urlencoded" format, per `Appendix B`_:
:param redirect_uri: OPTIONAL. The redirect URI must be an absolute URI
and it should have been registered with the OAuth
provider prior to use. As described in `Section 3.1.2`_.
:param scope: OPTIONAL. The scope of the access request as described by
`Section 3.3`_. These may be any string but are commonly
URIs or various categories such as ``videos`` or ``documents``.
:param state: RECOMMENDED. An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent back
to the client. The parameter SHOULD be used for preventing
cross-site request forgery as described in `Section 10.12`_.
:param kwargs: Extra arguments to include in the request URI.
In addition to supplied parameters, OAuthLib will append the ``client_id``
that was provided in the constructor as well as the mandatory ``response_type``
argument, set to ``token``::
>>> from oauthlib.oauth2 import MobileApplicationClient
>>> client = MobileApplicationClient('your_id')
>>> client.prepare_request_uri('https://example.com')
'https://example.com?client_id=your_id&response_type=token'
>>> client.prepare_request_uri('https://example.com', redirect_uri='https://a.b/callback')
'https://example.com?client_id=your_id&response_type=token&redirect_uri=https%3A%2F%2Fa.b%2Fcallback'
>>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures'])
'https://example.com?client_id=your_id&response_type=token&scope=profile+pictures'
>>> client.prepare_request_uri('https://example.com', foo='bar')
'https://example.com?client_id=your_id&response_type=token&foo=bar'
.. _`Appendix B`: http://tools.ietf.org/html/rfc6749#appendix-B
.. _`Section 2.2`: http://tools.ietf.org/html/rfc6749#section-2.2
.. _`Section 3.1.2`: http://tools.ietf.org/html/rfc6749#section-3.1.2
.. _`Section 3.3`: http://tools.ietf.org/html/rfc6749#section-3.3
.. _`Section 10.12`: http://tools.ietf.org/html/rfc6749#section-10.12
"""
return prepare_grant_uri(uri, self.client_id, 'token',
redirect_uri=redirect_uri, state=state, scope=scope, **kwargs)
def parse_request_uri_response(self, uri, state=None, scope=None):
"""Parse the response URI fragment.
If the resource owner grants the access request, the authorization
server issues an access token and delivers it to the client by adding
the following parameters to the fragment component of the redirection
URI using the "application/x-www-form-urlencoded" format:
:param uri: The callback URI that resulted from the user being redirected
back from the provider to you, the client.
:param state: The state provided in the authorization request.
:param scope: The scopes provided in the authorization request.
:return: Dictionary of token parameters.
:raises: Warning if scope has changed. OAuth2Error if response is invalid.
A successful response should always contain
**access_token**
The access token issued by the authorization server. Often
a random string.
**token_type**
The type of the token issued as described in `Section 7.1`_.
Commonly ``Bearer``.
**state**
If you provided the state parameter in the authorization phase, then
the provider is required to include that exact state value in the
response.
While it is not mandated it is recommended that the provider include
**expires_in**
The lifetime in seconds of the access token. For
example, the value "3600" denotes that the access token will
expire in one hour from the time the response was generated.
If omitted, the authorization server SHOULD provide the
expiration time via other means or document the default value.
**scope**
Providers may supply this in all responses but are required to only
if it has changed since the authorization request.
A few example responses can be seen below::
>>> response_uri = 'https://example.com/callback#access_token=sdlfkj452&state=ss345asyht&token_type=Bearer&scope=hello+world'
>>> from oauthlib.oauth2 import MobileApplicationClient
>>> client = MobileApplicationClient('your_id')
>>> client.parse_request_uri_response(response_uri)
{
'access_token': 'sdlfkj452',
'token_type': 'Bearer',
'state': 'ss345asyht',
'scope': [u'hello', u'world']
}
>>> client.parse_request_uri_response(response_uri, state='other')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response
**scope**
File "oauthlib/oauth2/rfc6749/parameters.py", line 197, in parse_implicit_response
raise ValueError("Mismatching or missing state in params.")
ValueError: Mismatching or missing state in params.
>>> client.parse_request_uri_response(response_uri, scope=['othe
| kanboard/kanboard-cli | kanboard_cli/shell.py | Python | mit | 3,401 | 0.000588 |
# The MIT License (MIT)
#
# Copyright (c) 2016 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cliff import app
from cliff import commandmanager
from pbr import version as app_version
import sys
from kanboard_cli.commands import application
from kanboard_cli.commands import project
from kanboard_cli.commands import task
from kanboard_cli import client
class KanboardShell(app.App):
def __init__(self):
super(KanboardShell, self).__init__(
description='Kanboard Command Line Client',
version=app_version.VersionInfo('kanboard_cli').version_string(),
command_manager=commandmanager.CommandManager('kanboard.cli'),
deferred_help=True)
self.client = None
self.is_super_user = True
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(KanboardShell, self).build_option_parser(
description, version, argparse_kwargs=argparse_kwargs)
parser.add_argument(
'--url',
metavar='<api url>',
help='Kanboard API URL',
)
parser.add_argument(
'--username',
metavar='<api username>',
help='API username',
)
parser.add_argument(
'--password',
metavar='<api password>',
help='API password/token',
)
parser.add_argument(
'--auth-header',
metavar='<authentication header>',
help='API authentication header',
)
return parser
def initialize_app(self, argv):
client_manager = client.ClientManager(self.options)
self.client = client_manager.get_client()
self.is_super_user = client_manager.is_super_user()
self.command_manager.add_command('app version', application.ShowVersion)
self.command_manager.add_command('app timezone', application.ShowTimezone)
self.command_manager.add_command('project show', project.ShowProject)
self.command_manager.add_command('project list', project.ListProjects)
self.command_manager.add_command('task create', task.CreateTask)
self.command_manager.add_command('task list', task.ListTasks)
def main(argv=sys.argv[1:]):
return KanboardShell().run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| Clustaar/clustaar.authorize | tests/authorize/conditions/test_true_condition.py | Python | mit | 231 | 0 |
import pytest
from clustaar.authorize.conditions import TrueCondition
@pytest.fixture
def condition():
return TrueCondition()
class TestCall(object):
def test_returns_true(self, condition):
assert condition({})
| stargaser/astropy | astropy/io/ascii/__init__.py | Python | bsd-3-clause | 1,565 | 0.000639 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
""" An ext
|
ensible ASCII table reader and writer.
"""
from .core import (InconsistentTableError,
ParameterError,
NoType, StrType, NumType, FloatType, IntType, AllType,
Column,
BaseInputter, ContinuationLinesInputter,
BaseHeader,
BaseData,
BaseOutputter, TableOutputter,
BaseReader,
BaseSplitter, DefaultSplitter, WhitespaceSplitter,
convert_numpy,
masked
)
from .basic import (Basic, BasicHeader, BasicData,
Rdb,
Csv,
Tab,
NoHeader,
CommentedHeader)
from .fastbasic import (FastBasic,
FastCsv,
FastTab,
FastNoHeader,
FastCommentedHeader,
FastRdb)
from .cds import Cds
from .ecsv import Ecsv
from .latex import Latex, AASTex, latexdicts
from .html import HTML
from .ipac import Ipac
from .daophot import Daophot
from .sextractor import SExtractor
from .fixedwidth import (FixedWidth, FixedWidthNoHeader,
FixedWidthTwoLine, FixedWidthSplitter,
FixedWidthHeader, FixedWidthData)
from .rst import RST
from .ui import (set_guess, get_reader, read, get_writer, write, get_read_trace)
from . import connect
| mperrin/misc_astro | idlastro_ports/tvbox.py | Python | bsd-3-clause | 511 | 0.013699 |
import numpy as np
import matplotlib.pyplot as pl
def tvbox(box=(1,1), xcen=0, ycen=0, center=None,**kwargs):
"""
draw a circle on an image.
radius
xcen
ycen
center= tuple in (Y,X) order.
"""
if center is not None:
xcen=center[1]
ycen=center[0]
x = [xcen-box[0], xcen+box[0], xcen+box[0], xcen-box[0], xcen-box[0]]
y = [ycen-box[1], ycen-box[1], ycen+box[1], ycen+box[1], ycen-box[1]]
pl.plot(x,y, **kwargs)
| jmanoel7/pluma-plugins-0 | plugins/zencoding/plugin.py | Python | gpl-3.0 | 6,126 | 0.003591 |
# @file plugin.py
#
# Connect Zen Coding to Pluma.
#
# Adapted to pluma by Joao Manoel (joaomanoel7@gmail.com)
#
# Original Author Franck Marcia (franck.marcia@gmail.com)
#
import pluma, gobject, gtk, os
from zen_editor import ZenEditor
zencoding_ui_str = """
<ui>
<menubar name="MenuBar">
<menu name="EditMenu" action="Edit">
<placeholder name="EditOps_5">
<menu action="ZenCodingMenuAction">
<menuitem name="ZenCodingExpand" action="ZenCodingExpandAction"/>
<menuitem name="ZenCodingExpandW" action="ZenCodingExpandWAction"/>
<menuitem name="ZenCodingWrap" action="ZenCodingWrapAction"/>
<separator/>
<menuitem name="ZenCodingInward" action="ZenCodingInwardAction"/>
<menuitem name="ZenCodingOutward" action="ZenCodingOutwardAction"/>
<menuitem name="ZenCodingMerge" action="ZenCodingMergeAction"/>
<separator/>
<menuitem name="ZenCodingPrev" action="ZenCodingPrevAction"/>
<menuitem name="ZenCodingNext" action="ZenCodingNextAction"/>
<separator/>
<menuitem name="ZenCodingRemove" action="ZenCodingRemoveAction"/>
<menuitem name="ZenCodingSplit" action="ZenCodingSplitAction"/>
<menuitem name="ZenCodingComment" action="ZenCodingCommentAction"/>
</menu>
</placeholder>
</menu>
</menubar>
</ui>
"""
class ZenCodingPlugin(pluma.Plugin):
"""A Pluma plugin to implement Zen Coding's HTML and CSS shorthand expander."""
def activate(self, window):
actions = [
('ZenCodingMenuAction', None, '_Zen Coding', None, "Zen Coding tools", None),
('ZenCodingExpandAction', None, '_Expand abbreviation', '<Ctrl>E', "Expand abbreviation to raw HTML/CSS", self.expand_abbreviation),
('ZenCodingExpandWAction', None, 'E_xpand dynamic abbreviation...', '<Ctrl><Alt>E', "Dynamically expand abbreviation as you type", self.expand_with_abbreviation),
('ZenCodingWrapAction', None, '_Wrap with abbreviation...', '<Ctrl><Shift>E', "Wrap with code expanded from abbreviation", self.wrap_with_abbreviation),
('ZenCodingInwardAction', None, 'Balance tag _inward', '<Ctrl><Alt>I', "Select inner tag's content", self.match_pair_inward),
('ZenCodingOutwardAction', None, 'Balance tag _outward', '<Ctrl><Alt>O', "Select outer tag's content", self.match_pair_outward),
('ZenCodingMergeAction', None, '_Merge lines', '<Ctrl><Alt>M', "Merge all lines of the current selection", self.merge_lines),
('ZenCodingPrevAction', None, '_Previous edit point', '<Alt>Left', "Place the cursor at the previous edit point", self.prev_edit_point),
('ZenCodingNextAction', None, '_Next edit point', '<Alt>Right', "Place the cursor at the next edit point", self.next_edit_point),
('ZenCodingRemoveAction', None, '_Remove tag', '<Ctrl><Alt>R', "Remove a tag", self.remove_tag),
('ZenCodingSplitAction', None, 'Split or _join tag', '<Ctrl><Alt>J', "Toggle between single and double tag", self.split_join_tag),
('ZenCodingCommentAction', None, 'Toggle _comment', '<Ctrl><Alt>C', "Toggle an XML or HTML comment", self.toggle_comment)
]
windowdata = dict()
window.set_data("ZenCodingPluginDataKey", windowdata)
windowdata["action_group"] = gtk.ActionGroup("PlumaZenCodingPluginActions")
windowdata["action_group"].add_actions(actions, window)
manager = window.get_ui_manager()
manager.insert_action_group(windowdata["action_group"], -1)
windowdata["ui_id"] = manager.add_ui_from_string(zencoding_ui_str)
window.set_data("ZenCodingPluginInfo", windowdata)
self.editor = ZenEditor()
error = self.editor.get_user_settings_error()
if error:
md = gtk.MessageDialog(window, gtk.DIALOG_MODAL, gtk.MESSAGE_ERROR,
gtk.BUTTONS_CLOSE, "There is an error in user settings:")
message = "{0} on line {1} at character {2}\n\nUser settings will not be available."
md.set_title("Zen Coding error")
md.format_secondary_text(message.format(error['msg'], error['lineno'], error['offset']))
md.run()
md.destroy()
def deactivate(self, window):
windowdata = window.get_data("ZenCodingPluginDataKey")
manager = window.get_ui_manager()
manager.remove_ui(windowdata["ui_id"])
manager.remove_action_group(windowdata["action_group"])
def update_ui(self, window):
view = window.get_active_view()
windowdata = window.get_data("ZenCodingPluginDataKey")
windowdata["action_group"].set_sensitive(bool(view and view.get_editable()))
def expand_abbreviation(self, action, window):
self.editor.expand_abbreviation(window)
def expand_with_abbreviation(self, action, window):
self.editor.expand_with_abbreviation(window)
def wrap_with_abbreviation(self, action, window):
self.editor.wrap_with_abbreviation(window)
def match_pair_inward(self, action, window):
self.editor.match_pair_inward(window)
def match_pair_outward(self, action, window):
self.editor.match_pair_outward(window)
def merge_lines(self, action, window):
self.editor.merge_lines(window)
def prev_edit_point(self, action, window):
self.editor.prev_edit_point(window)
def next_edit_point(self, action, window):
self.editor.next_edit_point(window)
def remove_tag(self, action, window):
self.editor.remove_tag(window)
def split_join_tag(self, action, window):
self.editor.split_join_tag(window)
def toggle_comment(self, action, window):
self.editor.toggle_comment(window)
| JacekPierzchlewski/RxCS | examples/signals/gaussNoise2_ex1.py | Python | bsd-2-clause | 1,832 | 0.002183 |
"""
This script is an example of how to use the random gaussian noise generator (type 2)
module. |br|
In this example only one signal is generated.
Both the minimum and the maximum frequency component in the signal are regulated.
After the generation, the spectrum of the signal is analyzed with a Welch analysis
and plotted.
*Author*:
Jacek Pierzchlewski, Aalborg University, Denmark. <jap@es.aau.dk>
*Version*:
1.0 | 20-JAN-2016 : * Version 1.0 released. |br|
*License*:
BSD 2-Clause
"""
import rxcs
import scipy.signal as scsig
import matplotlib.pyplot as plt
def _gaussNoise2_ex1():
# Things on the table:
gaussNoise = rxcs.sig.gaussNoise2() # Gaussian noise generator
# Configure the generator...
gaussNoise.fR = 100e6 # Representation sampling frequency [100 MHz]
gaussNoise.tS = 1 # Time [1 sec]
gaussNoise.fMin = 100e3 # Minimum frequency component [100 kHz]
gaussNoise.fMax = 200e3 # Maximum frequency component [200 kHz]
gaussNoise.run() # ... and run it!
vSig = gaussNoise.mSig[0, :] # take the generated signal
# -----------------------------------------------------------------
# Analyze the signal and plot it
(vFxx, vPxx) = scsig.welch(vSig, fs=gaussNoise.fR, nperseg=100*1024, noverlap=100*512)
hFig1 = plt.figure(1)
hSubPlot1 = hFig1.add_subplot(111)
hSubPlot1.grid(True)
hSubPlot1.set_title('Spectrum of the signal (psd)')
hSubPlot1.set_xlabel('Frequency [kHz]')
hSubPlot1.plot(vFxx/1e3, vPxx, '-')
hSubPlot1.set_xlim(0, 1e3)
plt.show(block=True)
# =====================================================================
# Trigger when start as a script
# =====================================================================
if __name__ == '__main__':
_gaussNoise2_ex1()
| jotes/pontoon | docs/conf.py | Python | bsd-3-clause | 9,297 | 0.006133 |
# -*- coding: utf-8 -*-
#
# Pontoon documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 4 21:51:51 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.graphviz',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Pontoon'
copyright = u'2015, Matjaž Horvat, Mozilla Foundation'
author = u'Matjaž Horvat, Mozilla Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'venv']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Pontoondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Pontoon.tex', u'Pontoon Documentation',
u'Matjaž Horvat, Mozilla Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
|
dashwav/nano-chan
|
cogs/utils/__init__.py
|
Python
|
mit
| 129
| 0
|
from .db_utils import PostgresController
from .enums import Action, Change
__all__ = ['PostgresController', 'Action', 'Change']
|
queria/my-tempest
|
tempest/api/compute/security_groups/test_security_group_rules_negative.py
|
Python
|
apache-2.0
| 7,051
| 0
|
# Copyright 2013 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute.security_groups import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest import test
CONF = config.CONF
def not_existing_id():
if CONF.service_available.neutron:
return data_utils.rand_uuid()
else:
return data_utils.rand_int_id(start=999)
class SecurityGroupRulesNegativeTestJSON(base.BaseSecurityGroupsTest):
@classmethod
def resource_setup(cls):
super(SecurityGroupRulesNegativeTestJSON, cls).resource_setup()
cls.client = cls.security_groups_client
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_with_non_existent_id(self):
# Negative test: Creation of Security Group rule should FAIL
# with non existent Parent group id
# Adding rules to the non existent Security Group id
parent_group_id = not_existing_id()
ip_protocol = 'tcp'
from_port = 22
to_port = 22
self.assertRaises(exceptions.NotFound,
self.client.create_security_group_rule,
parent_group_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_with_invalid_id(self):
# Negative test: Creation of Security Group rule should FAIL
        # with a Parent group id which is not an integer
# Adding rules to the non int Security Group id
parent_group_id = data_utils.rand_name('non_int_id')
ip_protocol = 'tcp'
from_port = 22
to_port = 22
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
parent_group_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_duplicate(self):
# Negative test: Create Security Group rule duplicate should fail
# Creating a Security Group to add rule to it
resp, sg = self.create_security_group()
# Adding rules to the created Security Group
parent_group_id = sg['id']
ip_protocol = 'tcp'
from_port = 22
to_port = 22
resp, rule = \
self.client.create_security_group_rule(parent_group_id,
ip_protocol,
from_port,
to_port)
self.addCleanup(self.client.delete_security_group_rule, rule['id'])
self.assertEqual(200, resp.status)
# Add the same rule to the group should fail
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
parent_group_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_with_invalid_ip_protocol(self):
# Negative test: Creation of Security Group rule should FAIL
# with invalid ip_protocol
# Creating a Security Group to add rule to it
resp, sg = self.create_security_group()
# Adding rules to the created Security Group
parent_group_id = sg['id']
ip_protocol = data_utils.rand_name('999')
from_port = 22
to_port = 22
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
parent_group_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_with_invalid_from_port(self):
# Negative test: Creation of Security Group rule should FAIL
# with invalid from_port
# Creating a Security Group to add rule to it
resp, sg = self.create_security_group()
# Adding rules to the created Security Group
parent_group_id = sg['id']
ip_protocol = 'tcp'
from_port = data_utils.rand_int_id(start=65536)
to_port = 22
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
parent_group_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_with_invalid_to_port(self):
# Negative test: Creation of Security Group rule should FAIL
# with invalid to_port
# Creating a Security Group to add rule to it
resp, sg = self.create_security_group()
# Adding rules to the created Security Group
parent_group_id = sg['id']
ip_protocol = 'tcp'
from_port = 22
to_port = data_utils.rand_int_id(start=65536)
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
parent_group_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_create_security_group_rule_with_invalid_port_range(self):
# Negative test: Creation of Security Group rule should FAIL
# with invalid port range.
# Creating a Security Group to add rule to it.
resp, sg = self.create_security_group()
# Adding a rule to the created Security Group
secgroup_id = sg['id']
ip_protocol = 'tcp'
from_port = 22
to_port = 21
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
secgroup_id, ip_protocol, from_port, to_port)
@test.attr(type=['negative', 'smoke'])
@test.services('network')
def test_delete_security_group_rule_with_non_existent_id(self):
        # Negative test: Deletion of Security Group rule should FAIL
# with non existent id
non_existent_rule_id = not_existing_id()
self.assertRaises(exceptions.NotFound,
self.client.delete_security_group_rule,
non_existent_rule_id)
class SecurityGroupRulesNegativeTestXML(SecurityGroupRulesNegativeTestJSON):
_interface = 'xml'
|
mcdevs/Burger
|
burger/toppings/topping.py
|
Python
|
mit
| 1,295
| 0.000772
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""
Copyright (c) 2011 Tyler Kenendy <tk@tkte.ch>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
class Topping(object):
PROVIDES = None
DEPENDS = None
@staticmethod
def act(aggregate, classloader, verbose=False):
raise NotImplementedError()
|
mistio/libcloud
|
docs/examples/loadbalancer/elb/ex_list_balancer_policy_types.py
|
Python
|
apache-2.0
| 291
| 0
|
from libcloud.loadbalancer.types import Provider
from libcloud.loadbalancer.providers import get_driver
ACCESS_ID = 'your access id'
SECRET_KEY = 'your secret key'
cls = get_driver(Provider.ELB)
driver = cls(key=ACCESS_ID, secret=SECRET_KEY)
print(driver.ex_list_balancer_policy_types())
|
tchellomello/home-assistant
|
homeassistant/components/synology_dsm/camera.py
|
Python
|
apache-2.0
| 3,046
| 0.000657
|
"""Support for Synology DSM cameras."""
from typing import Any, Dict
from synology_dsm.api.surveillance_station import SynoSurveillanceStation
from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import SynologyDSMEntity
from .const import (
DOMAIN,
ENTITY_CLASS,
ENTITY_ENABLE,
ENTITY_ICON,
ENTITY_NAME,
ENTITY_UNIT,
SYNO_API,
)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Synology NAS binary sensor."""
api = hass.data[DOMAIN][entry.unique_id][SYNO_API]
if SynoSurveillanceStation.CAMERA_API_KEY not in api.dsm.apis:
return
surveillance_station = api.surveillance_station
await hass.async_add_executor_job(surveillance_station.update)
cameras = surveillance_station.get_all_cameras()
entities = [SynoDSMCamera(api, camera) for camera in cameras]
async_add_entities(entities)
class SynoDSMCamera(SynologyDSMEntity, Camera):
"""Representation a Synology camera."""
def __init__(self, api, camera):
"""Initialize a Synology camera."""
super().__init__(
api,
f"{SynoSurveillanceStation.CAMERA_API_KEY}:{camera.id}",
{
ENTITY_NAME: camera.name,
ENTITY_CLASS: None,
                ENTITY_ICON: None,
ENTITY_ENABLE: True,
                ENTITY_UNIT: None,
},
)
self._camera = camera
@property
    def device_info(self) -> Dict[str, Any]:
"""Return the device information."""
return {
"identifiers": {(DOMAIN, self._api.information.serial, self._camera.id)},
"name": self.name,
"model": self._camera.model,
"via_device": (DOMAIN, self._api.information.serial),
}
@property
def supported_features(self) -> int:
"""Return supported features of this camera."""
return SUPPORT_STREAM
@property
def is_recording(self):
"""Return true if the device is recording."""
return self._camera.is_recording
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return self._camera.is_motion_detection_enabled
def camera_image(self) -> bytes:
"""Return bytes of camera image."""
return self._api.surveillance_station.get_camera_image(self._camera.id)
async def stream_source(self) -> str:
"""Return the source of the stream."""
return self._camera.live_view.rtsp
def enable_motion_detection(self):
"""Enable motion detection in the camera."""
self._api.surveillance_station.enable_motion_detection(self._camera.id)
def disable_motion_detection(self):
"""Disable motion detection in camera."""
self._api.surveillance_station.disable_motion_detection(self._camera.id)
|
noironetworks/neutron
|
neutron/services/auto_allocate/db.py
|
Python
|
apache-2.0
| 17,334
| 0.000404
|
# Copyright 2015-2016 Hewlett Packard Enterprise Development Company, LP
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import constants as api_const
from neutron_lib.api.definitions import l3 as l3_apidef
from neutron_lib.api.definitions import network as net_def
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib.db import api as db_api
from neutron_lib.db import utils as db_utils
from neutron_lib import exceptions as n_exc
from neutron_lib.objects import exceptions as obj_exc
from neutron_lib.plugins import constants
from neutron_lib.plugins import directory
from neutron_lib.plugins import utils as p_utils
from oslo_log import log as logging
from neutron._i18n import _
from neutron.common import exceptions as c_exc
from neutron.db import _resource_extend as resource_extend
from neutron.db import common_db_mixin
from neutron.objects import auto_allocate as auto_allocate_obj
from neutron.objects import base as base_obj
from neutron.objects import network as net_obj
from neutron.services.auto_allocate import exceptions
LOG = logging.getLogger(__name__)
CHECK_REQUIREMENTS = 'dry-run'
def _ensure_external_network_default_value_callback(
resource, event, trigger, **kwargs):
"""Ensure the is_default db field matches the create/update request."""
# TODO(boden): remove shim once all callbacks use payloads
if 'payload' in kwargs:
_request = kwargs['payload'].request_body
_context = kwargs['payload'].context
_network = kwargs['payload'].desired_state
_orig = kwargs['payload'].states[0]
else:
_request = kwargs['request']
_context = kwargs['context']
_network = kwargs['network']
_orig = kwargs.get('original_network')
@db_api.retry_if_session_inactive()
def _do_ensure_external_network_default_value_callback(
context, request, orig, network):
is_default = request.get(api_const.IS_DEFAULT)
if is_default is None:
return
if is_default:
# ensure only one default external network at any given time
pager = base_obj.Pager(limit=1)
objs = net_obj.ExternalNetwork.get_objects(context,
_pager=pager, is_default=True)
if objs:
if objs[0] and network['id'] != objs[0].network_id:
raise exceptions.DefaultExternalNetworkExists(
net_id=objs[0].network_id)
if orig and orig.get(api_const.IS_DEFAULT) == is_default:
return
network[api_const.IS_DEFAULT] = is_default
# Reflect the status of the is_default on the create/update request
obj = net_obj.ExternalNetwork.get_object(context,
network_id=network['id'])
if obj:
obj.is_default = is_default
obj.update()
_do_ensure_external_network_default_value_callback(
_context, _request, _orig, _network)
@resource_extend.has_resource_extenders
class AutoAllocatedTopologyMixin(common_db_mixin.CommonDbMixin):
def __new__(cls, *args, **kwargs):
# NOTE(kevinbenton): we subscribe on object construction because
# the tests blow away the callback manager for each run
new = super(AutoAllocatedTopologyMixin, cls).__new__(cls, *args,
**kwargs)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_UPDATE)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_CREATE)
return new
# TODO(armax): if a tenant modifies auto allocated resources under
# the hood the behavior of the get_auto_allocated_topology API is
# undetermined. Consider adding callbacks to deal with the following
# situations:
# - insert subnet -> plug router interface
# - delete router -> remove the entire topology
# - update subnet -> prevent operation
# - update router gateway -> prevent operation
# - ...
@property
def core_plugin(self):
if not getattr(self, '_core_plugin', None):
self._core_plugin = directory.get_plugin()
return self._core_plugin
@property
def l3_plugin(self):
if not getattr(self, '_l3_plugin', None):
self._l3_plugin = directory.get_plugin(constants.L3)
return self._l3_plugin
@staticmethod
@resource_extend.extends([net_def.COLLECTION_NAME])
def _extend_external_network_default(net_res, net_db):
"""Add is_default field to 'show' response."""
if net_db.external is not None:
net_res[api_const.IS_DEFAULT] = net_db.external.is_default
return net_res
def get_auto_allocated_topology(self, context, tenant_id, fields=None):
"""Return tenant's network associated to auto-allocated topology.
The topology will be provisioned upon return, if network is missing.
"""
fields = fields or []
tenant_id = self._validate(context, tenant_id)
if CHECK_REQUIREMENTS in fields:
# for dry-run requests, simply validates that subsequent
# requests can be fulfilled based on a set of requirements
# such as existence of default networks, pools, etc.
return self._check_requirements(context, tenant_id)
elif fields:
raise n_exc.BadRequest(resource='auto_allocate',
msg=_("Unrecognized field"))
# Check for an existent topology
        network_id = self._get_auto_allocated_network(context, tenant_id)
if network_id:
return self._response(network_id, tenant_id, fields=fields)
# See if we indeed have an external network to connect to, otherwise
# we will fail fast
default_external_network = self._get_default_external_network(
context)
# If we reach this point, then we got some work to do!
network_id = self._build_topology(
context, tenant_id, default_external_network)
return self._response(network_id, tenant_id, fields=fields)
def delete_auto_allocated_topology(self, context, tenant_id):
tenant_id = self._validate(context, tenant_id)
topology = self._get_auto_allocated_topology(context, tenant_id)
if topology:
subnets = self.core_plugin.get_subnets(
context,
filters={'network_id': [topology['network_id']]})
self._cleanup(
context, network_id=topology['network_id'],
router_id=topology['router_id'], subnets=subnets)
def _build_topology(self, context, tenant_id, default_external_network):
"""Build the network topology and returns its network UUID."""
try:
subnets = self._provision_tenant_private_network(
context, tenant_id)
network_id = subnets[0]['network_id']
router = self._provision_external_connectivity(
context, default_external_network, subnets, tenant_id)
network_id = self._save(
context, tenant_id, network_id, router['id'], subnets)
return network_id
except exceptions.UnknownProvisioningError as e:
# Clean partially provisioned topologies, and reraise the
# error. If it can be retried, so be it.
LOG.err
|
mvaled/sentry
|
tests/sentry/api/endpoints/test_group_hashes.py
|
Python
|
bsd-3-clause
| 3,716
| 0.000807
|
from __future__ import absolute_import
import copy
from six.moves.urllib.parse import urlencode
from sentry.models import GroupHash
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.factories import DEFAULT_EVENT_DATA
from sentry.testutils.helpers.datetime import iso_format, before_now
from sentry.eventstream.snuba import SnubaEventStream
class GroupHashesTest(APITestCase, SnubaTestCase):
def test_only_return_latest_event(self):
self.login_as(user=self.user)
min_ago = iso_format(before_now(minutes=1))
two_min_ago = iso_format(before_now(minutes=2))
new_event_id = "b" * 32
old_event = self.store_event(
data={
"event_id": "a" * 32,
"message": "message",
"timestamp": two_min_ago,
"stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
"fingerprint": ["group-1"],
},
project_id=self.project.id,
)
new_event = self.store_event(
data={
"event_id": new_event_id,
"message": "message",
"timestamp": min_ago,
"stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
"fingerprint": ["group-1"],
},
project_id=self.project.id,
)
assert new_event.group_id == old_event.group_id
url = u"/api/0/issues/{}/hashes/".format(new_event.group_id)
response = self.client.get(url, format="json")
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert response.data[0]["latestEvent"]["eventID"] == new_event_id
def test_return_multiple_hashes(self):
self.login_as(user=self.user)
min_ago = iso_format(before_now(minutes=1))
two_min_ago = iso_format(before_now(minutes=2))
event1 = self.store_event(
data={
"event_id": "a" * 32,
"message": "message",
"timestamp": two_min_ago,
"stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
"fingerprint": ["group-1"],
},
project_id=self.project.id,
)
event2 = self.store_event(
data={
"event_id": "b" * 32,
"message": "message2",
"timestamp": min_ago,
"fingerprint": ["group-2"],
},
project_id=self.project.id,
)
# Merge the events
eventstream = SnubaEventStream()
state = eventstream.start_merge(self.project.id, [event2.group_id], event1.group_id)
eventstream.end_merge(state)
url = u"/api/0/issues/{}/hashes/".format(event1.group_id)
response = self.client.get(url, format="json")
assert response.status_code == 200, response.content
assert len(response.data) == 2
primary_hashes = [hash["id"] for hash in response.data]
assert primary_hashes == [event2.get_primary_hash(), event1.get_primary_hash()]
def test_unmerge(self):
self.login_as(user=self.user)
group = self.create_group()
hashes = [
GroupHash.objects.create(project=group.project, group=group, hash=hash)
for hash in ["a" * 32, "b" * 32]
]
url = "?".join(
[
u"/api/0/issues/{}/hashes/".format(group.id),
urlencode({"id": [h.hash for h in hashes]}, True),
]
)
response = self.client.delete(url, format="json")
assert response.status_code == 202, response.content
|
jeansch/octopasty-og
|
octopasty/internal.py
|
Python
|
gpl-3.0
| 5,463
| 0.000366
|
# -*- coding: utf-8 -*-
# Octopasty is an Asterisk AMI proxy
# Copyright (C) 2011 Jean Schurger <jean@schurger.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hashlib import sha1, md5
from time import time
from random import randint
from utils import Packet, bigtime, tmp_debug
from asterisk import Success, Error, Goodbye
KEEP_INTERNAL = ['logoff']
def handle_action(self, packet):
action = packet.packet
if action.name.lower() == 'error':
# needs to handle errors, may be a timeout before next try
login_failed_on_ami(self, packet.emiter)
if action.name.lower() == 'success':
logged_on_ami(self, packet.emiter)
if action.name.lower() == 'challenge':
k = filter(lambda k: k.lower() == 'authtype',
action.parameters.keys())
k = k and k[0] or None
if k:
challenge(self, packet.emiter, packet.locked,
action.parameters.get(k).lower())
if action.name.lower() == 'login':
login = dict()
for k in ['Username', 'Secret', 'Events', 'Key']:
v = action.parameters.get(k) or \
action.parameters.get(k.lower()) or \
action.parameters.get(k.upper())
login[k.lower()] = v
auth_user(self, packet.emiter, packet.locked, login.get('username'),
login.get('secret') or login.get('key'),
(login.get('events') and \
login.get('events').lower() == 'off') and False or True)
if action.name.lower() == 'logoff':
logoff_user(self, packet)
def auth_user(self, emiter, locked, username, secret, wants_events):
    login_successful = False
client = self.clients.get(emiter)
if username in self.config.get('users'):
hashed = self.config.get('users').get(username).get('password')
if client.authtype is None:
if sha1(secret).hexdigest() == hashed:
                login_successful = True
elif client.authtype[0] == 'md5':
key = client.authtype[1]
_md5 = md5(key)
_md5.update(self.config.get('users').get(username).get('password'))
if secret == _md5.hexdigest():
                login_successful = True
    if login_successful:
old_id = client.id
client.id = '%s_%d' % (username, bigtime())
self.clients.pop(old_id)
self.clients.update({client.id: client})
client.logged = True
_servers = self.config.get('users').get(username).get('servers')
_servers = [s.strip() for s in _servers.split(',')]
if len(_servers) == 1:
client.binded_server = _servers[0]
else:
client.multiple_servers = _servers
client.wants_events = wants_events
response = Success(parameters=dict(
Message='Authentication accepted'))
p = dict(emiter='__internal__',
locked=locked,
timestamp=time(),
packet=response,
dest=client.id)
tmp_debug("AUTH", "'%s' logged successfully" % username)
        self.out_queue.put(Packet(p))
else:
response = Error(parameters=dict(Message='Authentication failed'))
p = dict(emiter='__internal__',
locked=locked,
timestamp=time(),
packet=response,
dest=client.id)
client.send(Packet(p))
tmp_debug("AUTH", "'%s' failed to login" % username)
client.disconnect()
def logoff_user(self, packet):
client = self.clients.get(packet.emiter)
response = Goodbye(parameters=dict(Message="Don't panic."))
p = dict(emiter='__internal__',
locked=packet.locked,
timestamp=time(),
packet=response,
dest=client.id)
client.send(Packet(p))
tmp_debug("AUTH", "'%s' logout" % packet.emiter[:packet.emiter.find('_')])
client.disconnect()
def login_failed_on_ami(self, _ami):
tmp_debug("AUTH", "Login failed on '%s'" % _ami)
def logged_on_ami(self, _ami):
tmp_debug("AUTH", "Logged on '%s'" % _ami)
ami = self.amis.get(_ami)
ami.logged = True
def challenge(self, emiter, locked, authtype):
if authtype == 'md5':
key = str(randint(100000000, 999999999))
response = Success(parameters=dict(
Challenge='%s' % key))
tmp_debug("AUTH", "'%s' asked for '%s' challenge, sent '%s'" % \
(emiter, authtype, key))
    else:
        key = None
        response = Error(parameters=dict(
            Message='Authentication type not supported'))
client = self.clients.get(emiter)
p = dict(emiter='__internal__',
locked=locked,
timestamp=time(),
packet=response,
dest=client.id)
client.send(Packet(p))
client.authtype = (authtype, key)
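# A minimal client-side sketch (hypothetical helper, not part of Octopasty)
# of the MD5 challenge flow handled above: the proxy stores
# sha1(password).hexdigest() in its config and, after sending a challenge
# key, expects the client to answer with md5(key + stored_hash).
def compute_md5_response(challenge_key, plaintext_password):
    stored_hash = sha1(plaintext_password).hexdigest()
    return md5(challenge_key + stored_hash).hexdigest()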
|
t-wissmann/qutebrowser
|
tests/unit/misc/test_keyhints.py
|
Python
|
gpl-3.0
| 7,603
| 0
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2020 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Test the keyhint widget."""
import pytest
from qutebrowser.misc import objects
from qutebrowser.misc.keyhintwidget import KeyHintView
def expected_text(*args):
"""Helper to format text we expect the KeyHintView to generate.
Args:
args: One tuple for each row in the expected output.
Tuples are of the form: (prefix, color, suffix, command).
"""
text = '<table>'
for group in args:
text += ("<tr>"
"<td>{}</td>"
"<td style='color: {}'>{}</td>"
"<td style='padding-left: 2ex'>{}</td>"
"</tr>").format(*group)
return text + '</table>'
@pytest.fixture
def keyhint(qtbot, config_stub, key_config_stub):
"""Fixture to initialize a KeyHintView."""
config_stub.val.colors.keyhint.suffix.fg = 'yellow'
keyhint = KeyHintView(0, None)
qtbot.add_widget(keyhint)
assert keyhint.text() == ''
return keyhint
def test_show_and_hide(qtbot, keyhint):
with qtbot.waitSignal(keyhint.update_geometry):
with qtbot.waitExposed(keyhint):
keyhint.show()
keyhint.update_keyhint('normal', '')
assert not keyhint.isVisible()
def test_position_change(keyhint, config_stub):
config_stub.val.statusbar.position = 'top'
stylesheet = keyhint.styleSheet()
assert 'border-bottom-right-radius' in stylesheet
assert 'border-top-right-radius' not in stylesheet
def test_suggestions(keyhint, config_stub):
"""Test that keyhints are shown based on a prefix."""
bindings = {'normal': {
'aa': 'message-info cmd-aa',
'ab': 'message-info cmd-ab',
'aba': 'message-info cmd-aba',
'abb': 'message-info cmd-abb',
'xd': 'message-info cmd-xd',
'xe': 'message-info cmd-xe',
}}
default_bindings = {'normal': {
'ac': 'message-info cmd-ac',
}}
config_stub.val.bindings.default = default_bindings
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', 'a')
assert keyhint.text() == expected_text(
('a', 'yellow', 'a', 'message-info cmd-aa'),
('a', 'yellow', 'b', 'message-info cmd-ab'),
('a', 'yellow', 'ba', 'message-info cmd-aba'),
('a', 'yellow', 'bb', 'message-info cmd-abb'),
('a', 'yellow', 'c', 'message-info cmd-ac'))
def test_suggestions_special(keyhint, config_stub):
"""Test that special characters work properly as prefix."""
bindings = {'normal': {
'<Ctrl-C>a': 'message-info cmd-Cca',
'<Ctrl-C><Ctrl-C>': 'message-info cmd-CcCc',
'<Ctrl-C><Ctrl-X>': 'message-info cmd-CcCx',
'cbb': 'message-info cmd-cbb',
'xd': 'message-info cmd-xd',
'xe': 'message-info cmd-xe',
}}
default_bindings = {'normal': {
'<Ctrl-C>c': 'message-info cmd-Ccc',
}}
config_stub.val.bindings.default = default_bindings
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', '<Ctrl+c>')
assert keyhint.text() == expected_text(
('<Ctrl+c>', 'yellow', 'a', 'message-info cmd-Cca'),
('<Ctrl+c>', 'yellow', 'c', 'message-info cmd-Ccc'),
('<Ctrl+c>', 'yellow', '<Ctrl+c>',
'message-info cmd-CcCc'),
('<Ctrl+c>', 'yellow', '<Ctrl+x>',
'message-info cmd-CcCx'))
def test_suggestions_with_count(keyhint, config_stub, monkeypatch, stubs):
"""Test that a count prefix filters out commands that take no count."""
monkeypatch.setattr(objects, 'commands', {
'foo': stubs.FakeCommand(name='foo', takes_count=lambda: False),
'bar': stubs.FakeCommand(name='bar', takes_count=lambda: True),
})
bindings = {'normal': {'aa': 'foo', 'ab': 'bar'}}
config_stub.val.bindings.default = bindings
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', '2a')
assert keyhint.text() == expected_text(
('a', 'yellow', 'b', 'bar'),
)
def test_special_bindings(keyhint, config_stub):
"""Ensure a prefix of '<' doesn't suggest special keys."""
bindings = {'normal': {
'<a': 'message-info cmd-<a',
'<b': 'message-info cmd-<b',
'<ctrl-a>': 'message-info cmd-ctrla',
}}
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', '<')
assert keyhint.text() == expected_text(
('<', 'yellow', 'a', 'message-info cmd-<a'),
('<', 'yellow', 'b', 'message-info cmd-<b'))
def test_color_switch(keyhint, config_stub):
"""Ensure the keyhint suffix color can be updated at runtime."""
bindings = {'normal': {'aa': 'message-info cmd-aa'}}
config_stub.val.colors.keyhint.suffix.fg = '#ABCDEF'
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', 'a')
assert keyhint.text() == expected_text(('a', '#ABCDEF', 'a',
'message-info cmd-aa'))
def test_no_matches(keyhint, config_stub):
"""Ensure the widget isn't visible if there are no keystrings to show."""
bindings = {'normal': {
'aa': 'message-info cmd-aa',
'ab': 'message-info cmd-ab',
}}
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', 'z')
assert not keyhint.text()
assert not keyhint.isVisible()
@pytest.mark.parametrize('blacklist, expected', [
(['ab*'], expected_text(('a', 'yellow', 'a', 'message-info cmd-aa'))),
(['*'], ''),
])
def test_blacklist(keyhint, config_stub, blacklist, expected):
"""Test that blacklisted keychains aren't hinted."""
config_stub.val.keyhint.blacklist = blacklist
bindings = {'normal': {
'aa': 'message-info cmd-aa',
'ab': 'message-info cmd-ab',
'aba': 'message-info cmd-aba',
'abb': 'message-info cmd-abb',
'xd': 'message-info cmd-xd',
'xe': 'message-info cmd-xe',
}}
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint.update_keyhint('normal', 'a')
assert keyhint.text() == expected
def test_delay(qtbot, stubs, monkeypatch, config_stub, key_config_stub):
timer = stubs.FakeTimer()
monkeypatch.setattr(
'qutebrowser.misc.keyhintwidget.usertypes.Timer',
lambda *_: timer)
interval = 200
bindings = {'normal': {'aa': 'message-info cmd-aa'}}
config_stub.val.keyhint.delay = interval
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = bindings
keyhint = KeyHintView(0, None)
keyhint.update_keyhint('normal', 'a')
assert timer.isSingleShot()
assert timer.interval() == interval
|
troukny/NetGen
|
src/network_handler.py
|
Python
|
gpl-3.0
| 2,563
| 0.014046
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
NetGen is a tool for financial network analysis
Copyright (C) 2013 Tarik Roukny (troukny@ulb.ac.be)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 3 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
"""
# ==========================================
# Libraries and Packages
import networkx as nx
# ==========================================
class NetworkHandler:
# -------------------------------------------------------------
#
# init (directory, name)
#
# -------------------------------------------------------------
# def __init__(self, directory, name):
def __init__ (self, directory, name,
list_edges, out_node, in_node,
is_directed, is_weighted, edge_weight):
if is_directed == 'on':
self.G = nx.DiGraph()
else:
self.G = nx.Graph()
self.directory = directory
self.name = name
self.list_edges = list_edges
self.out_node_index = out_node
self.in_node_index = in_node
self.is_weighted = is_weighted
self.edge_weight = edge_weight
self.generate_network()
self.save_network()
# -------------------------------------------------------------
#
# generate_network (data)
#
# -------------------------------------------------------------
def generate_network(self):
if self.is_weighted == 'on':
for edge in self.list_edges:
weight = edge[self.edge_weight]
                try:
                    weight = float(weight)
                except (TypeError, ValueError):
                    weight = 0.0
                self.G.add_edge(edge[self.out_node_index],
                                edge[self.in_node_index], weight=weight)
else:
for edge in self.list_edges:
self.G.add_edge(edge[self.out_node_index], edge[self.in_node_index])
# -------------------------------------------------------------
#
# save_network ()
#
# -------------------------------------------------------------
def save_network(self):
nx.write_gexf(self.G, self.directory + self.name + '.gexf')
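# Minimal usage sketch with hypothetical values: column 0 holds the source
# node, column 1 the target, column 2 the weight; writes ./demo.gexf.
if __name__ == '__main__':
    rows = [['a', 'b', '1.5'], ['b', 'c', '2.0']]
    NetworkHandler('./', 'demo', rows, out_node=0, in_node=1,
                   is_directed='on', is_weighted='on', edge_weight=2)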
|
mapr/impala
|
tests/stress/test_ddl_stress.py
|
Python
|
apache-2.0
| 3,153
| 0.011418
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import re
import uuid
from subprocess import call
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite
# Number of tables to create per thread
NUM_TBLS_PER_THREAD = 10
# Each client will get a different test id.
TEST_IDS = xrange(0, 10)
# Simple stress test for DDL operations. Attempts to create, cache,
# uncache, then drop many different tables in parallel.
class TestDdlStress(ImpalaTestSuite):
@classmethod
  def get_workload(cls):
return 'targeted-stress'
@classmethod
def add_test_dimensions(cls):
super(TestDdlStress, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(TestDimension('test_id', *TEST_IDS))
cls.TestMatrix.add_constraint(lambda v: v.get_value('exec_option')['batch_size'] == 0)
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
@pytest.mark.stress
def test_create_cache_many_tables(self, vector):
self.client.set_configuration(vector.get_value('exec_option'))
self.client.execute("create database if not exists ddl_stress_testdb")
self.client.execute("use ddl_stress_testdb")
tbl_uniquifier = str(uuid.uuid4()).replace('-', '')
for i in xrange(NUM_TBLS_PER_THREAD):
tbl_name = "tmp_%s_%s" % (tbl_uniquifier, i)
# Create a partitioned and unpartitioned table
self.client.execute("create table %s (i int)" % tbl_name)
self.client.execute("create table %s_part (i int) partitioned by (j int)" %\
tbl_name)
# Add some data to each
self.client.execute("insert overwrite table %s select int_col from "\
"functional.alltypestiny" % tbl_name)
self.client.execute("insert overwrite table %s_part partition(j) "\
"values (1
|
, 1), (2, 2), (3, 3), (4, 4), (4, 4)" % tbl_name)
      # Cache the data in the unpartitioned table
self.client.execute("alter table %s set cached in 'testPool'" % tbl_name)
# Cache, uncache, then re-cache the data in the partitioned table.
self.client.execute("alter table %s_part set cached in 'tes
|
tPool'" % tbl_name)
self.client.execute("alter table %s_part set uncached" % tbl_name)
self.client.execute("alter table %s_part set cached in 'testPool'" % tbl_name)
# Drop the tables, this should remove the cache requests.
self.client.execute("drop table %s" % tbl_name)
self.client.execute("drop table %s_part" % tbl_name)
|
boryas/eidetic
|
lib/forget.py
|
Python
|
mit
| 226
| 0.00885
|
import rethinkdb as r
import db
def _forget_project(name, conn):
    db.get_table().filter(r.row['name'] == name).delete().run(conn)
def forget_project(name):
conn = db.get_conn()
    return _forget_project(name, conn)
|
chfw/pyexcel
|
tests/test_bug_fixes.py
|
Python
|
bsd-3-clause
| 15,003
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from datetime import datetime
from textwrap import dedent
import psutil
import pyexcel as p
from _compact import StringIO, OrderedDict
from nose.tools import eq_
def test_bug_01():
"""
if first row of csv is shorter than the rest of the rows,
the csv will be truncated by first row. This is a bug
"a,d,e,f" <- this will be 1
'1',2,3,4 <- 4
'2',3,4,5
'b' <- give '' for missing cells
"""
r = p.Reader(os.path.join("tests", "fixtures", "bug_01.csv"))
assert len(r.row[0]) == 4
# test "" is append for empty cells
assert r[0, 1] == ""
assert r[3, 1] == ""
def test_issue_03():
file_prefix = "issue_03_test"
csv_file = "%s.csv" % file_prefix
xls_file = "%s.xls" % file_prefix
my_sheet_name = "mysheetname"
data = [[1, 1]]
sheet = p.Sheet(data, name=my_sheet_name)
sheet.save_as(csv_file)
assert os.path.exists(csv_file)
sheet.save_as(xls_file)
book = p.load_book(xls_file)
assert book.sheet_names()[0] == my_sheet_name
os.unlink(csv_file)
os.unlink(xls_file)
def test_issue_06():
import logging
logger = logging.getLogger("test")
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
output = StringIO()
book = p.Book({"hoja1": [["datos", "de", "prueba"], [1, 2, 3]]})
book.save_to_memory("csv", output)
logger.debug(output.getvalue())
def test_issue_09():
p.book.LOCAL_UUID = 0
merged = p.Book()
sheet1 = p.Sheet(sheet=[[1, 2]])
sheet2 = p.Sheet(sheet=[[1, 2]])
merged += sheet1
merged += sheet2
eq_(merged[1].name, "pyexcel sheet_1")
def test_issue_10():
thedict = OrderedDict()
thedict.update({"Column 1": [1, 2, 3]})
thedict.update({"Column 2": [1, 2, 3]})
thedict.update({"Column 3": [1, 2, 3]})
p.save_as(adict=thedict, dest_file_name="issue10.xls")
newdict = p.get_dict(file_name="issue10.xls")
assert isinstance(newdict, OrderedDict) is True
eq_(thedict, newdict)
os.unlink("issue10.xls")
def test_issue_29():
a = [
# error case
["2016-03-31 10:59", "0123", "XS360_EU", "04566651561653122"],
# python types
[datetime(2016, 4, 15, 17, 52, 11), 123, False, 456193284757],
]
s = p.get_sheet(array=a)
content = dedent(
"""
pyexcel_sheet1:
+------------------+------+----------+-------------------+
| 2016-03-31 10:59 | 0123 | XS360_EU | 04566651561653122 |
+------------------+------+----------+-------------------+
| 15/04/16 | 123 | false | 456193284757 |
+------------------+------+----------+-------------------+"""
)
eq_(str(s), content.strip("\n"))
def test_issue_29_nominablesheet():
a = [
["date", "number", "misc", "long number"],
# error case
["2016-03-31 10:59", "0123", "XS360_EU", "04566651561653122"],
# python types
[datetime(2016, 4, 15, 17, 52, 11), 123, False, 456193284757],
]
s = p.get_sheet(array=a)
s.name_columns_by_row(0)
content = dedent(
"""
pyexcel_sheet1:
+------------------+--------+----------+-------------------+
| date | number | misc | long number |
+==================+========+==========+===================+
| 2016-03-31 10:59 | 0123 | XS360_EU | 04566651561653122 |
+------------------+--------+----------+-------------------+
| 15/04/16 | 123 | false | 456193284757 |
+------------------+--------+----------+-------------------+"""
)
eq_(str(s), content.strip("\n"))
def test_issue_51_orderred_dict_in_records():
from pyexcel.plugins.sources.pydata.records import RecordsReader
records = []
orderred_dict = OrderedDict()
orderred_dict.update({"Zebra": 10})
orderred_dict.update({"Hippo": 9})
orderred_dict.update({"Monkey": 8})
records.append(orderred_dict)
orderred_dict2 = OrderedDict()
orderred_dict2.update({"Zebra": 1})
orderred_dict2.update({"Hippo": 2})
orderred_dict2.update({"Monkey": 3})
records.append(orderred_dict2)
records_reader = RecordsReader(records)
array = list(records_reader.to_array())
expected = [["Zebra", "Hippo", "Monkey"], [10, 9, 8], [1, 2, 3]]
eq_(array, expected)
def test_issue_51_normal_dict_in_records():
from pyexcel.plugins.sources.pydata.records import RecordsReader
records = []
orderred_dict = {}
orderred_dict.update({"Zebra": 10})
orderred_dict.update({"Hippo": 9})
orderred_dict.update({"Monkey": 8})
records.append(orderred_dict)
orderred_dict2 = {}
orderred_dict2.update({"Zebra": 1})
orderred_dict2.update({"Hippo": 2})
orderred_dict2.update({"Monkey": 3})
records.append(orderred_dict2)
records_reader = RecordsReader(records)
array = list(records_reader.to_array())
expected = [["Hippo", "Monkey", "Zebra"], [9, 8, 10], [2, 3, 1]]
eq_(array, expected)
def test_issue_55_unicode_in_headers():
headers = [u"Äkkilähdöt", u"Matkakirjoituksia", u"Matkatoimistot"]
content = [headers, [1, 2, 3]]
sheet = p.Sheet(content)
sheet.name_columns_by_row(0)
eq_(sheet.colnames, headers)
def test_issue_60_chinese_text_in_python_2_stdout():
import sys
data = [["这", "是", "中", "文"], ["这", "是", "中", "文"]]
sheet = p.Sheet(data)
sys.stdout.write(repr(sheet))
def test_issue_60_chinese_text_in_python_2_stdout_on_book():
import sys
adict = {"Sheet 1": [["这", "是", "中", "文"], ["这", "是", "中", "文"]]}
book = p.Book()
book.bookdict = adict
sys.stdout.write(repr(book))
def test_issue_63_empty_array_crash_texttable_renderer():
sheet = p.Sheet([])
print(sheet)
def test_xls_issue_11():
data = [[1, 2]]
sheet = p.Sheet(data)
sheet2 = p.get_sheet(file_content=sheet.xls, file_type="XLS")
eq_(sheet.array, sheet2.array)
test_file = "xls_issue_11.JSON"
sheet2.save_as(test_file)
os.unlink(test_file)
def test_issue_68():
data = [[1]]
sheet = p.Sheet(data)
stream = sheet.save_to_memory("csv")
eq_(stream.read(), "1\r\n")
data = {"sheet": [[1]]}
book = p.Book(data)
stream = book.save_to_memory("csv")
eq_(stream.read(), "1\r\n")
def test_issue_74():
from decimal import Decimal
data = [[Decimal("1.1")]]
sheet = p.Sheet(data)
table = sheet.texttable
expected = "pyexcel sheet:\n+-----+\n| 1.1 |\n+-----+"
eq_(table, expected)
def test_issue_76():
from pyexcel._compact import StringIO
tsv_stream = StringIO()
tsv_stream.write("1\t2\t3\t4\n")
tsv_stream.write("1\t2\t3\t4\n")
tsv_stream.seek(0)
sheet = p.get_sheet(
file_stream=tsv_stream, file_type="csv", delimiter="\t"
)
data = [[1, 2, 3, 4], [1, 2, 3, 4]]
eq_(sheet.array, data)
def test_issue_83_csv_file_handle():
proc = psutil.Process()
test_file = os.path.join("tests", "fixtures", "bug_01.csv")
open_files_l1 = proc.open_files()
# start with a csv file
data = p.iget_array(file_name=test_file)
open_files_l2 = proc.open_files()
delta = len(open_files_l2) - len(open_files_l1)
# interestingly, no open file handle yet
assert delta == 0
# now the file handle get opened when we run through
# the generator
list(data)
open_files_l3 = proc.open_files()
delta = len(open_files_l3) - len(open_files_l1)
# caught an open file handle, the "fish" finally
assert delta == 1
# free the fish
p.free_resources()
open_files_l4 = proc.open_files()
# this confirms that no more open file handle
eq_(open_files_l1, open_files_l4)
def test_issue_83_file_handle_no_generator():
proc = psutil.Process()
test_files = [
os.path.join("tests", "fixtures", "bug_01.csv"),
os.path.join("tests", "fixtures", "test-single.csvz"),
os.path.join("tests", "fixtures", "date_field.xls"),
]
for test_file in test_files:
open_files_l1 = proc.open_files()
# start with a csv file
        p.get_array(file_name=test_file)
|
staticaland/trips
|
trips/cli.py
|
Python
|
mit
| 302
| 0
|
# -*- coding: utf-8 -*-
import click
from trips import oslo
@click.command()
@click.argument('from_place')
@click.argument('to_place')
def main(from_place, to_place):
proposals = oslo.proposals(from_place, to_place)
    oslo.print_proposals(proposals)
if __name__ == "__main__":
main()
|
georgetown-analytics/classroom-occupancy
|
SensorDataCollection/Sensors/Asynchronous/DoorSensor.py
|
Python
|
mit
| 2,071
| 0.021246
|
import spidev
import RPi.GPIO as GPIO
import time
class DoorSensor:
    def __init__(self, pin=None, verbose=False, dblogger=None):
self.results = ()
self.device = None
self.pin = pin
self.dblogger = dblogger
self.verbose = verbose
# assign default pin if none provided
        if self.pin is None:
self.pin = 12
GPIO.setmode(GPIO.BOARD)
GPIO.setup(self.pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
self.device = GPIO
self.name = "Door"
def getName(self):
return self.name
def door_event(self, open):
evt = 'opened' if open else 'closed'
self.results = (evt,)
        if self.verbose:
            print('door ' + evt)
        if not open:
            time.sleep(0.5)
# log in DB if logger present
if self.dblogger is not None:
self.logLastReading(self.dblogger)
def waitForEvents(self):
d = self.device
pin = self.pin
switch = True
while True:
if d.input(pin): # if door is opened
if (switch):
self.door_event(True) # send door open event
switch = False # make sure it doesn't fire again
if not d.input(pin): # if door is closed
if not (switch):
self.door_event(False) # send door closed event
switch = True # make sure it doesn't fire again
    def logLastReading(self, dblogger):
cursor = dblogger.cursor
conn = dblogger.conn
loc = dblogger.location
        tstamp = int(round(time.time() * 1000))
cmd = "INSERT INTO Door (timestamp, location, door_status) VALUES (%s, %s, %s);"
cursor.execute(cmd, (tstamp, loc, self.results[0]))
conn.commit()
def getLastReading(self):
return self.results
def cleanUp(self):
GPIO.cleanup()
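    # Alternative sketch (not part of the original class): RPi.GPIO can
    # deliver door edges via interrupts instead of the busy-poll loop in
    # waitForEvents; bouncetime debounces contact chatter.
    def waitForEventsInterrupt(self):
        def on_edge(channel):
            self.door_event(self.device.input(channel))
        self.device.add_event_detect(self.pin, GPIO.BOTH,
                                     callback=on_edge, bouncetime=300)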
if __name__ == '__main__':
# initialize different sensors
vbose = True
from ..DBLogger import DBLogger
dbl = DBLogger()
ds = DoorSensor(dblogger=dbl, verbose=vbose)
# Listen to events
try:
ds.waitForEvents()
except KeyboardInterrupt:
if vbose: print("finishing.")
finally:
ds.cleanUp()
|
ssplatt/slack-zenoss
|
ZenPacks/community/Slack/interfaces.py
|
Python
|
gpl-2.0
| 960
| 0.001042
|
######################################################################
#
# Copyright 2012 Zenoss, Inc. All Rights Reserved.
#
######################################################################
from zope.interface import Interface
try:
    from Products.Zuul.interfaces.actions import IActionContentInfo
except ImportError:
from Products.Zuul import IInfo as IActionContentInfo
from Products.Zuul.interfaces import IFacade
from Products.Zuul.form import schema
from Products.Zuul.utils import ZuulMessageFactory as _t
class ISlackActionContentInfo(IActionContentInfo):
slackUrl = schema.TextLine(
title=_t(u'Slack URL'),
order=90,
)
proxyUrl = schema.TextLine(
title=_t(u'Proxy URL'),
order=100,
)
proxyUsername = schema.TextLine(
title=_t(u'Proxy username'),
order=110,
)
proxyPassword = schema.Password(
title=_t(u'Proxy password'),
order=120,
)
|
AustereCuriosity/astropy
|
astropy/table/tests/test_subclass.py
|
Python
|
bsd-3-clause
| 2,488
| 0
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# TEST_UNICODE_LITERALS
from ... import table
from .. import pprint
class MyRow(table.Row):
def __str__(self):
return str(self.as_void())
class MyColumn(table.Column):
pass
class MyMaskedColumn(table.MaskedColumn):
pass
class MyTableColumns(table.TableColumns):
pass
class MyTableFormatter(pprint.TableFormatter):
pass
class MyTable(table.Table):
Row = MyRow
Column = MyColumn
MaskedColumn = MyMaskedColumn
TableColumns = MyTableColumns
TableFormatter = MyTableFormatter
def test_simple_subclass():
t = MyTable([[1, 2], [3, 4]])
row = t[0]
assert isinstance(row, MyRow)
assert isinstance(t['col0'], MyColumn)
assert isinstance(t.columns, MyTableColumns)
assert isinstance(t.formatter, MyTableFormatter)
t2 = MyTable(t)
row = t2[0]
assert isinstance(row, MyRow)
assert str(row) == '(1, 3)'
t3 = table.Table(t)
row = t3[0]
assert not isinstance(row, MyRow)
assert str(row) != '(1, 3)'
t = MyTable([[1, 2], [3, 4]], masked=True)
row = t[0]
assert isinstance(row, MyRow)
assert str(row) == '(1, 3)'
assert isinstance(t['col0'], MyMaskedColumn)
assert isinstance(t.formatter, MyTableFormatter)
class ParamsRow(table.Row):
"""
Row class that allows access to an arbitrary dict of parameters
stored as a dict object in the ``params`` column.
"""
def __getitem__(self, item):
if item not in self.colnames:
return super(ParamsRow, self).__getitem__('params')[item]
else:
return super(ParamsRow, self).__getitem__(item)
def keys(self):
        out = [name for name in self.colnames if name != 'params']
params = [key.lower() for key in sorted(self['params'])]
return out + params
def values(self):
return [self[key] for key in self.keys()]
class ParamsTable(table.Table):
Row = ParamsRow
def test_params_table():
t = ParamsTable(names=['a', 'b', 'params'], dtype=['i', 'f', 'O'])
t.add_row((1, 2.0, {'x': 1.5, 'y': 2.5}))
t.add_row((2, 3.0, {'z': 'hello', 'id': 123123}))
assert t['params'][0] == {'x': 1.5, 'y': 2.5}
assert t[0]['params'] == {'x': 1.5, 'y': 2.5}
assert t[0]['y'] == 2.5
assert t[1]['id'] == 123123
assert list(t[1].keys()) == ['a', 'b', 'id', 'z']
assert list(t[1].values()) == [2, 3.0, 123123, 'hello']
|
DailyActie/Surrogate-Model
|
01-codes/tensorflow-master/tensorflow/python/framework/tensor_util_test.py
|
Python
|
mit
| 16,873
| 0.001008
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for tensor_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import state_ops
class TensorUtilTest(tf.test.TestCase):
def testFloat(self):
t = tensor_util.make_tensor_proto(10.0)
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape {}
float_val: 10.0
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array(10.0, dtype=np.float32), a)
def testFloatN(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0])
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
        self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTyped(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], dtype=tf.float32)
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
        self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTypeCoerce(self):
t = tensor_util.make_tensor_proto([10, 20, 30], dtype=tf.float32)
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTypeCoerceNdarray(self):
arr = np.asarray([10, 20, 30], dtype="int")
t = tensor_util.make_tensor_proto(arr, dtype=tf.float32)
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatSizes(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([[10.0, 20.0, 30.0]], dtype=np.float32), a)
def testFloatSizes2(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[3, 1])
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } dim { size: 1 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([[10.0], [20.0], [30.0]], dtype=np.float32),
a)
def testFloatSizesLessValues(self):
t = tensor_util.make_tensor_proto(10.0, shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
float_val: 10.0
""", t)
# No conversion to Ndarray for this one: not enough values.
def testFloatNpArrayFloat64(self):
t = tensor_util.make_tensor_proto(
np.array([[10.0, 20.0, 30.0]], dtype=np.float64))
self.assertProtoEquals("""
dtype: DT_DOUBLE
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000$@\000\000\000\000\000\0004@\000\000\000\000\000\000>@"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float64, a.dtype)
self.assertAllClose(np.array([[10.0, 20.0, 30.0]], dtype=np.float64),
tensor_util.MakeNdarray(t))
def testFloatTypesWithImplicitRepeat(self):
for dtype, nptype in [
(tf.float32, np.float32), (tf.float64, np.float64)]:
t = tensor_util.make_tensor_proto([10.0], shape=[3, 4], dtype=dtype)
a = tensor_util.MakeNdarray(t)
self.assertAllClose(np.array([[10.0, 10.0, 10.0, 10.0],
[10.0, 10.0, 10.0, 10.0],
[10.0, 10.0, 10.0, 10.0]], dtype=nptype), a)
def testInt(self):
t = tensor_util.make_tensor_proto(10)
self.assertProtoEquals("""
dtype: DT_INT32
tensor_shape {}
int_val: 10
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int32, a.dtype)
self.assertAllClose(np.array(10, dtype=np.int32), a)
def testIntNDefaultType(self):
t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
self.assertProtoEquals("""
dtype: DT_INT32
tensor_shape { dim { size: 2 } dim { size: 2 } }
tensor_content: "\\n\000\000\000\024\000\000\000\036\000\000\000(\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int32, a.dtype)
self.assertAllClose(np.array([[10, 20], [30, 40]], dtype=np.int32), a)
def testIntTypes(self):
for dtype, nptype in [
(tf.int32, np.int32),
(tf.uint8, np.uint8),
(tf.uint16, np.uint16),
(tf.int16, np.int16),
(tf.int8, np.int8)]:
# Test with array.
t = tensor_util.make_tensor_proto([10, 20, 30], dtype=dtype)
self.assertEquals(dtype, t.dtype)
self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
a = tensor_util.MakeNdarray(t)
self.assertEquals(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
# Test with ndarray.
t = tensor_util.make_tensor_proto(np.array([10, 20, 30], dtype=nptype))
self.assertEquals(dtype, t.dtype)
self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
a = tensor_util.MakeNdarray(t)
self.assertEquals(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
def testIntTypesWithImplicitRepeat(self):
for dtype, nptype in [
(tf.int64, np.int64),
(tf.int32, np.int32),
(tf.uint8, np.uint8),
(tf.uint16, np.uint16),
(tf.int16, np.int16),
(tf.int8, np.int8)]:
t = tensor_util.make_tensor_proto([10], shape=[3, 4], dtype=dtype)
a = tensor_util.MakeNdarray(t)
self.assertAllEqual(np.array([[10, 10, 10, 10],
[10, 10, 10, 10],
[10, 10, 10, 10]], dtype=nptype), a)
def testLong(self):
t = tensor_util.make_tensor_proto(10, dtype=tf.int64)
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
      int64_val: 10
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int64, a.dtype)
    self.assertAllClose(np.array(10, dtype=np.int64), a)
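A minimal sketch of the implicit-repeat round trip the tests above exercise (same tensor_util APIs; shape and values are illustrative):
t = tensor_util.make_tensor_proto(10, shape=[2, 2], dtype=tf.int32)
a = tensor_util.MakeNdarray(t)
# a == np.array([[10, 10], [10, 10]], dtype=np.int32): one value fills the whole shape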
deedee1886-cmis/deedee1886-cmis-cs2 | simpleprogram.py | Python | cc0-1.0 | 888 | 0.022523
first_name = raw_input("Typ
|
e your name here ")
last_name = raw_input("Type your last name here ")
print "Hello " + first_name +' '+last_name + " nice to meet you, welcome to the height conversion program"
def info():
a = raw_input("type your height here: ")
return a
def height_in_inches():
    height_in_inches = float(info()) * 0.39370
return height_in_inches
def your_height_vs_average_height():
your_height_vs_average_height = float(height_in_inches()) - 69
return your_height_vs_average_height
def main():
print "you are {} inches tall,{} is your height compared to the average height (69 inches)".format(str(height_in_inches()),str(your_height_vs_average_height()))
#if the number returned is positive, that means you are that much taller than the average height, if the number returned is negative, then that means you are that much shorter than average height
main()
Southpaw-TACTIC/TACTIC | src/pyasm/application/maya/maya_builder_exec.py | Python | epl-1.0 | 2,188 | 0.00457
#!/usr/bin/python
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
import os
import sys
from .maya_environment import *
from .maya_app import *
from .maya_builder import *
def get_maya_builder_cmd(path, ticket):
# dynamically import Config. This can only be done by Tactic (no client)
from pyasm.common import Config
python = Config.get_value("services", "python")
cmd = '%s "%s" %s %s' % (python, __file__, path, ticket)
return cmd
def maya_builder_exec(path, ticket):
# run this executable as a separate system process
cmd = get_maya_builder_cmd(path, ticket)
print(cmd)
os.system(cmd)
if __name__ == '__main__':
executable = sys.argv[0]
path = sys.argv[1]
ticket = sys.argv[2]
# need to add these paths because they not currently in the
# windows environment
#sys.path.append("E:/sthpw/tactic/sthpw/src")
from pyasm.security import Batch
Batch()
file = open(path, 'r')
contents = file.read()
file.close()
# set up maya
from pyasm.application.common import BaseAppInfo
info = BaseAppInfo("maya")
from pyasm.common import Environment
tmpdir = "%s/temp/%s" % (Environment.get_t
|
mp_dir(), ticket)
info.set_tmpdir(tmpdir)
info.set_user(Environment.get_user_name() )
info.set_ticket(ticket)
info.set_up_maya(init=True)
env = info.get_app_env()
env.set_tmpdir(tmpdir)
# create the file builder
builder = info.get_builder()
builder.execute(contents)
# save the file
    filepath = "%s/maya_render.ma" % env.get_tmpdir()
info.get_app().save(filepath)
from maya_introspect import MayaIntrospect
introspect = MayaIntrospect()
introspect.execute()
session_xml = introspect.get_session_xml()
# should reproduce glue!!!
file = open("%s/session.xml" % env.get_tmpdir(), "w" )
file.write(session_xml)
file.close()
DanAurea/Trisdanvalwen | communication/ComAPI/packetLogin.py | Python | mit | 1,024 | 0.037109
from struct import pack, unpack
from time import time
from communication.ComAPI.packet import Packet
class PacketLogin(Packet):
"""Class for constructing binary data based
on a common API between client / server."""
def __init__(self):
super().__init__()
self.packetID = 3
def encode(self, username, avatar, position):
"""
Encode a message with API format
DRPG + PacketID + username length + username
+ avatar length + avatar + x + y + z
"""
bContainer = super().encode()
## Add position
## TODO: Be aware of byte order from client for portable version
bContainer = bContainer.__add__(pack(">B" , len(username) ))
bContainer = bContainer.__add__(username.encode())
bContainer = bContainer.__add__(pack(">B",len(avata
|
r)))
bContainer = bContainer.__add__(avatar.encode())
bContainer = bContainer.__add__(pack(">f", position[0]))
bContainer = bContainer.__add__(pack(">f", position[1]))
bContainer = bContainer.__add__(pack(">f", position[2]))
return bContainer
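A minimal usage sketch for the class above (username, avatar, and position are illustrative assumptions, not part of the API):
p = PacketLogin()
payload = p.encode("alice", "knight", (0.0, 1.5, 0.0))
# payload = base Packet header + len/username + len/avatar + three big-endian floats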
ixc/plata | examples/simple/views.py | Python | bsd-3-clause | 1,810 | 0.001105
from django import forms
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.db.models import ObjectDoesNotExist
from django.shortcuts import get_object_or_404, redirect, render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext as _
from django.views import generic
from plata.contact.models import Contact
from plata.discount.models import Discount
from plata.shop.views import Shop
from plata.shop.models import Order
from simple.models import Product
shop = Shop(Contact, Order, Discount)
product_list = generic.ListView.as_view(
queryset=Product.objects.filter(is_active=True),
    template_name='product/product_list.html',
)
class OrderItemForm(forms.Form):
quantity = forms.IntegerField(label=_('quantity'), initial=1,
min_value=1, max_value=100)
def product_detail(request, object_id):
product = get_object_or_404(Product.objects.filter(is_active=True), pk=object_id)
if request.method == 'POST':
form = OrderItemForm(request.POST)
if form.is_valid():
order = shop.order_from_request(request, create=True)
try:
order.modify_item(product, form.cleaned_data.get('quantity'))
messages.success(request, _('The cart has been updated.'))
except ValidationError, e:
if e.code == 'order_sealed':
[messages.error(request, msg) for msg in e.messages]
else:
raise
return redirect('plata_shop_cart')
else:
form = OrderItemForm()
return render_to_response('product/product_detail.html', {
'object': product,
'form': form,
}, context_instance=RequestContext(request))
tangowhisky37/RaspiPythonProjects | OpenCV/CaptureVideoStream/CaptureVideoStream_v0.21.py | Python | gpl-3.0 | 10,532 | 0.028959
#!/home/pi/.virtualenvs/cv2/bin/python
from picamera.array import PiRGBArray
from picamera import PiCamera
import picamera
from time import sleep
import time
import cv2
import numpy as np
import sys
import datetime
import boto3
import subprocess
import os
import pyowm
import commands
import multiprocessing
import threading
import json
import shlex
import csv
#AWS Rekognition variables
bucket_target_var = "tw37-opencv"
#bucket_source_var = "new_image_name.jpg"
key_source_var = "orignal_trevor_1706.jpg"
bucket_source_var = "tw37-original"
#AWS Rekognition Code - Face Comparison
def compare_faces(bucket, key, bucket_target, key_target, threshold=80, region="us-west-2"):
    def WeatherProcessing():
        #OWM Weather Data Functions
        owm = pyowm.OWM('xxxxxxxxxxxxxxxxxxx') # You MUST provide a valid API key
        #Search for current weather in Melbourne (Australia)
        observation = owm.weather_at_place('Melbourne,au')
        w = observation.get_weather()
        #Get Weather details
        Wind = w.get_wind() # {'speed': 4.6, 'deg': 330}
        WindText = "espeak -g 10 \" Current wind Speed and Direction is " + format(Wind) + " \" "
        #print (WindText)
        SWind = w.get_wind()['speed'] # 4
        SWindText = "espeak -g 10 \" Current wind Speed is " + format(SWind) + " knots \" "
        Humidity = w.get_humidity() # 87
        HumidityText = "espeak -g 10 \" Current humidity is " + format(Humidity) + " percent \" "
        Temperature = w.get_temperature('celsius') # {'temp_max': 10.5, 'temp': 9.7, 'temp_min': 9.0}
        TemperatureText = "espeak -g 10 \" Current temperature is " + format(Temperature) + " degrees \" "
        TemperatureAvg = w.get_temperature('celsius')['temp'] # {'temp_max': 10.5, 'temp': 9.7, 'temp_min': 9.0}
        TemperatureAvgText = "espeak -g 10 \" Current temperature is " + format(TemperatureAvg) + " degrees \" "
        Clouds = w.get_clouds()
        Rainfall = w.get_rain()
        Pressure = w.get_pressure()
        #subprocess.call(shlex.split(WindText))
        subprocess.call(shlex.split(SWindText))
        subprocess.call(shlex.split(HumidityText))
        subprocess.call(shlex.split(TemperatureAvgText))
        #Output for debugging purpose
        #print (" ")
        #print (" ")
        #print ("****************************************************************************************************")
        #print ("Current wind Speed and Direction right now in Melbourne is = %s " %Wind)
        #print ("Current Temperature in Melbourne is = %s" %Temperature)
        #print ("Current Humidity in Melbourne is = %s Percent" %Humidity)
        #print ("Cloud ceiling across Melbourne is %s thousand feet" %Clouds)
        #print ("Current Rainfall across Melbourne is %s " %Rainfall)
        #print ("Barometric Pressure across Melbourne is %s " %Pressure)
        #print ("****************************************************************************************************")
        #print (" ")
        #print (" ")
#Face Matching Code Starts Here
rekognition = boto3.client("rekognition", region)
response = rekognition.compare_faces(
SourceImage={
"S3Object": {
"Bucket": bucket,
"Name": key,
}
},
TargetImage={
"S3Object": {
"Bucket": bucket_target,
"Name": key_target,
}
},
SimilarityThreshold=threshold,
)
#Reading JSON and converting into workable format
#print(response)
temp1 = json.dumps(response)
temp2 = json.loads(temp1)
#print(temp2['FaceMatches'])
print "Source Face Confidence in %s " %format(temp2['SourceImageFace']['Confidence'])
for match in temp2['FaceMatches']:
print "*******************************************"
print " "
print "Similarity between compared faces is %s " %format(temp2['FaceMatches'][0]['Similarity'])
print " "
print "*******************************************"
#Writing timestamp to log file
now = datetime.datetime.now()
outputFile = open('/opt/data/face_capture_time_log.txt', 'a')
outputWriter = csv.writer(outputFile)
tempArray = [now]
outputWriter.writerow(tempArray)
outputFile.close()
#Reading older timestamp from log file
proc = subprocess.Popen(["tail -n 1 /opt/data/face_capture_time_log.txt | cut -d : -f 2"], stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
temp = out.strip()
oldtimestampminute = int(temp)
#Subtracting seconds to find the difference
diff = oldtimestampminute - now.minute
if abs(diff) > 1: #abs takes care of negative values and provides a positive number as the result
print "*******************************************"
print " "
print " !!! Speech To Text happens here!!!! "
print " "
print "*******************************************"
subprocess.call('espeak \" Hi Trevor Welcome back \" ', shell=True)
WeatherProcessing()
else:
print "****************************************************************************"
print " "
print ("Ain't bothering you because we just spotted you less than a a min ago")
print " "
print "**********************************************
|
******************************"
for nomatch in temp2['UnmatchedFaces']:
print "Faces either don't match or are a poor ma
|
tch"
return
#Main Code Section Starts Here
face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
#nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
camera = PiCamera()
camera.resolution = (640,480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640,480))
s3 = boto3.client('s3')
time.sleep(2)
#Clearing the buffer before loading the first image
rawCapture.truncate(0)
while True:
#time.sleep(1)
camera.capture(rawCapture, format="bgr")
img = rawCapture.array
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (21, 21), 0)
faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
# iterate over all identified faces and try to find eyes
for (x, y, w, h) in faces:
cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
roi_gray = gray[y:y+h, x:x+w]
roi_color = img[y:y+h, x:x+w]
#The code on the next three lines works and has been tested out
#Disabling it because it's not required for purposes of identification of faces
#eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
#for (ex,ey,ew,eh) in eyes:
#cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
#Detection of code for noses has not been validated or tested
#noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
#for (ex,ey,ew,eh) in noses:
# cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
#printing messages to the screen
print "At time "+time.strftime("%d/%m/%y-%H:%M:%S")+", found {0} faces in the picture!!!".format(len(faces))
#writing the image to the screen
font = cv2.FONT_HERSHEY_SIMPLEX
#cv2.putText(img, str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S")), (100,500), font, 4,(255,255,255),2)
cv2.putText(img, "DateTime
wateraccounting/wa | Collect/ETmonitor/Es_monthly.py | Python | apache-2.0 | 817 | 0.017136
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 03 09:36:48 2018
@author: tih
"""
import os
import sys
from DataAccess import DownloadData
def main(Dir, Startdate='', Enddate='', latlim=[-60, 70], lonlim=[-180, 180], Waitbar = 1):
"""
This function downloads monthly ETmonitor data
Keyword arguments:
Dir -- 'C:/file/to/path/'
Startdate -- 'yyyy-mm-dd'
Enddate -- 'yyyy-mm-dd'
latlim -- [ymin, ymax] (values must be between -60 and 70)
lonlim -- [xmin, xmax] (values must be between -180 and 180)
"""
    print '\nDownload monthly ETmonitor Soil Evaporation data for the period %s till %s' %(Startdate, Enddate)
Type = "es"
# Download data
DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Type, Waitbar)
if __name__ == '__main__':
main(sys.argv)
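A minimal invocation sketch (directory, dates, and extent are illustrative assumptions; left commented since it triggers a download):
# main('C:/file/to/path/', Startdate='2003-01-01', Enddate='2003-12-31',
#      latlim=[-10, 30], lonlim=[25, 40], Waitbar=1)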
sonymoon/algorithm | src/main/python/geeksforgeeks/tree/max-path-sum.py | Python | apache-2.0 | 707 | 0.010463
# -*- coding: utf-8 -*-
from Node import Node
res = float("-inf")
# Only the top-level root node can have a path that goes through both the left and the right subtree
def maxPathSum(root):
if not root:
return 0
max_l = maxPathSum(root.left)
max_r = maxPathSum(root.right)
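    # max_single: best downward path starting at this node (the node alone if both
    # child sums are negative); a parent may extend this value upward.
    # max_top: best path bending through this node, joining both subtrees; it can
    # only be a final answer, never extended further up.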
max_single = max(max(max_l, max_r) + root.data, root.data)
max_top = max(max_single, max_l + max_r + root.data)
global res
res = max(res, max_top)
return max_single
# Driver program
root = Node(10)
root.left = Node(2)
root.right = Node(10);
root.left.left = Node(20);
root.left.right = Node(1);
root.right.right = Node(-25);
root.right.right.left = Node(3);
root.right.right.right = Node(4);
maxPathSum(root)
print res
FedoraScientific/salome-smesh | doc/salome/examples/quality_controls_ex20.py | Python | lgpl-2.1 | 708 | 0.018362
# Aspect Ratio 3D
import SMESH_mechanic_tetra
import SMESH
smesh = SMESH_mechanic_tetra.smesh
mesh = SMESH_mechanic_tetra.mesh
salome = SMESH_mechanic_tetra.salome
# Criterion : ASPECT RATIO 3D > 4.5
ar_margin = 4.5
aFilter = smesh.GetFilter(SMESH.VOLUME, SMESH.FT_AspectRatio3D, SMESH.FT_MoreThan, ar_margin)
anIds = mesh.GetIdsFromFilter(aFilter)
# print the result
print "Criterion: Aspect Ratio 3D > ", ar_margin, " Nb = ", len(anIds)
j = 1
for i in range(len(anIds)):
if j > 20: j = 1; print ""
print anIds[i],
j = j + 1
pass
print ""
# create a group
aGroup = mesh.CreateEmptyGroup(SMESH.VOLUME, "Aspect Ratio 3D > " + `ar_margin`)
aGroup.Add(anIds)
salome.sg.updateObjBrowser(1)
vileopratama/vitech | src/addons/l10n_bo/__openerp__.py | Python | mit | 656 | 0
# -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (https://cubicerp.com).
{
"name": "Bolivia - Accounting",
"version": "2.0",
"description": """
Bolivian accounting chart and tax localization.
Plan contable boliviano e impuestos de acuerdo a disposiciones vigentes
""",
"author": "Cubic ERP",
"website": "https://cubicERP.com",
'category': 'Localization',
"depends": ["account"],
"data": [
"l10n_bo_chart.xml",
"account_tax.xml",
"account_chart_template.yml",
],
"installable": True,
}
PoornimaNayak/autotest-client-tests | linux-tools/perl_Carp_Clan/perl_Carp_Clan.py | Python | gpl-2.0 | 1,270 | 0.004724
#!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class perl_Carp_Clan(test.test):
"""
Autotest module for testing basic functionality
of perl_Carp_Clan
@author Kumuda G <kumuda.govind@in.ibm.com> ##
"""
version = 1
nfail = 0
path = ''
def initialize(self):
"""
        Sets the overall failure counter for the test.
"""
self.nfail = 0
        logging.info('\n Test initialize successfully')
def run_once(self, test_path=''):
"""
Trigger test run
"""
try:
os.environ["LTPBIN"] = "%s/shared" %(test_path)
ret_val = subprocess.Popen(['./perl-Carp-Clan.sh'], cwd="%s/perl_Carp_Clan" %(test_path))
ret_val.communicate()
if ret_val.returncode != 0:
self.nfail += 1
except error.CmdError, e:
self.nfail += 1
logging.error("Test Failed: %s", e)
def postprocess(self):
if self.nfail != 0:
logging.info('\n nfails is non-zero')
raise error.TestError('\nTest failed')
else:
logging.info('\n Test completed successfully ')
erramuzpe/C-PAC | CPAC/GUI/interface/windows/dataconfig_window.py | Python | bsd-3-clause | 16,681 | 0.020562
import wx
from ..utils.generic_class import GenericClass
from ..utils.constants import control, dtype
import os
import yaml
import pkg_resources as p
import sys
ID_RUN_EXT = 11
ID_RUN_MEXT = 12
class DataConfig(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(self, parent, title="CPAC - Subject List Setup", size = (820,450))
mainSizer = wx.BoxSizer(wx.VERTICAL)
self.panel = wx.Panel(self)
self.window = wx.ScrolledWindow(self.panel)
self.page = GenericClass(self.window, "Subject List Setup")
self.page.add(label= "Anatomical File Path Template ",
control = control.TEXT_BOX,
name = "anatomicalTemplate",
type = dtype.STR,
comment = "File Path Template for Anatomical Files\n\n"
"Replace the site- and subject-level directories with %s.\n\n"
"See User Guide for more detailed instructions.",
values ="",
style= wx.EXPAND | wx.ALL,
size = (532,-1))
self.page.add(label= "Functional File Path Template ",
control = control.TEXT_BOX,
name = "functionalTemplate",
type = dtype.STR,
comment = "File Path Template for Functional Files\n\n"
"Replace the site- and subject-level directories with %s.\n\n"
"See User Guide for more detailed instructions.",
values ="",
style= wx.EXPAND | wx.ALL,
size = (532,-1))
self.page.add(label="Subjects to Include (Optional) ",
control=control.COMBO_BOX,
name = "subjectList",
type = dtype.COMBO,
comment = "Include only a sub-set of the subjects present in the folders defined above.\n\n"
"List subjects in this box (e.g., sub101, sub102) or provide the path to a\n"
"text file with one subject on each line.\n\n"
"If 'None' is specified, CPAC will include all subjects.",
values = "None")
self.page.add(label="Subjects to Exclude (Optional) ",
control=control.COMBO_BOX,
name = "exclusionSubjectList",
type = dtype.COMBO,
comment = "Exclude a sub-set of the subjects present in the folders defined above.\n\n"
"List subjects in this box (e.g., sub101, sub102) or provide the path to a\n"
"text file with one subject on each line.\n\n"
"If 'None' is specified, CPAC will not exclude any subjects.",
values = "None")
self.page.add(label= "Sites to Include (Optional) ",
control = control.TEXT_BOX,
name = "siteList",
type = dtype.STR,
comment = "Include only a sub-set of the sites present in the folders defined above.\n\n"
"List sites in this box (e.g., NYU, UCLA) or provide the path to a text\n"
"file with one site on each line.\n\n"
"If 'None' is specified, CPAC will include all sites.",
values ="None",
style= wx.EXPAND | wx.ALL,
size = (532,-1))
self.page.add(label="Scan Parameters File (Optional) ",
control=control.COMBO_BOX,
name = "scanParametersCSV",
type = dtype.COMBO,
comment = "Required for Slice Timing Correction.\n\n"
"Path to a .csv file containing information about scan acquisition parameters.\n\n"
"For instructions on how to create this file, see the User Guide.\n\n"
"If 'None' is specified, CPAC will skip Slice Timing Correction.",
values = "None")
self.page.add(label = "Output Directory ",
control = control.DIR_COMBO_BOX,
name = "outputSubjectListLocation",
type = dtype.STR,
comment = "Directory where CPAC should place subject list files.",
values = "")
self.page.add(label = "Subject List Name ",
control = control.TEXT_BOX,
name = "subjectListName",
type = dtype.STR,
comment = "A label to be appended to the generated " \
"subject list files.",
values = "",
style= wx.EXPAND | wx.ALL,
size = (300,-1))
self.page.set_sizer()
mainSizer.Add(self.window, 1, wx.EXPAND)
btnPanel = wx.Panel(self.panel, -1)
hbox = wx.BoxSizer(wx.HORIZONTAL)
self.multiscan = wx.CheckBox(btnPanel, -1, label = "Multiscan Data")
if 'linux' in sys.platform:
hbox.Add(self.multiscan,0, flag=wx.TOP, border=5)
else:
hbox.Add(self.multiscan, 0, flag=wx.RIGHT | wx.BOTTOM, border=5)
img = wx.Image(p.resource_filename('CPAC', 'GUI/resources/images/help.png'), wx.BITMAP_TYPE_ANY).ConvertToBitmap()
help = wx.BitmapButton(btnPanel, id=-1, bitmap=img,
pos=(10, 20), size = (img.GetWidth()+5, img.GetHeight()+5))
help.Bind(wx.EVT_BUTTON, self.onHelp)
if 'linux' in sys.platform:
hbox.Add(help, 0, flag = wx.TOP, border =5)
else:
hbox.Add(help, 0, flag=wx.RIGHT | wx.BOTTOM, border=5)
buffer2 = wx.StaticText(btnPanel, label = "\t")
hbox.Add(buffer2)
run_ext = wx.Button(btnPanel, ID_RUN_EXT, "Generate Subject Lists", (280,10), wx.DefaultSize, 0 )
self.Bind(wx.EVT_BUTTON, lambda event: self.save(event,'run'), id=ID_RUN_EXT)
hbox.Add( run_ext, 1, flag=wx.LEFT|wx.ALIGN_LEFT, border=10)
        buffer = wx.StaticText(btnPanel, label = "\t\t\t\t")
hbox.Add(buffer)
cancel = wx.Button(btnPanel, wx.ID_CANCEL, "Cancel",(220,10), wx.DefaultSize, 0 )
self.Bind(wx.EVT_BUTTON, self.cancel, id=wx.ID_CANCEL)
hbox.Add( cancel, 0, flag=wx.LEFT|wx.BOTTOM, border=5)
load = wx.Button(btnPanel, wx.ID_ADD, "Load Settings", (280,10), wx.DefaultSize, 0 )
self.Bind(wx.EVT_BUTTON, self.load, id=wx.ID_ADD)
hbox.Add(load, 0.6, flag=wx.LEFT|wx.BOTTOM, border=5)
save = wx.Button(btnPanel, wx.ID_SAVE, "Save Settings", (280,10), wx.DefaultSize, 0 )
self.Bind(wx.EVT_BUTTON, lambda event: self.save(event,'save'), id=wx.ID_SAVE)
hbox.Add(save, 0.6, flag=wx.LEFT|wx.BOTTOM, border=5)
btnPanel.SetSizer(hbox)
mainSizer.Add(btnPanel, 0.5, flag=wx.ALIGN_RIGHT|wx.RIGHT, border=20)
self.panel.SetSizer(mainSizer)
self.Show()
def cancel(self, event):
self.Close()
def onHelp(self, event):
comment = "Check the box only if the scans have different slice timing infomation."
wx.TipWindow(self, comment, 500)
def run(self, config):
try:
try:
config_map = yaml.load(open(config, 'r'))
out_location = os.path.join(\
os.path.realpath(config_map.get('outputSubjectListLocation')),\
'CPAC_subject_list_%s.yml' % config_map.get('subjectListName')[0])
except Exception, e:
print "Error loading data config file", e
raise
print "executing extract data"
multiscan = self.multiscan.IsChecked()
import CPAC
thumbor/remotecv | tests/test_image_processor.py | Python | mit | 1,812 | 0.000552
from unittest import TestCase
from preggy import expect
from remotecv.image_processor import ImageProcessor
from tests import read_fixture
class ImageProcessorTest(TestCase):
def test_when_detector_unavailable(self):
image_processor = ImageProcessor()
with expect.error_to_happen(AttributeError):
image_processor.detect("feat", read_fixture("broken.jpg"))
def test_when_image_is_huge(self):
image_processor = ImageProcessor()
detect = image_processor.detect("all", read_fixture("huge_image.jpg"))
expect(detect).Not.to_be_empty()
def test_with_multiple_detectors(self):
image_processor = ImageProcessor()
detect = image_processor.detect(
"face+profile+glass", read_fixture("one_face.jpg")
)
expect(detect).Not.to_be_empty()
def test_when_not_animated_gif(self):
image_processor = ImageProcessor()
detect = image_processor.detect("face", read_fixture("one_face.gif"))
expect(detect).Not.to_be_empty()
def test_when_animated_gif(self):
image_processor = ImageProcessor()
detect = image_processor.detect("all", read_fixture("animated.gif"))
expect(detect).to_be_empty()
def test_feature_detection(self):
image_processor = ImageProcessor()
        detect = image_processor.detect("feature", read_fixture("one_face.jpg"))
expect(detect).Not.to_be_empty()
def test_should_be_empty_when_invalid_image(self):
image_processor = ImageProcessor()
detect = image_processor.detect("all", b"asdas")
expect(detect).to_be_empty()
    def test_should_ignore_gif(self):
image_processor = ImageProcessor()
detect = image_processor.detect("all", b"asdas")
expect(detect).to_be_empty()
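A minimal usage sketch of the class under test (the file name is an illustrative assumption):
processor = ImageProcessor()
with open("one_face.jpg", "rb") as f:
    points = processor.detect("face", f.read())
# points is empty when nothing is detected, per the tests above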
AdamISZ/CoinSwapCS | coinswap/csjson.py | Python | gpl-3.0 | 12,462 | 0.004574
import os
import binascii
import json
from txjsonrpc.web.jsonrpc import Proxy
from txjsonrpc.web import jsonrpc
from twisted.web import server
from twisted.internet import reactor
try:
from OpenSSL import SSL
from twisted.internet import ssl
except:
pass
from .base import (get_current_blockheight, CoinSwapPublicParameters,
prepare_ecdsa_msg, FeePolicy)
from .alice import CoinSwapAlice
from .carol import CoinSwapCarol
from .configure import get_log, cs_single, get_network
from twisted.internet import defer
cslog = get_log()
def verifyCallback(connection, x509, errnum, errdepth, ok):
if not ok:
cslog.debug('invalid server cert: %s' % x509.get_subject())
return False
return True
class AltCtxFactory(ssl.ClientContextFactory):
def getContext(self):
ctx = ssl.ClientContextFactory.getContext(self)
#TODO: replace VERIFY_NONE with VERIFY_PEER when we have
#a real server with a valid CA signed cert. If that doesn't
#work it'll be possible to use self-signed certs, if they're distributed,
#by placing the cert.pem file and location in the config and uncommenting
#the ctx.load_verify_locations line.
#As it stands this is using non-authenticated certs, meaning MITM exposed.
ctx.set_verify(SSL.VERIFY_NONE, verifyCallback)
#ctx.load_verify_locations("/path/to/cert.pem")
return ctx
class CoinSwapJSONRPCClient(object):
"""A class encapsulating Alice's json rpc client.
"""
#Keys map to states as per description of CoinswapAlice
method_names = {0: "handshake",
1: "negotiate",
3: "tx0id_hx_tx2sig",
5: "sigtx3",
9: "secret",
12: "sigtx4"}
def __init__(self, host, port, json_callback=None, backout_callback=None,
usessl=False):
self.host = host
self.port = int(port)
#Callback fired on receiving response to send()
self.json_callback = json_callback
#Callback fired on receiving any response failure
self.backout_callback = backout_callback
if usessl:
self.proxy = Proxy('https://' + host + ":" + str(port) + "/",
ssl_ctx_factory=AltCtxFactory)
else:
self.proxy = Proxy('http://' + host + ":" + str(port) + "/")
def error(self, errmsg):
"""error callback implies we must back out at this point.
Note that this includes stateless queries, as any malformed
or non-response must be interpreted as malicious.
"""
self.backout_callback(str(errmsg))
def send_poll(self, method, callback, noncesig, sessionid, *args):
"""Stateless queries during the run use this call, and provide
their own callback for the response.
"""
d = self.proxy.callRemote("coinswap", sessionid, noncesig, method, *args)
d.addCallback(callback).addErrback(self.error)
def send_poll_unsigned(self, method, callback, *args):
"""Stateless queries outside of a coinswap run use
this query method; no nonce, sessionid or signature needed.
"""
d = self.proxy.callRemote(method, *args)
d.addCallback(callback).addErrback(self.error)
def send(self, method, *args):
"""Stateful queries share the same callback: the state machine
update function.
"""
d = self.proxy.callRemote(method, *args)
d.addCallback(self.json_callback).addErrback(self.error)
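A minimal client-side sketch (host, port, and callbacks are illustrative assumptions):
def on_response(result):
    cslog.info("server replied: " + str(result))
def on_backout(errmsg):
    cslog.info("backing out: " + errmsg)
client = CoinSwapJSONRPCClient("127.0.0.1", 9000,
                               json_callback=on_response,
                               backout_callback=on_backout)
client.send_poll_unsigned("status", on_response)
reactor.run()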
class CoinSwapCarolJSONServer(jsonrpc.JSONRPC):
def __init__(self, wallet, testing_mode=False, carol_class=CoinSwapCarol,
fail_carol_state=None):
self.testing_mode = testing_mode
self.wallet = wallet
self.carol_class = carol_class
self.fail_carol_state = fail_carol_state
self.carols = {}
self.fee_policy = FeePolicy(cs_single().config)
self.update_status()
jsonrpc.JSONRPC.__init__(self)
def render(self, request):
"""In order to respond appropriately to ill formed requests (no content,
or ill-formed content), we return a null response early in this class,
overriding render() from the base class, which unfortunately does not
correctly handle e.g. browser GET requests.
"""
request.content.seek(0, 0)
content = request.content.read()
try:
json.loads(content)
except:
return "Nothing here."
return jsonrpc.JSONRPC.render(self, request)
def refresh_carols(self):
"""Remove CoinSwapCarol instances that are flagged complete from
the running dict."""
to_remove = []
for k, v in self.carols.iteritems():
if v.completed:
to_remove.append(k)
for x in to_remove:
self.carols.pop(x, None)
cslog.info("Removed session: " + str(x) + " from tracking (finished).")
def update_status(self):
#initialise status variables from config; some are updated dynamically
c = cs_single().config
source_chain = c.get("SERVER", "source_chain")
destination_chain = c.get("SERVER", "destination_chain")
minimum_amount = c.getint("SERVER", "minimum_amount")
maximum_amount = c.getint("SERVER", "maximum_amount")
serverlockrange = c.get("SERVER", "server_locktime_range")
serverlockmin, serverlockmax = [int(x) for x in serverlockrange.split(",")]
clientlockrange = c.get("SERVER", "client_locktime_range")
clientlockmin, clientlockmax = [int(x) for x in clientlockrange.split(",")]
tx01_confirm_range = c.get("SERVER", "tx01_confirm_range")
tx01_confirm_min, tx01_confirm_max = [int(
x) for x in tx01_confirm_range.split(",")]
lock0 = c.getint("TIMEOUT", "lock_client")
status = {}
self.refresh_carols()
if len(self.carols.keys()) >= c.getint("SERVER",
"maximum_concurrent_coinswaps"):
status["busy"] = True
else:
status["busy"] = False
#real-time balance query; we source only from mixdepth 0
available_funds = self.wallet.get_balance_by_mixdepth(verbose=False)[0]
        #The conservativeness here (switch off if total avail < max)
#is required for privacy (otherwise we leak our wallet balance in
#this costless query). Note that the wallet can be funded while
#the server is running.
if available_funds < maximum_amount:
status["busy"] = True
status["maximum_amount"] = -1
else:
status["maximum_amount"] = maximum_amount
status["minimum_amount"] = minimum_amount
status["sou
|
rce_chain"] = source_chain
status["destination_chain"] = destination_chain
status["cscs_version"] = cs_single().CSCS_VERSION
status["fee_policy"] = self.fee_policy.get_policy()
status["locktimes"] = {"lock_server": {"min": serverlockmin,
"max": serverlockmax},
"lock_client": {"min": clientlockmin,
"max": clientlockmax}}
status["tx01_confirm_wait"] = {"min": tx01_confirm_min,
"max": tx01_confirm_max}
status["testnet"] = True if get_network() else False
return status
def jsonrpc_status(self):
"""This can be polled at any time.
The call to get_balance_by_mixdepth does not involve sync,
so is not resource intensive.
"""
return self.update_status()
def set_carol(self, carol, sessionid):
"""Once a CoinSwapCarol object has been initiated, its session id
has been set, so it can be added to the dict.
"""
#should be computationally infeasible; note *we* set this.
assert sessionid not in self.carols
self.carols[sessionid] = carol
return True
def
ecrespo/pyurldownload_file | pyurldownload_file.py | Python | gpl-3.0 | 906 | 0.027594
#!/usr/bin/python3
import requests
from requests.exceptions import ConnectionError, HTTPError
import bs4
import sys
url = input('Enter URL -> ')
pattern = input('Enter search pattern-> ')
html = requests.get(url)
dir_download = "./download/"
if html.text.find("400 Bad Request") != -1:
print ("Bad Request")
sys.exit()
soup = bs4.BeautifulSoup(html.text, "html.parser")
tags = soup('a')
for tag in tags:
url_path = tag.get('href')
text = str(url_path)
if text.find(pattern) == -1:
continue
domain = url.split("http://")[1].split("/")[0]
urldownload = "http://" + domain + text
print ("Retrieve: {0},{1}".format(tag.contents[0],urldownload))
file = text.split("/")[-1]
path_and_file = dir_download + file
try:
r = requests.get(urldownload)
        with open(path_and_file, "wb") as f:
f.write(r.content)
except ConnectionError:
print("Can't
|
download file: {0}".format(file))
except HTTPError:
print("Can't download file: {0}".format(file))
f.close()
Haikson/virtenviro | virtenviro/registration/urls_new.py | Python | apache-2.0 | 396 | 0.005051
# ~*~ coding: utf-8 ~*~
from django.conf.urls import *
import virtenviro.registration.views
import django.contrib.auth.views
urlpatterns = [
url(r'^signup/$', virtenviro.registration.views.signup),
url(r'^login/$', django.contrib.auth.views.login, {"template_name": "virtenviro/accounts/login.html"}),
    url(r'^logout/$', django.contrib.auth.views.logout_then_login, name='logout'),
]
airodactyl/qutebrowser | qutebrowser/browser/webkit/webkittab.py | Python | gpl-3.0 | 30,341 | 0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over our (QtWebKit) WebView."""
import re
import functools
import xml.etree.ElementTree
from PyQt5.QtCore import (pyqtSlot, Qt, QEvent, QUrl, QPoint, QTimer, QSizeF,
QSize)
from PyQt5.QtGui import QKeyEvent, QIcon
from PyQt5.QtWebKitWidgets import QWebPage, QWebFrame
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtPrintSupport import QPrinter
from qutebrowser.browser import browsertab, shared
from qutebrowser.browser.webkit import (webview, tabhistory, webkitelem,
webkitsettings)
from qutebrowser.utils import qtutils, usertypes, utils, log, debug
from qutebrowser.qt import sip
class WebKitAction(browsertab.AbstractAction):
"""QtWebKit implementations related to web actions."""
action_class = QWebPage
action_base = QWebPage.WebAction
def exit_fullscreen(self):
raise browsertab.UnsupportedOperationError
def save_page(self):
"""Save the current page."""
raise browsertab.UnsupportedOperationError
def show_source(self, pygments=False):
self._show_source_pygments()
class WebKitPrinting(browsertab.AbstractPrinting):
"""QtWebKit implementations related to printing."""
def check_pdf_support(self):
pass
def check_printer_support(self):
pass
def check_preview_support(self):
pass
def to_pdf(self, filename):
printer = QPrinter()
printer.setOutputFileName(filename)
self.to_printer(printer)
def to_printer(self, printer, callback=None):
self._widget.print(printer)
# Can't find out whether there was an error...
if callback is not None:
callback(True)
class WebKitSearch(browsertab.AbstractSearch):
"""QtWebKit implementations related to searching on the page."""
def __init__(self, parent=None):
super().__init__(parent)
self._flags = QWebPage.FindFlags(0)
    def _call_cb(self, callback, found, text, flags, caller):
"""Call the given callback if it's non-None.
Delays the call via a QTimer so the website is re-rendered in between.
Args:
callback: What to call
found: If the text was found
text: The text searched for
flags: The flags searched with
caller: Name of the caller.
"""
        found_text = 'found' if found else "didn't find"
# Removing FindWrapsAroundDocument to get the same logging as with
# QtWebEngine
debug_flags = debug.qflags_key(
QWebPage, flags & ~QWebPage.FindWrapsAroundDocument,
klass=QWebPage.FindFlag)
if debug_flags != '0x0000':
flag_text = 'with flags {}'.format(debug_flags)
else:
flag_text = ''
log.webview.debug(' '.join([caller, found_text, text, flag_text])
.strip())
if callback is not None:
QTimer.singleShot(0, functools.partial(callback, found))
def clear(self):
self.search_displayed = False
# We first clear the marked text, then the highlights
self._widget.findText('')
self._widget.findText('', QWebPage.HighlightAllOccurrences)
def search(self, text, *, ignore_case='never', reverse=False,
result_cb=None):
# Don't go to next entry on duplicate search
if self.text == text and self.search_displayed:
log.webview.debug("Ignoring duplicate search request"
" for {}".format(text))
return
# Clear old search results, this is done automatically on QtWebEngine.
self.clear()
self.text = text
self.search_displayed = True
self._flags = QWebPage.FindWrapsAroundDocument
if self._is_case_sensitive(ignore_case):
self._flags |= QWebPage.FindCaseSensitively
if reverse:
self._flags |= QWebPage.FindBackward
# We actually search *twice* - once to highlight everything, then again
# to get a mark so we can navigate.
found = self._widget.findText(text, self._flags)
self._widget.findText(text,
self._flags | QWebPage.HighlightAllOccurrences)
self._call_cb(result_cb, found, text, self._flags, 'search')
def next_result(self, *, result_cb=None):
self.search_displayed = True
found = self._widget.findText(self.text, self._flags)
self._call_cb(result_cb, found, self.text, self._flags, 'next_result')
def prev_result(self, *, result_cb=None):
self.search_displayed = True
# The int() here makes sure we get a copy of the flags.
flags = QWebPage.FindFlags(int(self._flags))
if flags & QWebPage.FindBackward:
flags &= ~QWebPage.FindBackward
else:
flags |= QWebPage.FindBackward
found = self._widget.findText(self.text, flags)
self._call_cb(result_cb, found, self.text, flags, 'prev_result')
class WebKitCaret(browsertab.AbstractCaret):
"""QtWebKit implementations related to moving the cursor/selection."""
@pyqtSlot(usertypes.KeyMode)
def _on_mode_entered(self, mode):
if mode != usertypes.KeyMode.caret:
return
self.selection_enabled = self._widget.hasSelection()
self.selection_toggled.emit(self.selection_enabled)
settings = self._widget.settings()
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, True)
if self._widget.isVisible():
# Sometimes the caret isn't immediately visible, but unfocusing
# and refocusing it fixes that.
self._widget.clearFocus()
self._widget.setFocus(Qt.OtherFocusReason)
# Move the caret to the first element in the viewport if there
# isn't any text which is already selected.
#
# Note: We can't use hasSelection() here, as that's always
# true in caret mode.
if not self.selection_enabled:
self._widget.page().currentFrame().evaluateJavaScript(
utils.read_file('javascript/position_caret.js'))
@pyqtSlot(usertypes.KeyMode)
def _on_mode_left(self, _mode):
settings = self._widget.settings()
if settings.testAttribute(QWebSettings.CaretBrowsingEnabled):
if self.selection_enabled and self._widget.hasSelection():
# Remove selection if it exists
self._widget.triggerPageAction(QWebPage.MoveToNextChar)
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, False)
self.selection_enabled = False
def move_to_next_line(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToNextLine
else:
act = QWebPage.SelectNextLine
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_prev_line(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousLine
else:
act = QWebPage.SelectPreviousLine
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_next_char(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToNextChar
        else:
            act = QWebPage.SelectNextChar
        for _ in range(count):
            self._widget.triggerPageAction(act)
abawchen/leetcode | solutions/062_unique_paths.py | Python | mit | 837 | 0.002389
# A robot is located at the top-left corner of a m x n grid (marked 'Start' in the diagram below).
# The robot can only move either down or right at any point in time. The robot is trying to reach the bottom-right corner of the grid (marked 'Finish' in the diagram below).
# How many possible unique paths are there?
# Above is a 3 x 7 grid. How many possible unique paths are there?
# Note: m and n will be at most 100.
class Solution:
# @param {integer} m
# @param {integer} n
# @return {integer}
def uniquePaths(self, m, n):
# (x+y)!/x!y!
if m == 0 or n == 0:
return 0
paths = [[1 for x in range(n)] for x in range(m)]
for i in range(1, m):
for j in range(1, n):
                paths[i][j] = paths[i-1][j] + paths[i][j-1]
return paths[-1][-1]
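The (x+y)!/x!y! comment above points at a closed form; a minimal sketch of that alternative, standard library only:
import math
def unique_paths_closed_form(m, n):
    # C(m+n-2, m-1): choose which of the m+n-2 moves go down
    if m == 0 or n == 0:
        return 0
    return math.factorial(m + n - 2) // (math.factorial(m - 1) * math.factorial(n - 1))
# unique_paths_closed_form(3, 7) == 28, matching the DP version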
tuxfux-hlp-notes/python-batches | archieves/batch-62/oop/movie.py | Python | gpl-3.0 | 538 | 0.031599
#!/usr/bin/python
# inbuilt - Exception - Python exception - parent class
# InvalidAgeException - Child class
class InvalidAgeException(Exception):
def __init__(self,age):
self.age = age
def validate_age(age):
if age > 18:
return "welcome to the movie!!!"
else:
raise InvalidAgeException(age)
if __name__ == '__main__':
age = input("please enter your age:")
try:
validate_age(age)
except InvalidAgeException as e:
print "Buddy!! Go home and sleep you are still {}".format(e.age)
else:
print validate_age(age)
SimplyAutomationized/python-snap7 | example/read_multi.py | Python | mit | 2,021 | 0.000495
"""
Example ussage of the read_multi_vars function
This was tested against a S7-319 CPU
"""
import ctypes
import struct
import snap7
from snap7.common import check_error
from snap7.snap7types import S7DataItem, S7AreaDB, S7WLByte
client = snap7.client.Client()
client.connect('10.100.5.2', 0, 2)
data_items = (S7DataItem * 3)()
data_items[0].Area = ctypes.c_int32(S7AreaDB)
data_items[0].WordLen = ctypes.c_int32(S7WLByte)
data_items[0].Result = ctypes.c_int32(0)
data_items[0].DBNumber = ctypes.c_int32(200)
data_items[0].Start = ctypes.c_int32(16)
data_items[0].Amount = ctypes.c_int32(4) # reading a REAL, 4 bytes
data_items[1].Area = ctypes.c_int32(S7AreaDB)
data_items[1].WordLen = ctypes.c_int32(S7WLByte)
data_items[1].Result = ctypes.c_int32(0)
data_items[1].DBNumber = ctypes.c_int32(200)
data_items[1].Start = ctypes.c_int32(12)
data_items[1].Amount = ctypes.c_int32(4) # reading a REAL, 4 bytes
data_items[2].Area = ctypes.c_int32(S7AreaDB)
data_items[2].WordLen = ctypes.c_int32(S7WLByte)
data_items[2].Result = ctypes.c_int32(0)
data_items[2].DBNumber = ctypes.c_int32(200)
data_items[2].Start = ctypes.c_int32(2)
data_items[2].Amount = ctypes.c_int32(2) # reading an INT, 2 bytes
# create buffers to receive the data
# use the Amount attribute on each item to size the buffer
for di in data_items:
# create the buffer
    buffer = ctypes.create_string_buffer(di.Amount)
# cast the pointer to the buffer to the required type
pBuffer = ctypes.cast(ctypes.pointer(buffer),
ctypes.POINTER(ctypes.c_uint8))
di.pData = pBuffer
result, data_items = client.read_multi_vars(data_items)
for di in data_items:
check_error(di.Result)
# struct formats
fmts = ['>f', '>f', '>h']
# unpack and print the result of each read
for i in range(0, len(data_items)):
fmt = fmts[i]
di = data_items[i]
foo = ''.join([chr(di.pData[i]) for i in range(0, di.Amount)])
fnum = struct.unpack(fmt, foo)[0]
print(fnum)
client.disconnect()
client.destroy()
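Note: the ''.join(chr(...)) unpacking above is Python 2 specific; struct.unpack on Python 3 requires bytes. A minimal portable sketch over the same data items:
for i in range(len(data_items)):
    di = data_items[i]
    raw = bytes(bytearray(di.pData[j] for j in range(di.Amount)))
    print(struct.unpack(fmts[i], raw)[0])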
zacherytapp/wedding | weddingapp/apps/gallery/admin.py | Python | bsd-3-clause | 269 | 0.022305
from django.contrib import admin
from .models import Gallery
class GalleryAdmin(admin.ModelAdmin):
list_display = ('title', 'gallery_image', 'alt_text', 'display_order', 'visibility')
search_fields = ['title', 'alt_text']
admin.site.register(Gallery, GalleryAdmin)
rvs/gpdb | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/lib/dbstate.py | Python | apache-2.0 | 3,081 | 0.009737
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import tinctest
from mpp.lib.PSQL import PSQL
from mpp.lib.gpdbverify import GpdbVerify
from mpp.lib.config import GPDBConfig
from mpp.models import MPPTestCase
class DbStateClass(MPPTestCase):
def __init__(self,methodName,config=None):
if config is not None:
self.config = config
else:
self.config = GPDBConfig()
self.gpverify = GpdbVerify(config=self.config)
super(DbStateClass,self).__init__(methodName)
def check_system(self):
'''
@summary: Check whether the system is up and sync. Exit out if not
'''
cmd ="select count(*) from gp_segment_configuration where content<> -1 ;"
count_all = PSQL.run_sql_command(cmd, flags ='-q -t', dbname='postgres')
cmd ="select count(*) from gp_segment_configuration where content<> -1 and mode = 's' and status = 'u';"
count_up_and_sync = PSQL.run_sql_command(cmd, flags ='-q -t', dbname='postgres')
if count_all.strip() != count_up_and_sync.strip() :
raise Exception('The cluster is not in up/sync ............')
else:
tinctest.logger.info("\n Starting New Test: System is up and in sync .........")
def check_catalog(self,dbname=None, alldb=True, online=False, testname=None, outputFile=None, host=None, port=None):
'''1. Run gpcheckcat'''
(errorCode, hasError, gpcheckcat_output, repairScriptDir) = self.gpverify.gpcheckcat(dbname=dbname, alldb=alldb, online=online, testname=testname, outputFile=outputFile, host=host, port=port)
if errorCode != 0:
raise Exception('GpCheckcat failed with errcode %s '% (errorCode))
def check_mirrorintegrity(self, master=False):
'''Runs checkmirrorintegrity(default), check_mastermirrorintegrity(when master=True) '''
(checkmirror, fix_outfile) = self.gpverify.gpcheckmirrorseg(master=master)
if not checkmirror:
self.fail('Checkmirrorseg failed. Fix file location : %s' %fix_outfile)
tinctest.logger.info('Successfully completed integrity check')
def run_validation(self):
'''
1. gpcheckcat
2. checkmirrorintegrity
3. check_mastermirrorintegrity
'''
self.check_catalog()
self.check_mirrorintegrity()
if self.config.has_master_mirror():
self.check_mirrorintegrity(master=True)
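A minimal usage sketch (the methodName-style constructor argument follows the unittest convention above and is an assumption):
state = DbStateClass('run_validation')
state.check_system()
state.run_validation()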
lgp171188/xpens | xpens/app/migrations/0005_make_description_optional_provide_default_value.py | Python | agpl-3.0 | 606 | 0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0004_add_default_ordering_of_categories_by_name'),
]
operations = [
migrations.AlterField(
model_name='category',
name='description',
field=models.TextField(default='', blank=True),
),
migrations.AlterField(
model_name='expense',
name='description',
field=models.TextField(default='', blank=True),
),
]
giraldeau/python-augeas | setup.py | Python | lgpl-2.1 | 420 | 0.045238
#!/usr/bin/env python
"""
setup.py file for augeas
"""
import os
prefix = os.environ.get("prefix", "/usr")
from distutils.core import setup
setup (name = 'python-augeas',
version = '0.3.0',
       author = "Harald Hoyer",
author_email = "augeas-devel@redhat.com",
       description = """Python bindings for Augeas""",
py_modules = [ "augeas" ],
url = "http://augeas.net/",
)
google/iree | build_tools/kokoro/gcp_ubuntu/cmake/linux/riscv64/tests/lit.cfg.py | Python | apache-2.0 | 756 | 0
import os
import sys
import lit.formats
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
config.name = "RISC-V tests"
config.test_format = lit.formats.ShTest(True)
config.suffixes = [".run"]
config.environment["BUILD_RISCV_DIR"] = os.ge
|
tenv("BUILD_RISCV_DIR")
config.environment["TEST_CMD"] = (
"%s -cpu rv64,x-v=true,x-k=true,vlen=256,elen=64,vext_spec=v1.0"
" -L %s/sysroot " %
(os.getenv("QEMU_RV64_BIN"), os.getenv("RISCV_TOOLCHAIN_ROOT")))
config.environment["TEST_MODULE_CMD"] = (
"%s %s/iree/tools/iree-run-module --driver=dylib" %
    (config.environment["TEST_CMD"], os.getenv("BUILD_RISCV_DIR")))
config.test_exec_root = os.getenv("BUILD_RISCV_DIR") + \
"/tests"
rduivenvoorde/QGIS | tests/src/python/test_python_repr.py | Python | gpl-2.0 | 15,282 | 0.003807
# -*- coding: utf-8 -*-
"""QGIS Unit tests for core additions
From build dir, run: ctest -R PyPythonRepr -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Denis Rouzaud'
__date__ = '05.06.2018'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
from PyQt5.QtCore import QVariant
from qgis.testing import unittest, start_app
from qgis.core import (
QgsGeometry,
QgsPoint,
QgsPointXY,
QgsCircle,
QgsCircularString,
    QgsCompoundCurve,
QgsCurvePolygon,
QgsEllipse,
QgsLineString,
QgsMultiCurve,
QgsRectangle,
QgsExpression,
QgsField,
QgsError,
QgsMimeDataUtils,
QgsVector,
    QgsVector3D,
QgsVectorLayer,
QgsReferencedPointXY,
QgsReferencedRectangle,
QgsCoordinateReferenceSystem,
QgsCoordinateTransform,
QgsProject,
QgsClassificationRange,
QgsBookmark,
QgsLayoutMeasurement,
QgsLayoutPoint,
QgsLayoutSize,
QgsUnitTypes,
QgsConditionalStyle,
QgsTableCell,
QgsProperty,
QgsVertexId,
QgsReferencedGeometry,
QgsProviderRegistry,
QgsRasterLayer,
QgsAnnotationLayer,
QgsPointCloudLayer,
QgsVectorTileLayer,
QgsMeshLayer,
QgsDataSourceUri,
QgsDoubleRange,
QgsIntRange,
QgsDefaultValue
)
start_app()
class TestPython__repr__(unittest.TestCase):
def testQgsGeometryRepr(self):
g = QgsGeometry()
self.assertEqual(g.__repr__(), '<QgsGeometry: null>')
p = QgsPointXY(123.456, 987.654)
g = QgsGeometry.fromPointXY(p)
self.assertTrue(g.__repr__().startswith('<QgsGeometry: Point (123.456'))
g = QgsGeometry(QgsLineString([QgsPoint(0, 2), QgsPoint(1010, 2)]))
g = g.densifyByCount(1000)
# long strings must be truncated for performance -- otherwise they flood the console/first aid output
self.assertTrue(g.__repr__().startswith('<QgsGeometry: LineString (0 2,'))
self.assertTrue(
g.__repr__().endswith('...>'))
self.assertEqual(len(g.__repr__()), 1018)
def testQgsPointRepr(self):
p = QgsPoint(123.456, 987.654, 100)
self.assertTrue(p.__repr__().startswith('<QgsPoint: PointZ (123.456'))
def testQgsPointXYRepr(self):
p = QgsPointXY(123.456, 987.654)
self.assertTrue(p.__repr__().startswith('<QgsPointXY: POINT(123.456'))
def testQgsReferencedPointXYRepr(self):
p = QgsReferencedPointXY(QgsPointXY(123.456, 987.654), QgsCoordinateReferenceSystem('EPSG:4326'))
self.assertTrue(p.__repr__().startswith('<QgsReferencedPointXY: POINT(123.456'))
self.assertTrue(p.__repr__().endswith('(EPSG:4326)>'))
def testQgsCircleRepr(self):
c = QgsCircle(QgsPoint(1, 1), 2.0)
self.assertEqual(c.__repr__(), '<QgsCircle: Circle (Center: Point (1 1), Radius: 2, Azimuth: 0)>')
def testQgsCircularstringRepr(self):
cs = QgsCircularString(QgsPoint(1, 2), QgsPoint(2, 3), QgsPoint(3, 4))
self.assertEqual(cs.__repr__(), '<QgsCircularString: CircularString (1 2, 2 3, 3 4)>')
def testQgsClassificationRange(self):
c = QgsClassificationRange('from 1 to 2', 1, 2)
self.assertEqual(c.__repr__(), "<QgsClassificationRange: 'from 1 to 2'>")
def testQgsCompoundcurveRepr(self):
cs = QgsCircularString(QgsPoint(1, 2), QgsPoint(2, 3), QgsPoint(3, 4))
cc = QgsCompoundCurve()
cc.addCurve(cs)
self.assertEqual(cc.__repr__(), '<QgsCompoundCurve: CompoundCurve (CircularString (1 2, 2 3, 3 4))>')
def testQgsCurvepolygonRepr(self):
cp = QgsCurvePolygon()
cs = QgsCircularString(QgsPoint(1, 10), QgsPoint(2, 11), QgsPoint(1, 10))
cp.setExteriorRing(cs)
self.assertEqual(cp.__repr__(), '<QgsCurvePolygon: CurvePolygon (CircularString (1 10, 2 11, 1 10))>')
def testQgsEllipseRepr(self):
e = QgsEllipse(QgsPoint(1, 2), 2.0, 3.0)
self.assertEqual(e.__repr__(), '<QgsEllipse: Ellipse (Center: Point (1 2), Semi-Major Axis: 3, Semi-Minor Axis: 2, Azimuth: 180)>')
def testQgsLineStringRepr(self):
ls = QgsLineString([QgsPoint(10, 2), QgsPoint(10, 1), QgsPoint(5, 1)])
self.assertEqual(ls.__repr__(), '<QgsLineString: LineString (10 2, 10 1, 5 1)>')
def testQgsMulticurveRepr(self):
mc = QgsMultiCurve()
cs = QgsCircularString(QgsPoint(1, 10), QgsPoint(2, 11), QgsPoint(3, 12))
mc.addGeometry(cs)
cs2 = QgsCircularString(QgsPoint(4, 20), QgsPoint(5, 22), QgsPoint(6, 24))
mc.addGeometry(cs2)
self.assertEqual(mc.__repr__(), '<QgsMultiCurve: MultiCurve (CircularString (1 10, 2 11, 3 12),CircularString (4 20, 5 22, 6 24))>')
def testQgsMultilineStringRepr(self):
ml = QgsGeometry.fromMultiPolylineXY(
[
[QgsPointXY(0, 0), QgsPointXY(1, 0), QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 0), ],
[QgsPointXY(3, 0), QgsPointXY(3, 1), QgsPointXY(5, 1), QgsPointXY(5, 0), QgsPointXY(6, 0), ]
]
)
self.assertEqual(ml.constGet().__repr__(), '<QgsMultiLineString: MultiLineString ((0 0, 1 0, 1 1, 2 1, 2 0),(3 0, 3 1, 5 1, 5 0, 6 0))>')
def testQgsMultiPointRepr(self):
wkt = "MultiPoint ((10 30),(40 20),(30 10),(20 10))"
mp = QgsGeometry.fromWkt(wkt)
self.assertEqual(mp.constGet().__repr__(), '<QgsMultiPoint: MultiPoint ((10 30),(40 20),(30 10),(20 10))>')
def testQgsMultipolygonRepr(self):
mp = QgsGeometry.fromMultiPolygonXY([
[[QgsPointXY(1, 1),
QgsPointXY(2, 2),
QgsPointXY(1, 2),
QgsPointXY(1, 1)]],
[[QgsPointXY(2, 2),
QgsPointXY(3, 3),
QgsPointXY(3, 1),
QgsPointXY(2, 2)]]
])
self.assertEqual(mp.constGet().__repr__(), '<QgsMultiPolygon: MultiPolygon (((1 1, 2 2, 1 2, 1 1)),((2 2, 3 3, 3 1, 2 2)))>')
def testQgsPolygonRepr(self):
p = QgsGeometry.fromPolygonXY(
[[QgsPointXY(0, 0),
QgsPointXY(2, 0),
QgsPointXY(2, 2),
QgsPointXY(0, 2),
QgsPointXY(0, 0)]])
self.assertEqual(p.constGet().__repr__(), '<QgsPolygon: Polygon ((0 0, 2 0, 2 2, 0 2, 0 0))>')
def testQgsRectangleRepr(self):
r = QgsRectangle(1, 2, 3, 4)
self.assertEqual(r.__repr__(), '<QgsRectangle: 1 2, 3 4>')
def testQgsReferencedRectangleRepr(self):
r = QgsReferencedRectangle(QgsRectangle(1, 2, 3, 4), QgsCoordinateReferenceSystem('EPSG:4326'))
self.assertEqual(r.__repr__(), '<QgsReferencedRectangle: 1 2, 3 4 (EPSG:4326)>')
def testQgsReferencedGeometryRepr(self):
g = QgsReferencedGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)), QgsCoordinateReferenceSystem('EPSG:4326'))
self.assertEqual(g.__repr__(), '<QgsReferencedGeometry: Point (1 2) (EPSG:4326)>')
def testQgsCoordinateReferenceSystem(self):
crs = QgsCoordinateReferenceSystem()
self.assertEqual(crs.__repr__(), '<QgsCoordinateReferenceSystem: invalid>')
crs = QgsCoordinateReferenceSystem('EPSG:4326')
self.assertEqual(crs.__repr__(), '<QgsCoordinateReferenceSystem: EPSG:4326>')
crs.setCoordinateEpoch(2021.3)
self.assertEqual(crs.__repr__(), '<QgsCoordinateReferenceSystem: EPSG:4326 @ 2021.3>')
crs = QgsCoordinateReferenceSystem('EPSG:3111')
self.assertEqual(crs.__repr__(), '<QgsCoordinateReferenceSystem: EPSG:3111>')
def testQgsCoordinateTransform(self):
xform = QgsCoordinateTransform()
self.assertEqual(xform.__repr__(), '<QgsCoordinateTransform: NULL to NULL>')
xform = QgsCoordinateTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem(), QgsProject.instance())
self.assertEqual(xform.__repr__(), '<QgsCoordinateTransform: EPSG:4326 to NULL>')
        xform = QgsCoordinateTransform(QgsCoordinateReferenceSystem(), QgsCoordinateReferenceSystem('EPSG:4326'), QgsProject.instance())
        self.assertEqual(xform.__repr__(), '<QgsCoordinateTransform: NULL to EPSG:4326>')
Starch/paperwork | src/paperwork/deps.py | Python | gpl-3.0 | 7,285 | 0.000549
#!/usr/bin/env python3
import locale
import os
import sys
try:
# suppress warnings from GI
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Poppler', '0.18')
gi.require_version('PangoCairo', '1.0')
except:
pass
try:
from gi.repository import GLib
from gi.repository import Gtk
g_gtk_available = True
except Exception as exc:
g_gtk_available = False
"""
Some modules/libraries required by Paperwork cannot be installed with pip or
easy_install. So we will just help the user detecting what is missing and what
must be installed
"""
LANGUAGES = {
None: {
'aspell': 'en',
'tesseract': 'eng',
},
'fr': {
'aspell': 'fr',
'tesseract': 'fra',
},
'de': {
'aspell': 'de',
'tesseract': 'deu',
},
'en': {
'aspell': 'en',
'tesseract': 'eng',
},
}
DEFAULT_LANG = {
'aspell': '<your language>',
'tesseract': '<your language>',
}
MODULES = [
(
'Gtk', 'gi.repository.Gtk',
{
'debian': 'gir1.2-gtk-3.0',
'fedora': 'gtk3',
'gentoo': 'x11-libs/gtk+',
'linuxmint': 'gir1.2-gtk-3.0',
'ubuntu': 'gir1.2-gtk-3.0',
'suse': 'python-gtk',
},
),
]
DATA_FILES = [
(
"Gnome symbolic icons"
" (/usr/share/icons/gnome/(...)/go-previous-symbolic.svg",
[
"/usr/share/icons/gnome/scalable/actions/go-previous-symbolic.svg",
"/usr/local/share/icons/gnome/scalable/"
"actions/go-previous-symbolic.svg",
],
{
'debian': 'gnome-icon-theme-symbolic',
'ubuntu': 'gnome-icon-theme-symbolic',
'fedora': 'gnome-icon-theme-symbolic',
}
),
]
def get_language():
lang = locale.getdefaultlocale()[0]
if lang:
lang = lang[:2]
if lang in LANGUAGES:
return LANGUAGES[lang]
print(
"[WARNING] Unable to figure out the exact language package to install"
)
return DEFAULT_LANG
def find_missing_modules():
"""
look for dependency that setuptools cannot check or that are too painful to
install with setuptools
"""
missing_modules = []
for module in MODULES:
try:
__import__(module[1])
except ImportError:
missing_modules.append(module)
return missing_modules
def find_missing_ocr(lang):
"""
OCR tools are a little bit more tricky
"""
missing = []
try:
from pyocr import pyocr
ocr_tools = pyocr.get_available_tools()
except ImportError:
        print(
"[WARNING] Couldn't import Pyocr. Will assume OCR tool is not"
" installed yet"
)
ocr_tools = []
if len(ocr_tools) > 0:
langs = ocr_tools[0].get_available_languages()
else:
langs = []
missing.append(
(
'Tesseract', '(none)',
{
'debian': 'tesseract-ocr',
'fedora': 'tesseract',
'gentoo': 'app-text/tesseract',
'linuxmint': 'tesseract-ocr',
'ubuntu': 'tesseract-ocr',
},
)
)
if (len(langs) <= 0 or lang['tesseract'] not in langs):
missing.append(
(
'Tesseract language data', '(none)',
{
'debian': ('tesseract-ocr-%s' % lang['tesseract']),
'fedora': ('tesseract-langpack-%s' % lang['tesseract']),
'linuxmint': ('tesseract-ocr-%s' % lang['tesseract']),
'ubuntu': ('tesseract-ocr-%s' % lang['tesseract']),
},
)
)
return missing
def find_missing_dict(lang):
if os.name == "nt":
return []
import enchant
missing = []
try:
enchant.request_dict(lang['aspell'])
except:
missing.append(
(
'Dictionary', '(none)',
{
                'debian': ('aspell-%s' % lang['aspell']),
'fedora': ('aspell-%s' % lang['aspell']),
'gentoo': ('aspell-%s' % lang['aspell']),
'linuxmint': ('aspell-%s' % lang['aspell']),
'ubuntu': ('aspell-%s' % lang['aspell']),
}
)
)
return missing
def _check_cairo():
from gi.repository import Gtk
class CheckCairo(object):
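        # Minimal probe (reading of the intent): if the "draw" signal handler
        # below ever runs, cairo drawing through GI works; if the GLib timeout
        # has to kill the main loop instead, python-gi-cairo is assumed to be
        # missing.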
def __init__(self):
self.test_successful = False
def on_draw(self, widget, cairo_ctx):
self.test_successful = True
Gtk.main_quit()
return False
def quit(self):
try:
Gtk.main_quit()
except Exception as exc:
print("FAILED TO STOP GTK !")
print("ASSUMING python-gi-cairo is not installed")
print("Exception was: {}".format(exc))
sys.exit(1)
check = CheckCairo()
try:
from gi.repository import GLib
window = Gtk.Window()
da = Gtk.DrawingArea()
da.set_size_request(200, 200)
da.connect("draw", check.on_draw)
window.add(da)
da.queue_draw()
window.show_all()
GLib.timeout_add(2000, check.quit)
Gtk.main()
window.set_visible(False)
while Gtk.events_pending():
Gtk.main_iteration()
except Exception:
pass
return check.test_successful
def check_cairo():
missing = []
if not g_gtk_available:
success = False
else:
success = _check_cairo()
if not success:
missing.append(
(
'python-gi-cairo', '(none)',
{
'debian': 'python3-gi-cairo',
'linuxmint': 'python3-gi-cairo',
'ubuntu': 'python3-gi-cairo',
},
)
)
return missing
def check_sane():
import pyinsane2
missing = []
try:
pyinsane2.init()
pyinsane2.exit()
except:
missing.append(
(
'libsane', '(none)',
{
'debian': 'libsane',
'fedora': 'sane-backends',
'linuxmint': 'libsane',
'ubuntu': 'libsane',
},
)
)
return missing
def find_missing_data_files():
missings = []
for (user_name, file_paths, packages) in DATA_FILES:
missing = True
for file_path in file_paths:
if os.path.exists(file_path):
missing = False
break
if missing:
missings.append((user_name, "(none)", packages))
return missings
def find_missing_dependencies():
lang = get_language()
# missing_modules is an array of
# (common_name, python_name, { "distrib": "package" })
missing = []
missing += find_missing_modules()
missing += find_missing_ocr(lang)
missing += find_missing_dict(lang)
missing += find_missing_data_files()
missing += check_cairo()
missing += check_sane()
return missing
|
Phlos/LASIF_scripts
|
lasif_code/ad_src_tf_phase_misfit.py
|
Python
|
gpl-3.0
| 13,183
| 0.00129
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
An implementation of the time frequency phase misfit and adjoint source after
Fichtner et al. (2008).
:copyright:
Lion Krischer (krischer@geophysik.uni-muenchen.de), 2013
:license:
GNU General Public License, Version 3
(http://www.gnu.org/copyleft/gpl.html)
"""
import warnings
import numexpr as ne
import numpy as np
import obspy
from obspy.signal.interpolation import lanczos_interpolation
from lasif import LASIFAdjointSourceCalculationError
from lasif.adjoint_sources import time_frequency, utils
eps = np.spacing(1)
def adsrc_tf_phase_misfit(t, data, synthetic, min_period, max_period,
plot=False, max_criterion=7.0):
"""
:rtype: dictionary
:returns: Return a dictionary with three keys:
* adjoint_source: The calculated adjoint source as a numpy array
* misfit: The misfit value
* messages: A list of strings giving additional hints to what happened
in the calculation.
"""
# Assumes that t starts at 0. Pad your data if that is not the case -
# Parts with zeros are essentially skipped making it fairly efficient.
assert t[0] == 0
messages = []
# Internal sampling interval. Some explanations for this "magic" number.
# LASIF's preprocessing allows no frequency content with smaller periods
# than min_period / 2.2 (see function_templates/preprocesssing_function.py
# for details). Assuming most users don't change this, this is equal to
# the Nyquist frequency and the largest possible sampling interval to
# catch everything is min_period / 4.4.
#
    # The current choice is historic as changing it does (very slightly) change
    # the calculated misfit and we don't want to disturb inversions in
    # progress. The difference is likely minimal in any case. We might have
    # some aliasing into the lower frequencies but the filters coupled with
# the TF-domain weighting will get rid of them in essentially all
# realistically occurring cases.
dt_new = max(float(int(min_period / 3.0)), t[1] - t[0])
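    # Worked example (illustrative numbers, not from the code): with
    # min_period = 40 s and an original sampling of t[1] - t[0] = 0.5 s,
    # int(40.0 / 3.0) = 13, so dt_new = 13 s. The original interval is only
    # kept when the data are already sampled more coarsely than that.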
# New time axis
ti = utils.matlab_range(t[0], t[-1], dt_new)
    # Make sure it's odd - that avoids having to deal with some issues
    # regarding frequency bin interpolation. Now positive and negative
    # frequencies will always be all symmetric. Data is assumed to be
    # tapered in any case so no problems are to be expected.
if not len(ti) % 2:
ti = ti[:-1]
# Interpolate both signals to the new time axis - this massively speeds
# up the whole procedure as most signals are highly oversampled. The
# adjoint source at the end is re-interpolated to the original sampling
# points.
original_data = data
original_synthetic = synthetic
data = lanczos_interpolation(
data=data, old_start=t[0], old_dt=t[1] - t[0], new_start=t[0],
new_dt=dt_new, new_npts=len(ti), a=8, window="blackmann")
synthetic = lanczos_interpolation(
data=synthetic, old_start=t[0], old_dt=t[1] - t[0], new_start=t[0],
new_dt=dt_new, new_npts=len(ti), a=8, window="blackmann")
original_time = t
t = ti
# -------------------------------------------------------------------------
# Compute time-frequency representations
# Window width is twice the minimal period.
width = 2.0 * min_period
# Compute time-frequency representation of the cross-correlation
_, _, tf_cc = time_frequency.time_frequency_cc_difference(
t, data, synthetic, width)
# Compute the time-frequency representation of the synthetic
tau, nu, tf_synth = time_frequency.time_frequency_transform(t, synthetic,
width)
# -------------------------------------------------------------------------
# compute tf window and weighting function
# noise taper: down-weight tf amplitudes that are very low
tf_cc_abs = np.abs(tf_cc)
m = tf_cc_abs.max() / 10.0 # NOQA
weight = ne.evaluate("1.0 - exp(-(tf_cc_abs ** 2) / (m ** 2))")
nu_t = nu.T
    # highpass filter (periods longer than max_period are suppressed
# exponentially)
weight *= (1.0 - np.exp(-(nu_t * max_period) ** 2))
# lowpass filter (periods shorter than min_period are suppressed
# exponentially)
nu_t_large = np.zeros(nu_t.shape)
nu_t_small = np.zeros(nu_t.shape)
thres = (nu_t <= 1.0 / min_period)
nu_t_large[np.invert(thres)] = 1.0
nu_t_small[thres] = 1.0
weight *= (np.exp(-10.0 * np.abs(nu_t * min_period - 1.0)) * nu_t_large +
nu_t_small)
# normalisation
    weight /= weight.max()
# computation of phase difference, make quality checks and misfit ---------
# Compute the phase difference.
# DP = np.imag(np.log(m + tf_cc / (2 * m + np.abs(tf_cc))))
DP = np.angle(tf_cc)
# Attempt to detect phase jumps by taking the derivatives in time and
    # frequency direction. 0.7 is an empirical value.
abs_weighted_DP = np.abs(weight * DP)
_x = abs_weighted_DP.max() # NOQA
test_field = ne.evaluate("weight * DP / _x")
criterion_1 = np.sum([np.abs(np.diff(test_field, axis=0)) > 0.7])
criterion_2 = np.sum([np.abs(np.diff(test_field, axis=1)) > 0.7])
criterion = np.sum([criterion_1, criterion_2])
# Compute the phase misfit
dnu = nu[1] - nu[0]
i = ne.evaluate("sum(weight ** 2 * DP ** 2)")
# inserted by Nienke Blom, 22-11-2016
weighted_DP = ne.evaluate("weight * DP")
phasediff_integral = float(ne.evaluate("sum(weighted_DP * dnu * dt_new)"))
mean_delay = np.mean(weighted_DP)
wDP = weighted_DP.flatten()
wDP_thresh = wDP[abs(wDP) > 0.1 * max(wDP, key=lambda x: abs(x))]
median_delay = np.median(wDP_thresh)
max_delay = max(wDP, key=lambda x: abs(x))
phase_misfit = np.sqrt(i * dt_new * dnu)
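    # In continuous form this is the weighted L2 norm of the phase difference
    # over the time-frequency plane (cf. Fichtner et al., 2008):
    #     E_p = sqrt( \int \int W(t, nu)^2 * DP(t, nu)^2 dt dnu )
    # which the sum above approximates on the discrete tau/nu grid with cell
    # size dt_new * dnu.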
# Sanity check. Should not occur.
if np.isnan(phase_misfit):
msg = "The phase misfit is NaN."
raise LASIFAdjointSourceCalculationError(msg)
    # The misfit can still be computed, even if no adjoint source is
# available.
if criterion > max_criterion:
warning = ("Possible phase jump detected. Misfit included. No "
"adjoint source computed. Criterion: %.1f - Max allowed "
"criterion: %.1f" % (criterion, max_criterion))
warnings.warn(warning)
messages.append(warning)
ret_dict = {
"adjoint_source": None,
"misfit_value": phase_misfit,
"details": {"messages": messages,
#"weighted_DP": weighted_DP,
#"weight": weight,
#"DP": DP,
"mean_delay": mean_delay, # added NAB 30-8-2017
"phasediff_integral": phasediff_integral, # added NAB 22-11-2016, edited 30-8-2017
"median_delay": median_delay, # added NAB 22-11-2016, edited 30-8-2017
"max_delay": max_delay} # added NAB 31-8-2017
}
return ret_dict
# Make kernel for the inverse tf transform
idp = ne.evaluate(
"weight ** 2 * DP * tf_synth / (m + abs(tf_synth) ** 2)")
# Invert tf transform and make adjoint source
ad_src, it, I = time_frequency.itfa(tau, idp, width)
# Interpolate both signals to the new time axis
ad_src = lanczos_interpolation(
# Pad with a couple of zeros in case some where lost in all
# these resampling operations. The first sample should not
# change the time.
data=np.concatenate([ad_src.imag, np.zeros(100)]),
old_start=tau[0],
old_dt=tau[1] - tau[0],
new_start=original_time[0],
new_dt=original_time[1] - original_time[0],
new_npts=len(original_time), a=8, window="blackmann")
# Divide by the misfit and change sign.
ad_src /= (phase_misfit + eps)
ad_src = -1.0 * np.diff(ad_src) / (t[1] - t[0])
# Taper at both ends. Exploit ObsPy to not have to deal with all the
# nasty things.
ad_src = \
obspy.Trace(ad_src).taper(max_percentage=0
|
non-official-SD/base
|
src/tools/ceguidemo/menumanager.py
|
Python
|
gpl-3.0
| 375
| 0.045333
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Menu manager.
A dictionary of menu instance() functions (see MenuFactory)
"""
class MenuManager:
menus = { }
def register(menuName, menuFactory):
MenuManager.menus[menuName] = menuFactory
    register = staticmethod(register)
def get(menuName):
return MenuManager.menus[menuName]()
get = staticmethod(get)
|
mizhi/tictic
|
xmpp.py
|
Python
|
gpl-2.0
| 3,475
| 0.010072
|
# Copyright (c) - 2013 Mitchell Peabody.
# See COPYRIGHT.txt and LICENSE.txt in the root of this project.
from functools import wraps
import inspect
import logging
from google.appengine.api import xmpp, users
from google.appengine.ext.webapp import xmpp_handlers
from model import User, Variable, Value
from nl import parser
logger = logging.getLogger()
def extract_email(sender):
"""XMPP sender is <address>
|
/<client>. I think. This may be wrong, but that's
what it appears like in GoogleTalk.
:param xmpp_sender: The sender of the message.
:returns: email of the sender
"""
email = sender.split("/")
return email[0]
def describe(description, params = ""):
def _describe(func):
setattr(func, "__command_help__",
"/{0} {1}\n\t{2}".format(func.__name__.replace("_command", ""),
params,
description))
return func
return _describe
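# Usage sketch (hypothetical command, not part of this module): decorating
#     @describe("Echoes the message back.", params="<text>")
#     def echo_command(self, message=None): ...
# sets __command_help__ to "/echo <text>\n\tEchoes the message back.", which
# help_command() below joins into the help reply.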
class XmppHandler(xmpp_handlers.CommandHandler):
@describe("Disremembers the user.")
def forget_command(self, message = None):
email = extract_email(message.sender)
try:
sender = users.User(email)
        except users.UserNotFoundError:
            message.reply("You don't seem to have an account that I can find.")
            return
appuser = User.all().filter("info = ", sender).get()
if appuser:
message.reply("Okay, I'm forgetting you, {sender}.".format(sender = sender.email()))
            for variable in appuser.variables:
for value in variable.values:
value.delete()
variable.delete()
appuser.delete()
else:
message.reply("I don't know you.")
@describe("Give help for the system.")
def help_command(self, message = None):
"""Returns a list of all the commands defined on this class.
"""
members = [x[1] for x in inspect.getmembers(self, inspect.ismethod)
if x[0].endswith("_command") and hasattr(x[1], "__command_help__")]
reply = "\n".join(map(lambda x: getattr(x, "__command_help__"),
members))
message.reply(reply)
def text_message(self, message):
email = extract_email(message.sender)
try:
sender = users.User(email)
        except users.UserNotFoundError:
            message.reply("You don't seem to have an account that I can find.")
            return
appuser = User.all().filter("info = ", sender).get()
if not appuser:
appuser = User(info = sender)
appuser.put()
try:
datum = parser.parse(message.body)
        except parser.ParseException as e:
            message.reply("I couldn't understand you. (Message was: {msg})".format(msg = e.message))
            return
variable = Variable.all().filter("name = ", datum["variable"]).get()
if not variable:
variable = Variable(name = datum["variable"], user = appuser)
variable.put()
value = Value(value = datum["value"], variable = variable)
value.put()
message.reply("I've logged variable {variable} as being {value}".format(sender = email,
variable = datum["variable"],
value = datum["value"]))
|
mairin/anaconda
|
pyanaconda/ntp.py
|
Python
|
gpl-2.0
| 6,933
| 0.001731
|
#
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vratislav Podzimek <vpodzime@redhat.com>
#
"""
Module facilitating the work with NTP servers and NTP daemon's configuration
"""
import re
import os
import tempfile
import shutil
import ntplib
import socket
from pyanaconda import isys
from pyanaconda.threads import threadMgr, AnacondaThread
from pyanaconda.constants import THREAD_SYNC_TIME_BASENAME
NTP_CONFIG_FILE = "/etc/chrony.conf"
#example line:
#server 0.fedora.pool.ntp.org iburst
SRV_LINE_REGEXP = re.compile(r"^\s*server\s*([-a-zA-Z.0-9]+)\s*[a-zA-Z]+\s*$")
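# For illustration, applied to the example line above:
#     SRV_LINE_REGEXP.match("server 0.fedora.pool.ntp.org iburst").group(1)
# returns '0.fedora.pool.ntp.org'.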
class NTPconfigError(Exception):
"""Exception class for NTP related problems"""
pass
def ntp_server_working(server):
"""
Tries to do an NTP request to the $server (timeout may take some time).
:param server: hostname or IP address of an NTP server
:type server: string
:return: True if the given server is reachable and working, False otherwise
:rtype: bool
"""
client = ntplib.NTPClient()
try:
client.request(server)
except ntplib.NTPException:
return False
# address related error
except socket.gaierror:
return False
# socket related error
# (including "Network is unreachable")
except socket.error:
return False
return True
def get_servers_from_config(conf_file_path=NTP_CONFIG_FILE,
srv_regexp=SRV_LINE_REGEXP):
"""
Goes through the chronyd's configuration file looking for lines starting
with 'server'.
:return: servers found in the chronyd's configuration
:rtype: list
"""
ret = list()
try:
with open(conf_file_path, "r") as conf_file:
for line in conf_file:
match = srv_regexp.match(line)
if match:
ret.append(match.group(1))
except IOError as ioerr:
msg = "Cannot open config file %s for reading (%s)" % (conf_file_path,
ioerr.strerror)
raise NTPconfigError(msg)
return ret
def save_servers_to_config(servers, conf_file_path=NTP_CONFIG_FILE,
srv_regexp=SRV_LINE_REGEXP, out_file_path=None):
"""
Replaces the servers defined in the chronyd's configuration file with
the given ones. If the out_file is not None, then it is used for the
resulting config.
:type servers: iterable
:param out_file_path: path to the file used for the resulting config
"""
try:
old_conf_file = open(conf_file_path, "r")
except IOError as ioerr:
msg = "Cannot open config file %s for reading (%s)" % (conf_file_path,
ioerr.strerror)
raise NTPconfigError(msg)
try:
if out_file_path:
new_conf_file = open(out_file_path, "w")
else:
(fildes, temp_path) = tempfile.mkstemp()
new_conf_file = os.fdopen(fildes, "w")
except IOError as ioerr:
if out_file_path:
msg = "Cannot open new config file %s "\
"for writing (%s)" % (out_file_path, ioerr.strerror)
else:
msg = "Cannot open temporary file %s "\
"for writing (%s)" % (temp_path, ioerr.strerror)
raise NTPconfigError(msg)
heading = "# These servers were defined in the installation:\n"
#write info about the origin of the following lines
new_conf_file.write(heading)
#write new servers
for server in servers:
new_conf_file.write("server " + server + " iburst\n")
#copy non-server lines from the old config and skip our heading
for line in old_conf_file:
if not srv_regexp.match(line) and line != heading:
new_conf_file.write(line)
old_conf_file.close()
new_conf_file.close()
if not out_file_path:
try:
stat = os.stat(conf_file_path)
# Use copy rather then move to get the correct selinux context
shutil.copy(temp_path, conf_file_path)
os.chmod(conf_file_path, stat.st_mode)
os.unlink(temp_path)
except OSError as oserr:
msg = "Cannot replace the old config with "\
"the new one (%s)" % (oserr.strerror)
raise NTPconfigError(msg)
def one_time_sync(server, callback=None):
"""
Synchronize the system time with a given NTP server. Note that this
function is blocking and will not return until the time gets synced or
    querying the server fails (it may take some time before timing out).
:param server: NTP server
:param callback: callback function to run after sync or failure
:type callback: a function taking one boolean argument (success)
:return: True if the sync was successful, False otherwise
"""
client = ntplib.NTPClient()
try:
results = client.request(server)
isys.set_system_time(int(results.tx_time))
success = True
except ntplib.NTPException:
success = False
except socket.gaierror:
success = False
if callback is not None:
callback(success)
return success
def one_time_sync_async(server, callback=None):
"""
Asynchronously synchronize the system time with a given NTP server. This
    function is non-blocking: it starts a new thread for the synchronization
    and returns. Use the callback argument to specify a function to be called
    when the new thread finishes, if needed.
:param server: NTP server
:param callback: callback function to run after sync or failure
:type callback: a function taking one boolean argument (success)
"""
thread_name = "%s_%s" % (THREAD_SYNC_TIME_BASENAME, server)
if threadMgr.get(thread_name):
#syncing with the same server running
return
threadMgr.add(AnacondaThread(name=thread_name, target=one_time_sync,
args=(server, callback)))
|
joke2k/faker
|
faker/providers/color/color.py
|
Python
|
mit
| 10,362
| 0.000676
|
"""Internal module for human-friendly color generation.
.. important::
End users of this library should not use anything in this module.
Code adapted from:
- https://github.com/davidmerfield/randomColor (CC0)
- https://github.com/kevinwuhoo/randomcolor-py (MIT License)
Additional reference from:
- https://en.wikipedia.org/wiki/HSL_and_HSV
"""
import colorsys
import math
import random
import sys
from typing import TYPE_CHECKING, Dict, Hashable, Optional, Sequence, Tuple
if TYPE_CHECKING:
from ...factory import Generator
from ...typing import HueType
COLOR_MAP: Dict[str, Dict[str, Sequence[Tuple[int, int]]]] = {
"monochrome": {
"hue_range": [(0, 0)],
"lower_bounds": [
(0, 0),
(100, 0),
],
},
"red": {
"hue_range": [(-26, 18)],
"lower_bounds": [
(20, 100),
(30, 92),
(40, 89),
(50, 85),
(60, 78),
(70, 70),
(80, 60),
(90, 55),
(100, 50),
],
},
"orange": {
"hue_range": [(19, 46)],
"lower_bounds": [
(20, 100),
(30, 93),
(40, 88),
(50, 86),
(60, 85),
(70, 70),
(100, 70),
],
},
"yellow": {
"hue_range": [(47, 62)],
"lower_bounds": [
(25, 100),
(40, 94),
(50, 89),
(60, 86),
(70, 84),
(80, 82),
(90, 80),
(100, 75),
],
},
"green": {
"hue_range": [(63, 178)],
"lower_bounds": [
(30, 100),
(40, 90),
(50, 85),
(60, 81),
(70, 74),
(80, 64),
(90, 50),
(100, 40),
],
},
"blue": {
"hue_range": [(179, 257)],
"lower_bounds": [
(20, 100),
(30, 86),
(40, 80),
(50, 74),
(60, 60),
(70, 52),
(80, 44),
(90, 39),
(100, 35),
],
},
"purple": {
"hue_range": [(258, 282)],
"lower_bounds": [
(20, 100),
(30, 87),
(40, 79),
(50, 70),
(60, 65),
(70, 59),
(80, 52),
(90, 45),
(100, 42),
],
},
"pink": {
"hue_range": [(283, 334)],
"lower_bounds": [
(20, 100),
(30, 90),
(40, 86),
(60, 84),
(80, 80),
(90, 75),
(100, 73),
],
},
}
class RandomColor:
"""Implement random color generation in a human-friendly way.
This helper class encapsulates the internal implementation and logic of the
:meth:`color() <faker.providers.color.Provider.color>` method.
"""
def __init__(self, generator: Optional["Generator"] = None, seed: Optional[Hashable] = None) -> None:
self.colormap = COLOR_MAP
# Option to specify a seed was not removed so this class
# can still be tested independently w/o generators
if generator:
self.random = generator.random
else:
self.seed = seed if seed else random.randint(0, sys.maxsize)
self.random = random.Random(self.seed)
for color_name, color_attrs in self.colormap.items():
lower_bounds: Sequence[Tuple[int, int]] = color_attrs["lower_bounds"]
s_min, b_max = lower_bounds[0]
s_max, b_min = lower_bounds[-1]
self.colormap[color_name]["saturation_range"] = [(s_min, s_max)]
self.colormap[color_name]["brightness_ra
|
nge"] = [(b_min, b_max)]
def generate(
self,
hue: Optional[HueType] = None,
luminosity: Optional[str] = None,
color_format: str = "hex",
) -> str:
"""Generate a color.
Whenever :meth:`color() <faker.providers.color.Provider.color>` is
called, the arguments used are simply passed into this method, and this
method handles the rest.
"""
# First we pick a hue (H)
h = self.pick_hue(hue)
# Then use H to determine saturation (S)
s = self.pick_saturation(h, hue, luminosity)
# Then use S and H to determine brightness (B).
b = self.pick_brightness(h, s, luminosity)
# Then we return the HSB color in the desired format
return self.set_format((h, s, b), color_format)
def pick_hue(self, hue: Optional[HueType]) -> int:
"""Return a numerical hue value."""
hue_ = self.random_within(self.get_hue_range(hue))
# Instead of storing red as two separate ranges,
# we group them, using negative numbers
if hue_ < 0:
hue_ += 360
return hue_
def pick_saturation(self, hue: int, hue_name: Optional[HueType], luminosity: Optional[str]) -> int:
"""Return a numerical saturation value."""
if luminosity is None:
luminosity = ""
if luminosity == "random":
return self.random_within((0, 100))
if isinstance(hue_name, str) and hue_name == "monochrome":
return 0
s_min, s_max = self.get_saturation_range(hue)
if luminosity == "bright":
s_min = 55
elif luminosity == "dark":
s_min = s_max - 10
elif luminosity == "light":
s_max = 55
return self.random_within((s_min, s_max))
def pick_brightness(self, h: int, s: int, luminosity: Optional[str]) -> int:
"""Return a numerical brightness value."""
if luminosity is None:
luminosity = ""
b_min = self.get_minimum_brightness(h, s)
b_max = 100
if luminosity == "dark":
b_max = b_min + 20
elif luminosity == "light":
b_min = (b_max + b_min) // 2
elif luminosity == "random":
b_min = 0
b_max = 100
return self.random_within((b_min, b_max))
def set_format(self, hsv: Tuple[int, int, int], color_format: str) -> str:
"""Handle conversion of HSV values into desired format."""
if color_format == "hsv":
color = f"hsv({hsv[0]}, {hsv[1]}, {hsv[2]})"
elif color_format == "hsl":
hsl = self.hsv_to_hsl(hsv)
color = f"hsl({hsl[0]}, {hsl[1]}, {hsl[2]})"
elif color_format == "rgb":
rgb = self.hsv_to_rgb(hsv)
color = f"rgb({rgb[0]}, {rgb[1]}, {rgb[2]})"
else:
rgb = self.hsv_to_rgb(hsv)
color = f"#{rgb[0]:02x}{rgb[1]:02x}{rgb[2]:02x}"
return color
def get_minimum_brightness(self, h: int, s: int) -> int:
"""Return the minimum allowed brightness for ``h`` and ``s``."""
lower_bounds: Sequence[Tuple[int, int]] = self.get_color_info(h)["lower_bounds"]
for i in range(len(lower_bounds) - 1):
s1, v1 = lower_bounds[i]
s2, v2 = lower_bounds[i + 1]
if s1 <= s <= s2:
m: float = (v2 - v1) / (s2 - s1)
b: float = v1 - m * s1
return int(m * s + b)
return 0
def get_hue_range(self, color_input: Optional[HueType]) -> Tuple[int, int]:
"""Return the hue range for a given ``color_input``."""
if isinstance(color_input, (int, float)) and 0 <= color_input <= 360:
color_input = int(color_input)
return (color_input, color_input)
elif isinstance(color_input, str) and color_input in self.colormap:
return self.colormap[color_input]["hue_range"][0]
elif color_input is None:
return (0, 360)
if isinstance(color_input, list):
color_input = tuple(color_input)
if (
isinstance(color_input, tuple)
and len(color_input) == 2
and all(isinstance(c, (float, int)) for c in color_input)
):
v1 = int(color_input[0])
v2 = int(color_input[1])
if v2 < v1:
|
cineuse/CNCGToolKit
|
apps/pw_multiScriptEditor/widgets/numBarWidget.py
|
Python
|
mit
| 3,802
| 0.00263
|
from PySide.QtCore import *
from PySide.QtGui import *
import managers
class lineNumberBarClass(QWidget):
def __init__(self, edit, parent=None):
QWidget.__init__(self, parent)
self.edit = edit
self.highest_line = 0
self.setMinimumWidth(30)
self.edit.installEventFilter(self)
self.edit.viewport().installEventFilter(self)
self.bg = None
def update(self, *args):
'''
Updates the number bar to display the current set of numbers.
Also, adjusts the width of the number bar if necessary.
'''
        # The + 7 is used to compensate for the current line being bold.
if managers.context == 'hou':
fontSize = self.edit.fs
else:
fontSize = self.edit.font().pointSize()
width = ((self.fontMetrics().width(str(self.highest_line)) + 7))*(fontSize/13.0)
if self.width() != width and width > 10:
self.setFixedWidth(width)
bg = self.palette().brush(QPalette.Normal,QPalette.Window).color().toHsv()
v = bg.value()
if v > 20:
v = int(bg.value()*0.8)
else:
v = int(bg.value()*1.1)
self.bg = QColor.fromHsv(bg.hue(), bg.saturation(), v)
QWidget.update(self, *args)
def paintEvent(self, event):
contents_y = self.edit.verticalScrollBar().value()
page_bottom = contents_y + self.edit.viewport().height()
font_metrics = self.fontMetrics()
current_block = self.edit.document().findBlock(self.edit.textCursor().position())
painter = QPainter(self)
line_count = 0
# Iterate over all text blocks in the document.
block = self.edit.document().begin()
        if managers.context == 'hou':
fontSize = self.edit.fs
font = QFont('monospace', fontSize*0.7)
offset = (font_metrics.ascent() + font_metrics.descent())/2
else:
fontSize = self.edit.font().pointSize()
font = painter.font()
font.setPixelSize(fontSize)
offset = font_metrics.ascent() + font_metrics.descent()
color = painter.pen().color()
painter.setFont(font)
align = Qt.AlignRight
while block.isValid():
line_count += 1
# The top left position of the block in the document
position = self.edit.document().documentLayout().blockBoundingRect(block).topLeft()
# Check if the position of the block is out side of the visible
# area.
if position.y() == page_bottom:
break
rec = QRect(0,
round(position.y()) - contents_y,
self.width()-5,
fontSize + offset)
# draw line rect
if block == current_block:
painter.setPen(Qt.NoPen)
painter.setBrush(QBrush(self.bg))
painter.drawRect(QRect(0,
round(position.y()) - contents_y,
self.width(),
fontSize + (offset/2) ))
# #restore color
painter.setPen(QPen(color))
# draw text
painter.drawText(rec, align, str(line_count))
# control points
block = block.next()
self.highest_line = line_count
painter.end()
QWidget.paintEvent(self, event)
def eventFilter(self, object, event):
# Update the line numbers for all events on the text edit and the viewport.
        # This is easier than connecting all necessary signals.
if object in (self.edit, self.edit.viewport()):
self.update()
return False
        return QWidget.eventFilter(self, object, event)
|
datagutten/comics
|
comics/comics/axecop.py
|
Python
|
agpl-3.0
| 766
| 0
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Axe Cop'
language = 'en'
url = 'http://www.axecop.com/'
start_date = '2010-01-02'
rights = 'Ethan Nicolle'
class Crawler(CrawlerBase):
history_capable_days = 60
schedule = 'Tu'
time_zone = 'US/Pacific'
headers = {'User-Agent': 'Mozilla/4.0'}
def crawl(self, pub_date):
feed = self.parse_feed('http://axecop.com/feed/')
for entry in feed.for_date(pub_date):
title = entry.title
url = entry.summary.src('img[src*="/wp-content/uploads/"]')
url = url.replace('-150x150', '')
return CrawlerImage(url, title)
|
astrofrog/ginga
|
ginga/LayerImage.py
|
Python
|
bsd-3-clause
| 7,562
| 0.003835
|
#
# LayerImage.py -- Abstraction of a generic layered image.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import numpy
import time
import Bunch
import BaseImage
class LayerImage(object):
"""Mixin class for BaseImage subclasses. Adds la
|
yers and alpha/rgb
compositing.
"""
def __init__(self):
self._layer = []
self.cnt = 0
self.compose_types = ('alpha', 'rgb')
self.compose = 'alpha'
def _insertLayer(self, idx, image, alpha=None, name=None):
if alpha == None:
alpha = 1.0
if name == None:
name = "layer%d" %
|
(self.cnt)
self.cnt += 1
bnch = Bunch.Bunch(image=image, alpha=alpha, name=name)
self._layer.insert(idx, bnch)
def insertLayer(self, idx, image, alpha=None, name=None,
compose=True):
self._insertLayer(idx, image, alpha=alpha, name=name)
if compose:
self.compose_layers()
def getLayer(self, idx):
return self._layer[idx]
def numLayers(self):
return len(self._layer)
def getShape(self, entity='image'):
maxdim = -1
for layer in self._layer:
if entity == 'image':
shape = layer[entity].get_shape()
elif entity == 'alpha':
item = layer.alpha
# If alpha is an image, get the array
if isinstance(item, BaseImage.BaseImage):
item = layer.alpha.get_data()
shape = numpy.shape(item)
else:
raise BaseImage.ImageError("entity '%s' not in (image, alpha)" % (
entity))
if len(shape) > maxdim:
maxdim = len(shape)
maxshape = shape
return maxshape
## def alpha_combine(self, src, alpha, dst):
## return (src * alpha) + (dst * (1.0 - alpha))
def mono2color(self, data):
return numpy.dstack((data, data, data))
def alpha_multiply(self, alpha, data, shape=None):
"""(alpha) can be a scalar or an array.
"""
# alpha can be a scalar or an array
if shape == None:
shape = data.shape
if len(data.shape) == 2:
res = alpha * data
# If desired shape is monochrome then return a mono image
# otherwise broadcast to a grey color image.
if len(shape) == 2:
return res
# note: in timing tests, dstack was not as efficient here...
#data = numpy.dstack((res, res, res))
data = numpy.empty(shape)
data[:, :, 0] = res[:, :]
data[:, :, 1] = res[:, :]
data[:, :, 2] = res[:, :]
return data
else:
# note: in timing tests, dstack was not as efficient here...
#res = numpy.dstack((data[:, :, 0] * alpha,
# data[:, :, 1] * alpha,
# data[:, :, 2] * alpha))
res = numpy.empty(shape)
res[:, :, 0] = data[:, :, 0] * alpha
res[:, :, 1] = data[:, :, 1] * alpha
res[:, :, 2] = data[:, :, 2] * alpha
return res
def alpha_compose(self):
start_time = time.time()
shape = self.getShape()
ht, wd = shape[:2]
# alpha can be a scalar or an array, prepare for the appropriate kind
ashape = self.getShape(entity='alpha')
if len(ashape) == 0:
alpha_used = 0.0
else:
alpha_used = numpy.zeros((ht, wd))
# result holds the result of the composition
result = numpy.zeros(shape)
cnt = 0
for layer in self._layer:
alpha = layer.alpha
if isinstance(alpha, BaseImage.BaseImage):
alpha = alpha.get_data()
alpha = numpy.clip((1.0 - alpha_used) * alpha, 0.0, 1.0)
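            # Front-to-back compositing: each layer only receives the opacity
            # that the layers before it have left over, i.e. an effective
            # weight of (1 - alpha_used) * alpha, clipped to [0, 1].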
#mina = numpy.min(alpha)
#print "cnt=%d mina=%f" % (cnt, mina)
data = layer.image.get_data()
result += self.alpha_multiply(alpha, data, shape=shape)
alpha_used += layer.alpha
#numpy.clip(alpha_used, 0.0, 1.0)
cnt += 1
self.set_data(result)
end_time = time.time()
print "alpha compose=%.4f sec" % (end_time - start_time)
# def rgb_compose(self):
# slices = []
# start_time = time.time()
# for i in xrange(len(self._layer)):
# layer = self.getLayer(i)
# data = self.alpha_multiply(layer.alpha, layer.image.get_data())
# slices.append(data)
# split_time = time.time()
# result = numpy.dstack(slices)
# end_time = time.time()
# self.set_data(result)
# print "rgb_compose alpha multiply=%.4f sec dstack=%.4f sec sec total=%.4f sec" % (
# split_time - start_time, end_time - split_time,
# end_time - start_time)
def rgb_compose(self):
num = self.numLayers()
layer = self.getLayer(0)
wd, ht = layer.image.get_size()
result = numpy.empty((ht, wd, num))
start_time = time.time()
for i in xrange(len(self._layer)):
layer = self.getLayer(i)
alpha = layer.alpha
if isinstance(alpha, BaseImage.BaseImage):
alpha = alpha.get_data()
data = self.alpha_multiply(alpha, layer.image.get_data())
result[:, :, i] = data[:, :]
end_time = time.time()
self.set_data(result)
print "rgb_compose total=%.4f sec" % (
end_time - start_time)
def rgb_decompose(self, image):
data = image.get_data()
shape = data.shape
if len(shape) == 2:
self._insertLayer(0, image)
else:
names = ("Red", "Green", "Blue")
alphas = (0.292, 0.594, 0.114)
for i in xrange(shape[2]):
print "count = %d" % i
imgslice = data[:, :, i]
#img = BaseImage.BaseImage(data_np=imgslice, logger=self.logger)
# Create the same type of image as we are decomposing
img = image.__class__(data_np=imgslice, logger=self.logger)
if i < 3:
name = names[i]
alpha = alphas[i]
else:
name = "layer%d" % i
alpha = 0.0
self._insertLayer(i, img, name=name, alpha=alpha)
print "composing layers"
self.compose_layers()
print "rgb decompose done"
def setComposeType(self, ctype):
assert ctype in self.compose_types, \
BaseImage.ImageError("Bad compose type '%s': must be one of %s" % (
ctype, str(self.compose_types)))
self.compose = ctype
self.compose_layers()
def setAlpha(self, lidx, val):
layer = self._layer[lidx]
layer.alpha = val
self.compose_layers()
def setAlphas(self, vals):
for lidx in xrange(len(vals)):
layer = self._layer[lidx]
layer.alpha = vals[lidx]
self.compose_layers()
def compose_layers(self):
if self.compose == 'rgb':
self.rgb_compose()
else:
self.alpha_compose()
#END
|
vandorjw/notes
|
vandorjw/vandorjw/urls.py
|
Python
|
mit
| 1,049
| 0.00286
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from .sitemap import BlogSitemap
from .views import RobotPageView, HumanPageView, GooglePageView
admin.autodiscover()
sitemaps = {
'blog': BlogSitemap,
}
urlpatterns = patterns('',
url(
regex=r"^robots\.txt$",
view=RobotPageView.as_view(),
name="site_robots",
),
url(
regex=r"^humans\.txt$",
view=HumanPageView.as_view(),
name="site_humans",
),
url(
regex=r"^google25e8e23e2bfc7d2c\.html$",
        view=GooglePageView.as_view(),
name="google_webmasters",
),
    url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^admin/', include(admin.site.urls)),
url(r'^browserid/', include('django_browserid.urls')),
url(r"^contact/$", include('contact.urls', namespace='contact', app_name='contact')),
url(r'^tinymce/', include('tinymce.urls')),
url(r"^", include('blog.urls', namespace='blog', app_name='blog')),
)
|
davidwboswell/documentation_autoresponse
|
lib/bedrock_util.py
|
Python
|
mpl-2.0
| 995
| 0.001005
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf import settings
from django.http import HttpResponseRedirect
import l10n_utils
def secure_required(view_func):
"""Decorator makes sure URL is accessed over https."""
def _wrapped_view_func(request, *args, **kwargs):
if not request.is_secure():
if not getattr(settings, 'DEBUG', True):
request_url = request.build_absolute_uri(request.get_full_path())
secure_url = request_url.replace('http://', 'https://')
return HttpResponseRedirect(secure_url)
return view_func(request, *args, **kwargs)
return _wrapped_view_func
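# Usage sketch (hypothetical view, not part of this module):
#     @secure_required
#     def donate(request): ...
# A request to http://example.org/donate would then be redirected (HTTP 302)
# to https://example.org/donate, unless settings.DEBUG is left enabled.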
def server_error_view(request, template_name='500.html'):
"""500 error handler that runs context processors."""
return l10n_utils.render(request, template_name)
|
blzr/enigma2
|
lib/python/Screens/PVRState.py
|
Python
|
gpl-2.0
| 224
| 0.017857
|
from Screen import Screen
from Components.Label import Label
class PVRState(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["state"] = Label(text="")
class TimeshiftState(PVRState):
pass
|
thoas/django-fairepart
|
fairepart/settings.py
|
Python
|
mit
| 354
| 0.002825
|
from django.conf import settings
BACKENDS = getattr(settings, 'FAIREPART_BACKENDS', (
'fairepart.backends.facebook.FacebookBackend',
'fairepart.backends.google.GoogleOAuth2Backend',
))
RELATION_LIST_PAGINATE_BY = getattr(settings, 'FAIREPART_RELATION_LIST_PAGINATE_BY', 5)
GOOGLE_APP_NAME = getattr(settings, 'FAIREPART_GOOGLE_APP_NAME', '')
|
mperignon/component_creator
|
topoflow_creator/topoflow/met_base.py
|
Python
|
gpl-2.0
| 109,509
| 0.01031
|
## Does "land_surface_air__latent_heat_flux" make sense? (2/5/13)
# Copyright (c) 2001-2014, Scott D. Peckham
#
# Sep 2014. Fixed sign error in update_bulk_richardson_number().
# Ability to compute separate P_snow and P_rain.
# Aug 2014. New CSDMS Standard Names and clean up.
# Nov 2013. Converted TopoFlow to a Python package.
#
# Jan 2013. Revised handling of input/output names.
# Oct 2012. CSDMS Standard Names (version 0.7.9) and BMI.
# May 2012. P is now a 1D array with one element and mutable,
# so any comp with ref to it can see it change.
# Jun 2010. update_net_shortwave_radiation(), etc.
# May 2010. Changes to initialize() and read_cfg_file().
# Aug 2009
# Jan 2009. Converted from IDL.
#
#-----------------------------------------------------------------------
# NOTES: This file defines a "base class" for meteorology
# components as well as any functions used by most or
# all meteorology methods. The methods of this class
# should be over-ridden as necessary for different
# methods of modeling meteorology.
#-----------------------------------------------------------------------
# Notes: Do we ever need to distinguish between a surface
# temperature and snow temperature (in the snow) ?
# Recall that a separate T_soil_x variable is used
# to compute Qc.
#
# Cp_snow is from NCAR CSM Flux Coupler web page
#
# rho_H2O is currently not adjustable with GUI. (still true?)
#
#-----------------------------------------------------------------------
#
# class met_component (inherits from BMI_base.py)
#
# get_attribute() # (10/26/11)
# get_input_var_names() # (5/15/12)
# get_output_var_names() # (5/15/12)
# get_var_name() # (5/15/12)
#      get_var_units()           # (5/15/12)
# ---------------------
# set_constants()
# initialize()
# update()
# finalize()
# ----------------------------
# set_computed_input_vars()
# initialize_computed_vars()
# ----------------------------
# update_P_integral()
# update_P_max()
#      update_P_rain()    # (9/14/14, new method)
# update_P_snow() # (9/14/14, new method)
# ------------------------------------
# update_bulk_richardson_number()
# update_bulk_aero_conductance()
# update_sensible_heat_flux()
# update_saturation_vapor_pressure()
# update_vapor_pressure()
# update_dew_point() # (7/6/10)
# update_precipitable_water_content() # (7/6/10)
# ------------------------------------
# update_latent_heat_flux()
# update_conduction_heat_flux()
# update_advection_heat_flux()
# ------------------------------------
# update_julian_day() # (7/1/10)
# update_net_shortwave_radiation() # (7/1/10)
# update_em_air() # (7/1/10)
# update_net_longwave_radiation() # (7/1/10)
# update_net_energy_flux() # ("Q_sum")
# ------------------------------------
# open_input_files()
# read_input_files()
# close_input_files()
# ------------------------------------
# update_outfile_names()
# open_output_files()
# write_output_files()
# close_output_files()
# save_grids()
# save_pixel_values()
#
# Functions:
# compare_em_air_methods()
#
#-----------------------------------------------------------------------
import numpy as np
import os
from topoflow.components import solar_funcs as solar
from topoflow.utils import BMI_base
from topoflow.utils import model_input
from topoflow.utils import model_output
from topoflow.utils import rtg_files
#-----------------------------------------------------------------------
class met_component( BMI_base.BMI_component ):
#-------------------------------------------------------------------
_att_map = {
'model_name': 'TopoFlow_Meteorology',
'version': '3.1',
'author_name': 'Scott D. Peckham',
'grid_type': 'uniform',
'time_step_type': 'fixed',
'step_method': 'explicit',
#-------------------------------------------------------------
'comp_name': 'Meteorology',
'model_family': 'TopoFlow',
'cfg_template_file': 'Meteorology.cfg.in',
'cfg_extension': '_meteorology.cfg',
'cmt_var_prefix': '/Meteorology/Input/Var/',
'gui_xml_file': '/home/csdms/cca/topoflow/3.1/src/share/cmt/gui/Meteorology.xml',
'dialog_title': 'Meteorology: Method 1 Parameters',
'time_units': 'seconds' }
#---------------------------------------------------------
# Note that SWE = "snow water equivalent", but it really
# just means "liquid_equivalent".
#---------------------------------------------------------
_input_var_names = [
'snowpack__z_mean_of_mass-per-volume_density', # rho_snow
'snowpack__depth', # h_snow
'snowpack__liquid-equivalent_depth', # h_swe
'snowpack__melt_volume_flux' ] # SM (MR used for ice?)
#-----------------------------------------------------------
# albedo, emissivity and transmittance are dimensionless.
#-----------------------------------------------------------
# "atmosphere_aerosol_dust__reduction_of_transmittance" vs.
# This TF parameter comes from Dingman, App. E, p. 604.
#-----------------------------------------------------------
# There is an Optical_Air_Mass function in solar_funcs.py.
# However, this quantity is not saved in comp state.
#
# "optical_path_length_ratio" vs. "optical_air_mass" OR
# "airmass_factor" OR "relative_airmass" OR
# "relative_optical_path_length"
#-----------------------------------------------------------
# Our term "liquid_equivalent_precipitation" is widely
# used on the Internet, with 374,000 Google hits.
#--------------------------------------------------------------
# Note: "bulk exchange coefficient" has 2460 Google hits.
# It is closely related to a "transfer coefficient"
# for mass, momentum or heat. There are no CF
# Standard Names with "bulk", "exchange" or "transfer".
#
# Zhang et al. (2000) use "bulk exchange coefficient" in a
# nonstandard way, with units of velocity vs. unitless.
#
# Dn = bulk exchange coeff for the conditions of
# neutral atmospheric stability (m/s)
# Dh = bulk exchange coeff for heat (m/s)
# De = bulk exchange coeff for vapor (m/s)
#---------------------------------------------------------------
# Now this component uses T_air to break the liquid-equivalent
# precip rate into separate P_rain and P_snow components.
# P_rain is used by channel_base.update_R()
# P_snow is used by snow_base.update_depth()
#---------------------------------------------------------------
_output_var_names = [
'atmosphere_aerosol_dust__reduction_of_transmittance', # dust_atten ##### (from GUI)
'atmosphere_air-column_water-vapor__liquid-equivalent_depth', # W_p ("precipitable depth")
'atmosphere_bottom_air__brutsaert_emissivity_canopy_factor', # canopy_factor
'atmosphere_bottom_air__brutsaert_emissivity_cloud_factor', # cloud_factor
'atmosphere_bottom_air__bulk_latent_heat_aerodynamic_conductance', # De (m s-1), latent
'atmosphere_bottom_air__bulk_sensible_heat_aerodynamic_conductance', # Dh (m s-1), sensible
'atmosphere_bottom_air__emissivity', # em_air
'atmosphere_bottom_air__mass-per-volume_density', # rho_air
'atmosphere_bottom_air__mass-specific_isobaric_heat_capacity', # Cp_a
|
brain-tec/partner-contact
|
partner_external_map/__init__.py
|
Python
|
agpl-3.0
| 90
| 0
|
# -*- coding: utf-8 -*-
from . import models
from .hooks import set_default_map_settings
|
reubano/ckanutils
|
ckanutils.py
|
Python
|
mit
| 29,704
| 0.000135
|
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanutils
~~~~~~~~~
Provides methods for interacting with a CKAN instance
Examples:
literal blocks::
python example_google.py
Attributes:
CKAN_KEYS (List[str]): available CKAN keyword arguments.
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import requests
import ckanapi
import itertools as it
from os import environ, path as p
from datetime import datetime as dt
from operator import itemgetter
from pprint import pprint
from ckanapi import NotFound, NotAuthorized, ValidationError
from tabutils import process as pr, io, fntools as ft, convert as cv
__version__ = '0.14.9'
__title__ = 'ckanutils'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility library'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
CKAN_KEYS = ['hash_table', 'remote', 'api_key', 'ua', 'force', 'quiet']
API_KEY_ENV = 'CKAN_API_KEY'
REMOTE_ENV = 'CKAN_REMOTE_URL'
UA_ENV = 'CKAN_USER_AGENT'
DEF_USER_AGENT = 'ckanutils/%s' % __version__
DEF_HASH_PACK = 'hash-table'
DEF_HASH_RES = 'hash-table.csv'
CHUNKSIZE_ROWS = 10 ** 3
CHUNKSIZE_BYTES = 2 ** 20
ENCODING = 'utf-8'
class CKAN(object):
"""Interacts with a CKAN instance.
Attributes:
force (bool): Force.
verbose (bool): Print debug statements.
quiet (bool): Suppress debug statements.
address (str): CKAN url.
hash_table (str): The hash table package id.
keys (List[str]):
"""
def __init__(self, **kwargs):
"""Initialization method.
Args:
**kwargs: Keyword arguments.
Kwargs:
hash_table (str): The hash table package id.
remote (str): The remote ckan url.
api_key (str): The ckan api key.
ua (str): The user agent.
force (bool): Force (default: True).
quiet (bool): Suppress debug statements (default: False).
Returns:
New instance of :class:`CKAN`
Examples:
>>> CKAN() #doctest: +ELLIPSIS
<ckanutils.CKAN object at 0x...>
"""
default_ua = environ.get(UA_ENV, DEF_USER_AGENT)
def_remote = environ.get(REMOTE_ENV)
def_api_key = environ.get(API_KEY_ENV)
remote = kwargs.get('remote', def_remote)
self.api_key = kwargs.get('api_key', def_api_key)
self.force = kwargs.get('force', True)
self.quiet = kwargs.get('quiet')
self.user_agent = kwargs.get('ua', default_ua)
self.verbose = not self.quiet
self.hash_table = kwargs.get('hash_table', DEF_HASH_PACK)
ckan_kwargs = {'apikey': self.api_key, 'user_agent': self.user_agent}
attr = 'RemoteCKAN' if remote else 'LocalCKAN'
ckan = getattr(ckanapi, attr)(remote, **ckan_kwargs)
self.address = ckan.address
self.package_show = ckan.action.package_show
try:
self.hash_table_pack = self.package_show(id=self.hash_table)
except NotFound:
self.hash_table_pack = None
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
self.hash_table_pack = None
else:
raise err
try:
self.hash_table_id = self.hash_table_pack['resources'][0]['id']
except (IndexError, TypeError):
self.hash_table_id = None
# shortcuts
self.datastore_search = ckan.action.datastore_search
self.datastore_create = ckan.action.datastore_create
self.datastore_delete = ckan.action.datastore_delete
self.datastore_upsert = ckan.action.datastore_upsert
self.datastore_search = ckan.action.datastore_search
self.resource_show = ckan.action.resource_show
self.resource_create = ckan.action.resource_create
self.package_create = ckan.action.package_create
self.package_update = ckan.action.package_update
self.package_privatize = ckan.action.bulk_update_private
self.revision_show = ckan.action.revision_show
self.organization_list = ckan.action.organization_list_for_user
self.organization_show = ckan.action.organization_show
self.license_list = ckan.action.license_list
self.group_list = ckan.action.group_list
self.user = ckan.action.get_site_user()
def create_table(self, resource_id, fields, **kwargs):
"""Creates a datastore table for an existing filestore resource.
Args:
resource_id (str): The filestore resource id.
fields (List[dict]): fields/columns and their extra metadata.
**kwargs: Keyword arguments that are passed to datastore_create.
Kwargs:
force (bool): Create resource even if read-only.
aliases (List[str]): name(s) for read only alias(es) of the
resource.
primary_key (List[str]): field(s) that represent a unique key.
indexes (List[str]): index(es) on table.
Returns:
dict: The newly created data object.
Raises:
ValidationError: If unable to validate user on ckan site.
NotFound: If unable to find resource.
Examples:
>>> CKAN(quiet=True).create_table('rid', fields=[{'id': 'field', \
'type': 'text'}])
Traceback (most recent call last):
NotFound: Resource `rid` was not found in filestore.
"""
kwargs.setdefault('force', self.force)
kwargs['resource_id'] = resource_id
kwargs['fields'] = fields
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
if self.verbose:
print('Creating table `%s` in datastore...' % resource_id)
try:
return self.datastore_create(**kwargs)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise
def delete_table(self, resource_id, **kwargs):
"""Deletes a datastore table.
Args:
resource_id (str): The datastore resource id.
**kwargs: Keyword arguments that are passed to datastore_create.
Kwargs:
force (bool): Delete resource even if read-only.
filters (dict): Filters to apply before deleting, e.g.,
{"name": "fred"}. If missing delete whole table and all
dependent views.
Returns:
dict: Original filters sent if table was found, `None` otherwise.
Raises:
ValidationError: If unable to validate user on ckan site.
Examples:
>>> CKAN(quiet=True).delete_table('rid')
Can't delete. Table `rid` was not found in datastore.
"""
kwargs.setdefault('force', self.force)
kwargs['resource_id'] = resource_id
init_msg = "Can't delete. Table `%s`" % resource_id
err_msg = '%s was not found in datastore.' % init_msg
read_msg = '%s is read only.' % init_msg
if self.verbose:
print('Deleting table `%s` from datastore...' % resource_id)
try:
result = self.datastore_delete(**kwargs)
except NotFound:
print(err_msg)
result = None
except ValidationError as err:
if 'read-only' in err.error_dict:
print(read_msg)
print("Set 'force' to True and try again.")
result = None
elif err.error_dict.get('resource_id') == ['Not found: Resource']:
print(err_msg)
result = None
else:
raise err
return result
def insert_records(self, resource_id, records, **kwargs):
"""Inserts records into a datastore table.
Args:
resource_id (str): The datastore resource id.
records (List[dict]): The records to insert.
**kwargs: Keyword arguments t
|
mola/jalali-calendar
|
src/holiday.py
|
Python
|
gpl-2.0
| 1,532
| 0.106397
|
#!/usr/bin/env python
###
#
# Copyright (C) 2007 Mola Pahnadayan
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
holiday = { 1386:[[1,1],[1,2],[1,3],[1,4],[1,12],[1,13],[1,17],
[2,14],[2,28],[3,14],[3,15],[3,28],[5,6],[5,20],[6,7],
[7,11],[7,21],[8,15],[9,30],[10,8],[10,28],[10,29],
[11,22],[12,9],[12,17],[12,18],[12,29]],
1387:[[1,1],[1,2],[1,3],[1,4],[1,12],[1,13],[3,14],[3,15],[3,18],[4,26],[5,9],[5,27],
[7,1],[7,10],[8,4],[9,19],[9,27],[10,17],[10,18],
[11,22],[11,28],[12,6],[12,8],[12,25],[12,29]],
1388:[[1,1],[1,2],[1,3],[1,4],[1,12],[1,13],[3,7],[3,14],[3,15],[4,15],[4,29],[5,16],
[6,20],[6,29],[7,22],[9,7],[9,15],[10,5],[10,6],
[11,16],[11,22],[11,24],[11,26],[12,13],[12,29]] }
|
PyLadiesCZ/pyladies.cz
|
original/v1/s007-cards/klondike/test_popis_karty.py
|
Python
|
mit
| 1,114
| 0.000907
|
import pytest
import klondike
def test_popis_rubem_nahoru():
karta = 13, 'Pi', False
assert klondike.popis_karty(karta) == '[???]'
def test_popis_srdcova_kralovna():
karta = 12, 'Sr', True
assert klondike.popis_karty(karta) in ['[Q ♥]', '[Q S]']
def test_otoc_kralovnu():
karta = 12, 'Sr', True
assert klondike.otoc_kartu(karta, True) == (12, 'Sr', True)
assert klondike.otoc_kartu(karta, False) == (12, 'Sr', False)
def test_otoc_eso():
karta = 1, 'Pi', False
assert klondike.otoc_kartu(karta, True) == (1, 'Pi', True)
assert klondike.otoc_kartu(karta, False) == (1, 'Pi', False)
# This is a testing trick we have not covered yet:
# several tests in one function
@pytest.mark.parametrize('hodnota,znak', [
(1, 'A'),
(2, '2'),
(3, '3'),
(4, '4'),
(5, '5'),
(6, '6'),
(7, '7'),
(8, '8'),
(9, '9'),
(10, 'X'),
(11, 'J'),
(12, 'Q'),
(13, 'K'),
])
def test_popis_hodnoty(hodnota, znak):
karta = hodnota, 'Sr', True
    assert klondike.popis_karty(karta) in ['[' + znak + ' ♥]', '[' + znak + ' S]']
|
pyblub/pyload
|
pyload/utils/parse.py
|
Python
|
agpl-3.0
| 4,211
| 0
|
# -*- coding: utf-8 -*-
# @author: vuolter
from __future__ import absolute_import, unicode_literals
import os
import re
from future import standard_library
from pyload.utils import convert, purge, web
from pyload.utils.convert import to_str
from pyload.utils.layer.legacy import hashlib
from pyload.utils.time import seconds_to_midnight
standard_library.install_aliases()
_RE_ALIAS = re.compile(r'[\d.-_]+')
def alias(text):
chunks = _RE_ALIAS.split(purge.name(text))
return ''.join(word.capitalize() for word in chunks if word)
_BOOLEANMAP = {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
def boolean(text):
return _BOOLEANMAP.get(text.strip().lower())
def entries(text, allow_whitespaces=False):
chars = ';,|'
if not allow_whitespaces:
chars += r'\s'
pattr = r'[{0}]+'.format(chars)
return [entry for entry in re.split(pattr, text) if entry]
def hash(text):
text = text.replace('-', '').lower()
algop = '|'.join(hashlib.algorithms + ('adler32', 'crc(32)?'))
    pattr = r'(?P<D1>{}|)\s*[:=]?\s*(?P<H>[\w^_]{{8,}}?)\s*[:=]?\s*(?P<D2>{}|)'
pattr = pattr.format(algop, algop)
m = re.search(pattr, text)
if m is None:
return None, None
checksum = m.group('H')
algorithm = m.group('D1') or m.group('D2')
if algorithm == 'crc':
algorithm = 'crc32'
return checksum, algorithm
def name(text, strict=True):
try:
name = web.parse.name(text)
except Exception:
name = os.path.basename(text).strip()
return name if strict else purge.name(name)
_ONEWORDS = (
'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight',
'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen',
'sixteen', 'seventeen', 'eighteen', 'nineteen')
_TENWORDS = (
'twenty', 'thirty', 'forty', 'fifty', 'sixty', 'seventy', 'eighty',
'ninety')
_RE_NUMBER = re.compile(r'[\s-]+')
def number(text):
try:
text = web.misc.translate(text).lower()
except Exception:
text = text.lower()
o_tuple = [(w, i) for i, w in enumerate(_ONEWORDS)]
t_tuple = [(w, i * 10) for i, w in enumerate(_TENWORDS, 2)]
numwords = dict(o_tuple + t_tuple)
tokens = _RE_NUMBER.split(text)
numbers = [_f for _f in (numwords.get(word) for word in tokens) if _f]
return sum(numbers) if numbers else None
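# Illustration: number('twenty-one') -> 21 and number('no digits here') -> None.
# Note that the `if _f` filter also drops 0, so number('zero') returns None too.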
_RE_PACKS = re.compile(r'[^a-z0-9]+(?:(cd|part).*?\d+)?', flags=re.I)
def packs(nameurls):
DEFAULT_URLNAME = 'Unknown'
packs = {}
for urlname, url in nameurls:
urlname = name(urlname, strict=False)
urlname = os.path.splitext(urlname)[0].strip()
urlname = _RE_PACKS.sub('_', urlname).strip('_')
if not urlname:
urlname = DEFAULT_URLNAME
packs.setdefault(urlname, []).append(url)
return packs
_RE_SIZE = re.compile(r'(?P<S>-?[\d.,]+)\s*(?P<U>[a-zA-Z]*)')
def bytesize(text, unit=None):  # returns integer bytes
DEFAULT_INPUTUNIT = 'byte'
m = _RE_SIZE.match(to_str(text))
if m is None:
return None
if unit is None:
unit = m.group('U') or DEFAULT_INPUTUNIT
size = float(m.group('S').replace(',', '.'))
    unit = unit[0].lower()
    return int(convert.size(size, unit, 'byte'))
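# Illustration (assuming convert.size treats 1 KB as 1024 bytes):
# bytesize('1,5 KB') parses size=1.5 and unit='k', i.e. roughly 1536 bytes.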
_TIMEWORDS = ('this', 'a', 'an', 'next')
_TIMEMAP = {
'day': 60 ** 2 * 12, 'hr': 60 ** 2, 'hour': 60 ** 2, 'min': 60, 'sec': 1}
_RE_TIME = re.compile(r'(\d+|[a-zA-Z-]+)\s*(day|hr|hour|min|sec)|(\d+)')
def seconds(text):
def to_int(obj):
try:
return int(obj)
except ValueError:
return None
try:
text = web.misc.translate(text).lower()
except Exception:
text = text.lower()
pattr = r'({0})\s+day|today|daily'.format('|'.join(_TIMEWORDS))
m = re.search(pattr, text)
if m is not None:
return seconds_to_midnight()
seconds = sum(
(w in _TIMEWORDS or to_int(i or w) or number(w) or 1) *
_TIMEMAP.get(u, 1) for w, u, i in _RE_TIME.findall(text))
return seconds
def minutes(text):
return seconds(text) / 60
def hours(text):
return seconds(text) / 60 ** 2
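# Illustration: seconds('2 hours 30 min') sums 2*3600 + 30*60 = 9000,
# so minutes() of the same text gives 150.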
|
alexjh/whatson
|
whats_on_to_simpledb.py
|
Python
|
gpl-2.0
| 9,735
| 0.009245
|
#!/usr/bin/python
"""Scrapes websvc and adds them to SimpleDB"""
from __future__ import print_function
import boto
import time
import datetime
import re
import pytz
import sys
import urllib
import collections
import yaml
from musicbrainz2.webservice import Query, TrackFilter, WebServiceError, \
AuthenticationError, ConnectionError, \
RequestError, ResponseError, \
ResourceNotFoundError
from musicbrainz2.model import Release
# TODO:
#
# * dynamically discover timezone
# * add station name in yaml
# 8 bytes for timestamp
# 5 for title
# 6 for artist
# 10 average for song title
# 10 average for artist name
# --
# 39 total bytes per record
# x 34 stations
# --
#
# 1,326 bytes
#
# 24 * 60 minutes in a day = 1440
#
# 3 minutes per song
#
# ---
#
# 480 songs per day
# x 1,326
# ---------
# 636,480 bytes per day
# x 30
# ---------
# 19,094,400 bytes per month
URL = 'http://websvc.bdsrealtime.com/NBDS.Consumer.Tempo/' \
'nowplaying.aspx?uid=Service%s&stnid=%04d'
POST = '__EVENTTARGET=detectTime&__EVENTARGUMENT=&detectTime='
def main():
"""Loops through all stations and scrapes the most recent songs"""
print(datetime.datetime.now())
try:
stations_yaml = open('stations.yaml')
except IOError:
print("Failed to load station list", file=sys.stderr)
sys.exit(-1)
stations = yaml.load(stations_yaml)
for key, value in stations.items():
scrapeStation( key, URL % (key, value['id']), POST, value['tz'] )
print(datetime.datetime.now())
def store_in_cloud_db( domain, plays ):
"""Stores a play list in a SimpleDB domain
Existing plays will be queried to see if the album has already been defined.
Keywork arguments:
domain -- the SimpleDB domain to store it in
plays -- a dict with keys representing timestamps, the value is a tuple (artist, title)
"""
items = {}
total = 0
for epoch, attrs in plays.items():
artist = attrs[0].replace('"', '""')
title = attrs[1].replace('"', '""')
song_query = 'select * from `%s` where Album is not null ' \
'and Artist = "%s" and Title = "%s"' \
% (domain.name, artist, title)
song_rs = domain.select(song_query, max_items=1)
album = None
for song in song_rs:
album = song['Album']
if album is not None and album is not "":
# TODO: FIXME: Query all domains, not just the current station
item_attrs = {'Artist': attrs[0],
'Title': attrs[1],
'Album': album}
#print("Found existing album", album, "for", attrs)
else:
item_attrs = {'Artist': attrs[0], 'Title': attrs[1]}
#print("Could not find album for", attrs)
items["%08x" % epoch] = item_attrs
if len(items) == 20:
domain.batch_put_attributes(items)
items = {}
total += 20
else:
if len(items) != 0:
domain.batch_put_attributes(items)
total += len(items)
print("Songs inserted: ", total)
def get_last_song_time( domain ):
"""Gets the timestamp of the last song played in a domain"""
query = 'select * from `%s` where itemName() > "00000000" ' \
'order by itemName() desc limit 1' % (domain.name)
result_set = domain.select(query, max_items=1)
for item in result_set:
        print(domain.name, item.name, item)
try:
last_song_time = int(item.name, 16)
break
except ValueError:
invalid_item = domain.get_item(item.name)
print("Deleting", item.name)
domain.delete_item( invalid_item )
# print("Last song: ", int(item.name, 16), ":", item)
|
else:
last_song_time = 0
return last_song_time
def get_timestamps( source, timezone ):
timestamps = collections.OrderedDict()
song_times = re.findall('<option value=\"(.*?)\">(.*?)<\/option>',
source)
if ( len(song_times) == 0 ):
return timestamps
# Get the station's current time
then = datetime.datetime(1970, 1, 1)
then = pytz.UTC.localize(then)
then = then.astimezone(pytz.timezone(timezone))
station_time = datetime.datetime.now(pytz.timezone(timezone))
station_epoch = (station_time - then).total_seconds()
for song_time in reversed(song_times):
# Convert song time to 'current time'
is_pm = song_time[0][-2:] == 'PM'
hour, minute = song_time[0][:-2].split(':')
hour = int(hour)
minute = int(minute)
# If we are 1:00PM and greater
if is_pm and hour != 12:
hour += 12
# If we are 12:00AM - 12:59AM
if not is_pm and hour == 12:
hour = 0
song_dt = station_time.replace(hour=hour, minute=minute,
second=0, microsecond=0)
song_epoch = int((song_dt - then).total_seconds())
if song_epoch > station_epoch:
song_epoch -= 24*60*60
timestamps[song_epoch] = song_time[0]
return timestamps
def getSongInfo( plays ):
detailed_plays = collections.OrderedDict()
for k,v in plays.items():
q = Query()
time.sleep(1.1)
found = False
i = 1
while not found and i < 10:
try:
f = TrackFilter(title=v[1], artistName=v[0])
results = q.getTracks(f)
found = True
except (AuthenticationError,
ConnectionError,
RequestError,
ResponseError,
ResourceNotFoundError,
WebServiceError) as error:
detailed_plays[k] = (v[0], v[1], "")
print('Error:', error, 'waiting', i*10, 'seconds')
results = None
time.sleep(i*10)
i += 1
        if (results is not None) and (len(results) != 0):
found_release = None
release_type = None
release = None
for result in results:
track = result.track
title = track.title
artist = track.artist.name
# Prefer: album, single, live, anything else
for release in track.releases:
if Release.TYPE_ALBUM in release.getTypes():
found_release = release
release_type = Release.TYPE_ALBUM
break
elif Release.TYPE_SINGLE in release.getTypes():
if release_type != Release.TYPE_ALBUM:
found_release = release
release_type = Release.TYPE_SINGLE
elif Release.TYPE_LIVE in release.getTypes():
if release_type != Release.TYPE_ALBUM and \
release_type != Release.TYPE_SINGLE:
found_release = release
release_type = Release.TYPE_LIVE
else:
if release_type != Release.TYPE_ALBUM and \
release_type != Release.TYPE_SINGLE and \
release_type != Release.TYPE_LIVE:
found_release = release
if release_type == Release.TYPE_ALBUM:
break
            if found_release is None:
album = ""
else:
album = release.title
detailed_plays[k] = (artist, title, album)
else:
detailed_plays[k] = (v[0], v[1], "")
return detailed_plays
def getPlays( times, url, post_base ):
plays = collections.OrderedDict()
# i = 0
for epoch, url_time_str i
|
PyLearner/tp-qemu
|
qemu/tests/qmp_basic_rhel6.py
|
Python
|
gpl-2.0
| 14,327
| 0
|
import logging
from autotest.client.shared import error
def run(test, params, env):
"""
QMP Specification test-suite: this checks if the *basic* protocol conforms
to its specification, which is file QMP/qmp-spec.txt in QEMU's source tree.
IMPORTANT NOTES:
o Most tests depend heavily on QMP's error information (eg. classes),
this might have bad implications as the error interface is going to
change in QMP
o Command testing is *not* covered in this suite. Each command has its
own specification and should be tested separately
o We use the same terminology as used by the QMP specification,
specially with regard to JSON types (eg. a Python dict is called
a json-object)
o This is divided in sub test-suites, please check the bottom of this
file to check the order in which they are run
TODO:
o Finding which test failed is not as easy as it should be
o Are all those check_*() functions really needed? Wouldn't a
specialized class (eg. a Response class) do better?
"""
def fail_no_key(qmp_dict, key):
if not isinstance(qmp_dict, dict):
raise error.TestFail("qmp_dict is not a dict (it's '%s')" %
type(qmp_dict))
if key not in qmp_dict:
raise error.TestFail("'%s' key doesn't exist in dict ('%s')" %
(key, str(qmp_dict)))
def check_dict_key(qmp_dict, key, keytype):
"""
Performs the following checks on a QMP dict key:
1. qmp_dict is a dict
2. key exists in qmp_dict
3. key is of type keytype
If any of these checks fails, error.TestFail is raised.
"""
fail_no_key(qmp_dict, key)
if not isinstance(qmp_dict[key], keytype):
raise error.TestFail("'%s' key is not of type '%s', it's '%s'" %
(key, keytype, type(qmp_dict[key])))
def check_key_is_dict(qmp_dict, key):
check_dict_key(qmp_dict, key, dict)
def check_key_is_list(qmp_dict, key):
check_dict_key(qmp_dict, key, list)
def check_key_is_str(qmp_dict, key):
check_dict_key(qmp_dict, key, unicode)
def check_str_key(qmp_dict, keyname, value=None):
check_dict_key(qmp_dict, keyname, unicode)
if value and value != qmp_dict[keyname]:
raise error.TestFail("'%s' key value '%s' should be '%s'" %
(keyname, str(qmp_dict[keyname]), str(value)))
def check_key_is_int(qmp_dict, key):
fail_no_key(qmp_dict, key)
try:
int(qmp_dict[key])
except Exception:
raise error.TestFail("'%s' key is not of type int, it's '%s'" %
(key, type(qmp_dict[key])))
def check_bool_key(qmp_dict, keyname, value=None):
check_dict_key(qmp_dict, keyname, bool)
if value and value != qmp_dict[keyname]:
raise error.TestFail("'%s' key value '%s' should be '%s'" %
(keyname, str(qmp_dict[keyname]), str(value)))
def check_success_resp(resp, empty=False):
"""
Check QMP OK response.
:param resp: QMP response
:param empty: if True, response should not contain data to return
"""
check_key_is_dict(resp, "return")
if empty and len(resp["return"]) > 0:
raise error.TestFail("success response is not empty ('%s')" %
str(resp))
def check_error_resp(resp, classname=None, datadict=None):
"""
Check QMP error response.
:param resp: QMP response
:param classname: Expected error class name
:param datadict: Expected error data dictionary
"""
logging.debug("resp %s", str(resp))
check_key_is_dict(resp, "error")
check_key_is_str(resp["error"], "class")
if classname and resp["error"]["class"] != classname:
raise error.TestFail("got error class '%s' expected '%s'" %
(resp["error
|
"]["class"], classname))
check_key_is_dict(resp["error"], "data")
if datadict and resp["error"]["data"] != datadict:
raise error.TestFail("got data dict '%s' expected '%s'" %
(resp["error"]["data"], datadict))
def test_version(version):
"""
Check the QMP greeting message version key which, according to QMP's
documentation, should be:
{ "qem
|
u": { "major": json-int, "minor": json-int, "micro": json-int }
"package": json-string }
"""
check_key_is_dict(version, "qemu")
check_key_is_str(version, "package")
def test_greeting(greeting):
check_key_is_dict(greeting, "QMP")
check_key_is_dict(greeting["QMP"], "version")
check_key_is_list(greeting["QMP"], "capabilities")
def greeting_suite(monitor):
"""
Check the greeting message format, as described in the QMP
specfication section '2.2 Server Greeting'.
{ "QMP": { "version": json-object, "capabilities": json-array } }
"""
greeting = monitor.get_greeting()
test_greeting(greeting)
test_version(greeting["QMP"]["version"])
def json_parsing_errors_suite(monitor):
"""
Check that QMP's parser is able to recover from parsing errors, please
check the JSON spec for more info on the JSON syntax (RFC 4627).
"""
# We're quite simple right now and the focus is on parsing errors that
# have already biten us in the past.
#
# TODO: The following test-cases are missing:
#
# - JSON numbers, strings and arrays
# - More invalid characters or malformed structures
# - Valid, but not obvious syntax, like zillion of spaces or
# strings with unicode chars (different suite maybe?)
bad_json = []
# A JSON value MUST be an object, array, number, string, true, false,
# or null
#
# NOTE: QMP seems to ignore a number of chars, like: | and ?
bad_json.append(":")
bad_json.append(",")
# Malformed json-objects
#
# NOTE: sending only "}" seems to break QMP
# NOTE: Duplicate keys are accepted (should it?)
bad_json.append("{ \"execute\" }")
bad_json.append("{ \"execute\": \"query-version\", }")
bad_json.append("{ 1: \"query-version\" }")
bad_json.append("{ true: \"query-version\" }")
bad_json.append("{ []: \"query-version\" }")
bad_json.append("{ {}: \"query-version\" }")
for cmd in bad_json:
resp = monitor.cmd_raw(cmd)
check_error_resp(resp, "JSONParsing")
def test_id_key(monitor):
"""
Check that QMP's "id" key is correctly handled.
"""
# The "id" key must be echoed back in error responses
id_key = "virt-test"
resp = monitor.cmd_qmp("eject", {"foobar": True}, q_id=id_key)
check_error_resp(resp)
check_str_key(resp, "id", id_key)
# The "id" key must be echoed back in success responses
resp = monitor.cmd_qmp("query-status", q_id=id_key)
check_success_resp(resp)
check_str_key(resp, "id", id_key)
# The "id" key can be any json-object
for id_key in (True, 1234, "string again!", [1, [], {}, True, "foo"],
{"key": {}}):
resp = monitor.cmd_qmp("query-status", q_id=id_key)
check_success_resp(resp)
if resp["id"] != id_key:
raise error.TestFail("expected id '%s' but got '%s'" %
(str(id_key), str(resp["id"])))
def test_invalid_arg_key(monitor):
"""
Currently, the only supported keys in the input object are: "execute",
"arguments" and "id". Although expansion is supported, invalid key
names must be detected.
"""
resp = monitor.cmd_obj({"execute": "eject", "foob
|
AccentDesign/wagtailstreamforms
|
wagtailstreamforms/models/abstract.py
|
Python
|
mit
| 318
| 0
|
from django.db import models
class AbstractFormSetting(models.Model):
form = models.OneToOneField(
"wagtailstreamforms.Form",
on_delete=models.CASCADE,
related_name="advanced_settings",
)
class Meta:
abstract = True
    def __str__(self):
        return self.form.title
|
ptisserand/portage
|
pym/_emerge/search.py
|
Python
|
gpl-2.0
| 13,055
| 0.035619
|
# Copyright 1999-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import unicode_literals
import re
import portage
from portage import os
from portage.dbapi.porttree import _parse_uri_map
from portage.dbapi.IndexedPortdb import IndexedPortdb
from portage.dbapi.IndexedVardb import IndexedVardb
from portage.localization import localized_size
from portage.output import bold, bold as white, darkgreen, green, red
from portage.util import writemsg_stdout
from portage.util.iterators.MultiIterGroupBy import MultiIterGroupBy
from _emerge.Package import Package
class search(object):
#
# class constants
#
VERSION_SHORT=1
VERSION_RELEASE=2
#
# public interface
#
def __init__(self, root_config, spinner, searchdesc,
verbose, usepkg, usepkgonly, search_index=True):
"""Searches the available and installed packages for the supplied search key.
The list of available and installed packages is created at object instantiation.
This makes successive searches faster."""
self.settings = root_config.settings
self.verbose = verbose
self.searchdesc = searchdesc
self.searchkey = None
# Disable the spinner since search results are displayed
# incrementally.
self.spinner = None
self.root_config = root_config
self.setconfig = root_config.setconfig
self.matches = {"pkg" : []}
self.mlen = 0
self._dbs = []
portdb = root_config.trees["porttree"].dbapi
bindb = root_config.trees["bintree"].dbapi
vardb = root_config.trees["vartree"].dbapi
if search_index:
portdb = IndexedPortdb(portdb)
vardb = IndexedVardb(vardb)
if not usepkgonly and portdb._have_root_eclass_dir:
self._dbs.append(portdb)
if (usepkg or usepkgonly) and bindb.cp_all():
self._dbs.append(bindb)
self._dbs.append(vardb)
self._portdb = portdb
self._vardb = vardb
def _spinner_update(self):
if self.spinner:
self.spinner.update()
def _cp_all(self):
iterators = []
for db in self._dbs:
i = db.cp_all()
try:
i = iter(i)
except TypeError:
pass
iterators.append(i)
for group in MultiIterGroupBy(iterators):
yield group[0]
def _aux_get(self, *args, **kwargs):
for db in self._dbs:
try:
return db.aux_get(*args, **kwargs)
except KeyError:
pass
raise KeyError(args[0])
def _aux_get_error(self, cpv):
portage.writemsg("emerge: search: "
"aux_get('%s') failed, skipping\n" % cpv,
noiselevel=-1)
def _findname(self, *args, **kwargs):
for db in self._dbs:
if db is not self._portdb:
# We don't want findname to return anything
# unless it's an ebuild in a portage tree.
# Otherwise, it's already built and we don't
# care about it.
continue
func = getattr(db, "findname", None)
            if func:
value = func(*args, **kwargs)
if value:
return value
return None
def _getFetchMap(self, *args, **kwargs):
for db in self._dbs:
func = getattr(db, "getFetchMap", None)
if func:
value = func(*args, **kwargs)
if value:
return value
return {}
def _visible(self, db, cpv, metadata):
installed = db is self._vardb
        built = installed or db is not self._portdb
pkg_type = "ebuild"
if installed:
pkg_type = "installed"
elif built:
pkg_type = "binary"
return Package(type_name=pkg_type,
root_config=self.root_config,
cpv=cpv, built=built, installed=installed,
metadata=metadata).visible
def _first_cp(self, cp):
for db in self._dbs:
if hasattr(db, "cp_list"):
matches = db.cp_list(cp)
if matches:
return matches[-1]
else:
matches = db.match(cp)
for cpv in matches:
if cpv.cp == cp:
return cpv
return None
def _xmatch(self, level, atom):
"""
        This method does not expand old-style virtuals because it
        is restricted to returning matches for a single ${CATEGORY}/${PN},
        and old-style virtual matching is unreliable for that when querying
        multiple package databases. If necessary, old-style virtual
        expansion can be performed on atoms prior to calling this method.
"""
cp = portage.dep_getkey(atom)
if level == "match-all":
matches = set()
for db in self._dbs:
if hasattr(db, "xmatch"):
matches.update(db.xmatch(level, atom))
else:
matches.update(db.match(atom))
result = list(x for x in matches if portage.cpv_getkey(x) == cp)
db._cpv_sort_ascending(result)
elif level == "match-visible":
matches = set()
for db in self._dbs:
if hasattr(db, "xmatch"):
matches.update(db.xmatch(level, atom))
else:
db_keys = list(db._aux_cache_keys)
for cpv in db.match(atom):
try:
metadata = zip(db_keys,
db.aux_get(cpv, db_keys))
except KeyError:
self._aux_get_error(cpv)
continue
if not self._visible(db, cpv, metadata):
continue
matches.add(cpv)
result = list(x for x in matches if portage.cpv_getkey(x) == cp)
db._cpv_sort_ascending(result)
elif level == "bestmatch-visible":
result = None
for db in self._dbs:
if hasattr(db, "xmatch"):
cpv = db.xmatch("bestmatch-visible", atom)
if not cpv or portage.cpv_getkey(cpv) != cp:
continue
if not result or cpv == portage.best([cpv, result]):
result = cpv
else:
db_keys = list(db._aux_cache_keys)
matches = db.match(atom)
try:
db.match_unordered
except AttributeError:
pass
else:
db._cpv_sort_ascending(matches)
# break out of this loop with highest visible
# match, checked in descending order
for cpv in reversed(matches):
if portage.cpv_getkey(cpv) != cp:
continue
try:
metadata = zip(db_keys,
db.aux_get(cpv, db_keys))
except KeyError:
self._aux_get_error(cpv)
continue
if not self._visible(db, cpv, metadata):
continue
if not result or cpv == portage.best([cpv, result]):
result = cpv
break
else:
raise NotImplementedError(level)
return result
    def execute(self, searchkey):
"""Performs the search for the supplied search key"""
self.searchkey = searchkey
def _iter_search(self):
match_category = 0
self.packagematches = []
if self.searchdesc:
self.searchdesc=1
self.matches = {"pkg":[], "desc":[], "set":[]}
else:
self.searchdesc=0
self.matches = {"pkg":[], "set":[]}
writemsg_stdout("Searching...\n\n", noiselevel=-1)
regexsearch = False
if self.searchkey.startswith('%'):
regexsearch = True
self.searchkey = self.searchkey[1:]
if self.searchkey.startswith('@'):
match_category = 1
self.searchkey = self.searchkey[1:]
if regexsearch:
self.searchre=re.compile(self.searchkey,re.I)
else:
self.searchre=re.compile(re.escape(self.searchkey), re.I)
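        # Example search keys: '%^dev-' is treated as a regular expression,
        # while '@www-client/firefox' matches against the full category/name.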
for package in self._cp_all():
self._spinner_update()
if match_category:
match_string = package[:]
else:
match_string = package.split("/")[-1]
if self.searchre.search(match_string):
yield ("pkg", package)
elif self.searchdesc: # DESCRIPTION searching
# Use _first_cp to avoid an expensive visibility check,
# since the visibility check can be avoided entirely
# when the DESCRIPTION does not match.
full_package = self._first_cp(package)
if not full_package:
continue
try:
full_desc = self._aux_get(
full_package, ["DESCRIPTION"])[0]
except KeyError:
self._aux_get_error(full_package)
continue
if not self.searchre.search(full_desc):
continue
yield ("desc", package)
self.sdict = self.setconfig.getSets()
for setname in self.sdict:
self._spinner_update()
if match_category:
match_string = setname
else:
match_string = setname.split("/")[-1]
if self.searchre.search(match_string):
yield ("set", setname)
elif self.searchdesc:
if self.searchre.search(
self.sdict[setname].getMetadata("DESCRIPTION")):
yield ("set", setname)
def addCP(self, cp):
if not self._xmatch("match-all", cp):
return
self.matches["pkg"].append(cp)
self.mlen += 1
def output(self):
"""Outputs the results of the search."""
class msg(object):
@staticmethod
def append(msg):
writemsg_stdout(msg, no
|
mlperf/training_results_v0.6
|
Fujitsu/benchmarks/resnet/implementations/mxnet/python/mxnet/module/module.py
|
Python
|
apache-2.0
| 37,421
| 0.003233
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-instance-attributes, too-many-arguments, protected-access, too-many-branches
# pylint: disable=too-many-public-methods
"""A `Module` implement the `BaseModule` API by wrapping a `Symbol` and one or
more `Executor` for data parallelization.
"""
import logging
import warnings
from .. import context as ctx
from .. import optimizer as opt
from .. import ndarray as nd
from .executor_group import DataParallelExecutorGroup
from ..model import _create_kvstore, _initialize_kvstore, _update_params, _update_params_on_kvstore, _prepare_params
from ..model import load_checkpoint
from ..initializer import Uniform, InitDesc
from ..io import DataDesc
from ..ndarray import zeros
from .base_module import BaseModule, _check_input_names, _parse_data_desc
class Module(BaseModule):
"""Module is a basic module that wrap a `Symbol`. It is functionally the same
as the `FeedForward` model, except under the module API.
Parameters
----------
symbol : Symbol
data_names : list of str
Defaults to `('data')` for a typical model used in image classification.
label_names : list of str
Defaults to `('softmax_label')` for a typical model used in image
classification.
logger : Logger
Defaults to `logging`.
context : Context or list of Context
Defaults to ``mx.cpu()``.
work_load_list : list of number
Default ``None``, indicating uniform workload.
fixed_param_names: list of str
Default ``None``, indicating no network parameters are fixed.
state_names : list of str
states are similar to data and label, but not provided by data iterator.
Instead they are initialized to 0 and can be set by `set_states()`.
group2ctxs : dict of str to context or list of context,
or list of dict of str to context
Default is `None`. Mapping the `ctx_group` attribute to the context assignment.
compression_params : dict
Specifies type of gradient compression and additional arguments depending
on the type of compression being used. For example, 2bit compression requires a threshold.
Arguments would then be {'type':'2bit', 'threshold':0.5}
See mxnet.KVStore.set_gradient_compression method for more details on gradient compression.
"""
def __init__(self, symbol, data_names=('data',), label_names=('softmax_label',),
logger=logging, context=ctx.cpu(), work_load_list=None,
fixed_param_names=None, state_names=None, group2ctxs=None,
compression_params=None):
super(Module, self).__init__(logger=logger)
if isinstance(context, ctx.Context):
context = [context]
self._context = context
if work_load_list is None:
work_load_list = [1] * len(self._context)
assert len(work_load_list) == len(self._context)
self._work_load_list = work_load_list
self._group2ctxs = group2ctxs
self._symbol = symbol
data_names = list(data_names) if data_names is not None else []
label_names = list(label_names) if label_names is not None else []
state_names = list(state_names) if state_names is not None else []
fixed_param_names = list(fixed_param_names) if fixed_param_names is not None else []
_check_input_names(symbol, data_names, "data", True)
_check_input_names(symbol, label_names, "label", False)
_check_input_names(symbol, state_names, "state", True)
_check_input_names(symbol, fixed_param_names, "fixed_param", True)
arg_names = symbol.list_arguments()
input_names = data_names + label_names + state_names
self._param_names = [x for x in arg_names if x not in input_names]
self._fixed_param_names = fixed_param_names
self._aux_names = symbol.list_auxiliary_states()
self._data_names = data_names
self._label_names = label_names
self._state_names = state_names
self._output_names = symbol.list_outputs()
self._arg_params = None
self._aux_params = None
self._params_dirty = False
self._compression_params = compression_params
self._optimizer = None
self._kvstore = None
self._update_on_kvstore = None
self._updater = None
self._preload_opt_states = None
self._grad_req = None
self._exec_group = None
self._data_shapes = None
self._label_shapes = None
@staticmethod
def load(prefix, epoch, load_optimizer_states=False, **kwargs):
"""Creates a model from previously saved checkpoint.
Parameters
----------
prefix : str
path prefix of saved model files. You should have
"prefix-symbol.json", "prefix-xxxx.params", and
optionally "prefix-xxxx.states", where xxxx is the
epoch number.
epoch : int
epoch to load.
load_optimizer_states : bool
whether to load optimizer states. Checkpoint needs
to have been made with save_optimizer_states=True.
data_names : list of str
            Default is `('data')` for a typical model used in image classification.
        label_names : list of str
Default is `('softmax_label')` for a typical model used in image
classification.
logger : Logger
Default is `logging`.
context : Context or list of Context
Default is ``cpu()``.
work_load_list : list of number
Default ``None``, indicating uniform workload.
fixed_param_names: list of str
Default ``None``, indicating no network parameters are fixed.
"""
sym, args, auxs = load_checkpoint(prefix, epoch)
mod = Module(symbol=sym, **kwargs)
mod._arg_params = args
mod._aux_params = auxs
mod.params_initialized = True
if load_optimizer_states:
mod._preload_opt_states = '%s-%04d.states'%(prefix, epoch)
return mod
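    # Hypothetical usage: Module.load('mymodel', 10, load_optimizer_states=True)
    # expects 'mymodel-symbol.json', 'mymodel-0010.params' and, for the optimizer
    # states, 'mymodel-0010.states' (per the '%s-%04d' patterns used here) on disk.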
def save_checkpoint(self, prefix, epoch, save_optimizer_states=False):
"""Saves current progress to checkpoint.
Use `mx.callback.module_checkpoint` as `epoch_end_callback` to save during training.
Parameters
----------
prefix : str
The file prefix to checkpoint to.
epoch : int
The current epoch number.
save_optimizer_states : bool
Whether to save optimizer states to continue training.
"""
self._symbol.save('%s-symbol.json'%prefix)
param_name = '%s-%04d.params' % (prefix, epoch)
self.save_params(param_name)
logging.info('Saved checkpoint to \"%s\"', param_name)
if save_optimizer_states:
state_name = '%s-%04d.states' % (prefix, epoch)
self.save_optimizer_states(state_name)
logging.info('Saved optimizer state to \"%s\"', state_name)
def _reset_bind(self):
"""Internal function to reset binded state."""
self.binded = False
self._exec_group = None
self._data_shapes = None
self._label_shapes = None
@property
def data_names(self):
"""A list of names for data required by this module."""
return self._data_names
@property
def label_names(
|
bl4ckdu5t/registron
|
tests/import/relimp/relimp1.py
|
Python
|
mit
| 795
| 0.005031
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from __future__ import absolute_import
name = 'relimp.relimp1'
from . import relimp2 as upper
from . relimp import relimp2 as lower
assert upper.name == 'relimp.relimp2'
assert lower.name == 'relimp.relimp.relimp2'
if upper.__name__ == lower.__name__:
raise SystemExit("Imported the same module")
if upper.__file__ == lower.__file__:
raise SystemExit("Imported the same file")
|
depp/sglib
|
script/d3build/msvc/base.py
|
Python
|
bsd-2-clause
| 1,042
| 0
|
# Copyright 2014 Dietrich Epp.
# This file is part of SGLib. SGLib is licensed under the terms of the
# 2-clause BSD license. For more information, see LICENSE.txt.
BASE_CONFIG = {
'Config.PlatformToolset': 'v120',
'Config.CharacterSet': 'Unicode',
'ClCompile.WarningLevel': 'Level3',
'ClCompile.SDLCheck': True,
'Link.GenerateDebugInformation': True,
}
DEBUG_CONFIG = {
    'Config.UseDebugLibraries': True,
'VC.LinkIncremental': True,
'ClCompile.Optimization': 'Disabled',
'ClCompile.PreprocessorDefinitions': ['WIN32', '_DEBUG', '_WINDOWS'],
}
RELEASE_CONFIG = {
'Config.WholeProgramOptimization': True,
'Config.UseDebugLibraries': False,
'VC.LinkIncremental': False,
'ClCompile.Optimization': 'MaxSpeed',
'ClCompile.FunctionLevelLinking': True,
'ClCompile.IntrinsicFunctions': True,
    'ClCompile.PreprocessorDefinitions': ['WIN32', 'NDEBUG', '_WINDOWS'],
'Link.GenerateDebugInformation': True,
'Link.EnableCOMDATFolding': True,
'Link.OptimizeReferences': True,
}
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractNotoriousOnlineBlogspotCom.py
|
Python
|
bsd-3-clause
| 569
| 0.033392
|
def extractNotoriousOnlineBlogspotCom(item):
'''
    Parser for 'notorious-online.blogspot.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
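# Hypothetical walk-through: an item titled 'Vol. 2 Chapter 5' carrying the
# 'PRC' tag yields a 'translated' release message; items without volume or
# chapter info, or with 'preview' in the title, return None above.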
|
whatisjasongoldstein/beagle
|
beagle/__init__.py
|
Python
|
mit
| 52
| 0
|
from .app import App
from .decorators import action
|
sven-hm/pythonocc-core
|
src/addons/Display/WebGl/threejs_renderer.py
|
Python
|
lgpl-3.0
| 12,135
| 0.003049
|
##Copyright 2011-2014 Thomas Paviot (tpaviot@gmail.com)
##
##This file is part of pythonOCC.
##
##pythonOCC is free software: you can redistribute it and/or modify
##it under the terms of the GNU Lesser General Public License as published by
##the Free Software Foundation, either version 3 of the License, or
##(at your option) any later version.
##
##pythonOCC is distributed in the hope that it will be useful,
##but WITHOUT ANY WARRANTY; without even the implied warranty of
##MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##GNU Lesser General Public License for more details.
##
##You should have received a copy of the GNU Lesser General Public License
##along with pythonOCC. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import webbrowser
from OCC.Visualization import Tesselator
import OCC
from time import time
import os
import tempfile
HEADER = """
<head>
<title>pythonOCC @VERSION@ webgl renderer</title>
<meta name='Author' content='Thomas Paviot - tpaviot@gmail.com'>
<meta name='Keywords' content='WebGl,pythonOCC'>
<meta charset="utf-8">
<style type="text/css">
body {
background-color: @background-color@;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 96%;
width: 96%;
color: #808080;
padding: 5px;
font-family: Monospace;
font-size: 13px;
text-align: right;
opacity: 1;
}
#pythonocc_rocks {
padding: 5px;
position: absolute;
left: 1%;
top: 85%;
height: 60px;
width: 305px;
border-radius: 5px;
border: 2px solid #f7941e;
opacity: 0.7;
font-family: Arial;
background-color: #414042;
color: #ffffff;
font-size: 16px;
opacity: 0.7;
}
a {
color: #f7941e;
text-decoration: none;
}
a:hover {
color: #ffffff;
}
</style>
</head>
"""
BODY = """
<body>
<div id="container"></div>
<div id="info">
WebGL engine by <a href="http://github.com/mrdoob/three.js" target="_blank">three.js</a>
</div>
<div id="pythonocc_rocks">
<b>pythonOCC @VERSION@ WebGL renderer</b><hr>
CAD in a browser
<a style="font-size:14px;" href=http://www.pythonocc.org>http://www.pythonocc.org</a>
</div>
<script type="text/javascript" src="@Three.jsPath@/three.min.js"></script>
<script type="text/javascript" src="@Three.jsPath@/OrbitControls.js"></script>
<script type="text/javascript" src="@Three.jsPath@/stats.min.js"></script>
@VertexShaderDefinition@
@FragmentShaderDefinition@
<script type="text/javascript" src="./shape.js"></script>
<script type="text/javascript">
var camera, scene, renderer, object, stats, container, shape_material;
var targetRotation = 0;
var targetRotationOnMouseDown = 0;
var targetRotationY = 0;
var targetRotationYOnMouseDown = 0;
var mouseX = 0;
var mouseXOnMouseDown = 0;
var mouseY = 0;
var mouseYOnMouseDown = 0;
var moveForward = false;
var moveBackward = false;
var moveLeft = false;
var moveRight = false;
var moveUp = false;
            var moveDown = false;
var windowHalfX = window.innerWidth / 2;
var windowHalfY = window.innerHeight / 2;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 200 );
camera.position.z = 100;
controls = new THREE.OrbitControls( camera );
scene = new THREE.Scene();
scene.add( new THREE.AmbientLight(0x101010));
directionalLight = new THREE.DirectionalLight( 0xffffff );
directionalLight.position.x = 1;
directionalLight.position.y = 1;
directionalLight.position.z = 2;
directionalLight.position.normalize();
scene.add( directionalLight );
light1 = new THREE.PointLight( 0xffffff );
scene.add( light1 );
@Uniforms@
@ShaderMaterialDefinition@
phong_material = new THREE.MeshPhongMaterial( { ambient: 0x000000,
color: 0xffaa00,
specular: 0x555555,
shininess: 30 });
object = new THREE.Mesh( new Shape(), @ShapeMaterial@);
object.overdraw = true;
object.rotation.x = -1.57/2;
scene.add( object );
renderer = new THREE.WebGLRenderer({antialias:true});
renderer.setClearColor("@background-color@");
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
renderer.shadowMapEnabled = true;
renderer.shadowMapType = THREE.PCFShadowMap;
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
container.appendChild( stats.domElement );
window.addEventListener( 'resize', onWindowResize, false );
}
function animate() {
requestAnimationFrame( animate );
controls.update();
render();
stats.update();
}
function render() {
@IncrementTime@
renderer.render( scene, camera );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
</script>
</body>
"""
class HTMLHeader(object):
def __init__(self, background_color='#000000'):
self._background_color = background_color
def get_str(self):
header_str = HEADER.replace('@background-color@', '%s' % self._background_color)
header_str = header_str.replace('@VERSION@', OCC.VERSION)
return header_str
class HTMLBody(object):
def __init__(self, background_color='#000000', vertex_shader=None,
fragment_shader=None, uniforms=None):
self._background_color = background_color
self._vertex_shader = vertex_shader
self._fragment_shader = fragment_shader
self._uniforms = uniforms
def get_str(self):
# get the location where pythonocc is running from
threejs_build_location = os.sep.join([OCC.__path__[0], 'Display', 'WebGl', 'js'])
body_str = BODY.replace('@Three.jsPath@', '%s' % threejs_build_location)
body_str = body_str.replace('@background-color@', '%s' % self._background_color)
body_str = body_str.replace('@VERSION@', OCC.VERSION)
        if (self._vertex_shader is not None) and (self._fragment_shader is not None):
vertex_shader_string_definition = '<script type="x-shader/x-vertex" id="vertexShader">%s</script>' % self._vertex_shader
fragment_shader_string_definition = '<script type="x-shader/x-fragment" id="fragmentShader">%s</script>' % self._fragment_shader
shader_material_definition = """
var vertexShader = document.getElementById( 'vertexShader' ).textContent;
var fragmentShader = document.getElementById( 'fragmentShader' ).textContent;
var shader_material = new THREE.ShaderMaterial( { uniforms: uniforms,
vertexShader: vertexShader,
fragmentShader: fragmentShader } );
"""
if self._uniforms
|
hoevenvd/weewx_poller
|
bin/weewx/almanac.py
|
Python
|
gpl-3.0
| 17,450
| 0.009685
|
#
# Copyright (c) 2009, 2011, 2012 Tom Keffer <tkeffer@gmail.com>
#
# See the file LICENSE.txt for your full rights.
#
# $Revision: 1046 $
# $Author: tkeffer $
# $Date: 2013-02-21 06:38:26 -0800 (Thu, 21 Feb 2013) $
#
"""Almanac data
This module can optionally use PyEphem, which offers high quality
astronomical calculations. See http://rhodesmill.org/pyephem. """
import time
import sys
import math
import weeutil.Moon
import weewx.units
# If the user has installed ephem, use it. Otherwise, fall back to the weeutil algorithms:
try:
import ephem
except ImportError:
import weeutil.Sun
# NB: In order to avoid an 'autocall' bug in Cheetah versions before 2.1,
# this class must not be a "new-style" class.
class Almanac():
"""Almanac data.
ATTRIBUTES.
As a minimum, the following attributes are available:
sunrise: Time (local) upper limb of the sun rises above the horizon, formatted using the format 'timeformat'.
sunset: Time (local) upper limb of the sun sinks below the horizon, formatted using the format 'timeformat'.
    moon_phase: A description of the moon phase (eg. "new moon", "Waxing crescent", etc.)
moon_fullness: Percent fullness of the moon (0=new moon, 100=full moon)
    If the module 'ephem' is used, then many other attributes are available.
Here are a few examples:
        sun.rise: Time upper limb of sun will rise above the horizon today in unix epoch time
sun.transit: Time of transit today (sun over meridian) in unix epoch time
sun.previous_sunrise: Time of last sunrise in unix epoch time
sun.az: Azimuth (in degrees) of sun
sun.alt: Altitude (in degrees) of sun
mars.rise: Time when upper limb of mars will rise above horizon today in unix epoch time
mars.ra: Right ascension of mars
etc.
EXAMPLES (note that these will only work in the Pacific Time Zone)
>>> t = 1238180400
>>> print timestamp_to_string(t)
2009-03-27 12:00:00 PDT (1238180400)
>>> almanac = Almanac(t, 46.0, -122.0)
Test backwards compatibility with attribute 'moon_fullness':
>>> print "Fullness of the moon (rounded) is %.2f%% [%s]" % (almanac.moon_fullness, almanac.moon_phase)
Fullness of the moon (rounded) is 2.00% [new (totally dark)]
Now get a more precise result for fullness of the moon:
>>> print "Fullness of the moon (more precise) is %.2f%%" % almanac.moon.moon_phase
Fullness of the moon (more precise) is 1.70%
Test backwards compatibility with attributes 'sunrise' and 'sunset'
>>> print "Sunrise, sunset:", almanac.sunrise, almanac.sunset
Sunrise, sunset: 06:56 19:30
Get sunrise, sun transit, and sunset using the new 'ephem' syntax:
>>> print "Sunrise, sun transit, sunset:", almanac.sun.rise, almanac.sun.transit, almanac.sun.set
Sunrise, sun transit, sunset: 06:56 13:13 19:30
Do the same with the moon:
>>> print "Moon rise, transit, set:", almanac.moon.rise, almanac.moon.transit, almanac.moon.set
Moon rise, transit, set: 06:59 14:01 21:20
Exercise equinox, solstice routines
>>> print almanac.next_vernal_equinox
20-Mar-2010 10:32
>>> print almanac.next_autumnal_equinox
22-Sep-2009 14:18
>>> print almanac.next_summer_solstice
20-Jun-2009 22:45
>>> print almanac.previous_winter_solstice
21-Dec-2008 04:03
>>> print almanac.next_winter_solstice
21-Dec-2009 09:46
Exercise moon state routines
>>> print almanac.next_full_moon
09-Apr-2009 07:55
>>> print almanac.next_new_moon
24-Apr-2009 20:22
>>> print almanac.next_first_quarter_moon
02-Apr-2009 07:33
Now location of the sun and moon
>>> print "Solar azimuth, altitude = (%.2f, %.2f)" % (almanac.sun.az, almanac.sun.alt)
Solar azimuth, altitude = (154.14, 44.02)
>>> print "Moon azimuth, altitude = (%.2f, %.2f)" % (almanac.moon.az, almanac.moon.alt)
Moon azimuth, altitude = (133.55, 47.89)
Try the pyephem "Naval Observatory" example.
>>> t = 1252252800
>>> print timestamp_to_gmtime(t)
2009-09-06 16:00:00 UTC (1252252800)
>>> atlanta = Almanac(t, 33.8, -84.4, pressure=0, horizon=-34.0/60.0)
>>> # Print it in GMT, so it can easily be compared to the example:
>>> print timestamp_to_gmtime(atlanta.sun.previous_rising.raw)
2009-09-06 11:14:56 UTC (1252235696)
>>> print timestamp_to_gmtime(atlanta.moon.next_setting.raw)
2009-09-07 14:05:29 UTC (1252332329)
Now try the civil twilight examples:
>>> print timestamp_to_gmtime(atlanta(horizon=-6).sun(use_center=1).previous_rising.raw)
2009-09-06 10:49:40 UTC (1252234180)
>>> print timestamp_to_gmtime(atlanta(horizon=-6).sun(use_center=1).next_setting.raw)
2009-09-07 00:21:22 UTC (1252282882)
"""
def __init__(self, time_ts, lat, lon,
altitude=None, # Use 'None' in case a bad value is passed in
temperature=None, # "
pressure=None, # "
horizon=None, # "
moon_phases=weeutil.Moon.moon_phases,
formatter=weewx.units.Formatter()):
"""Initialize an instance of Almanac
time_ts: A unix epoch timestamp with the time of the almanac. If None, the
present time will be used.
lat, lon: Observer's location
altitude: Observer's elevation in **meters**. [Optional. Default is 0 (sea level)]
temperature: Observer's temperature in **degrees Celsius**. [Optional. Default is 15.0]
pressure: Observer's atmospheric pressure in **mBars**. [Optional. Default is 1010]
horizon: Angle of the horizon in degrees [Optional. Default is zero]
moon_phases: An array of 8 strings with descriptions of the moon
phase. [optional. If not given, then weeutil.Moon.moon_phases will be used]
formatter: An instance of weewx.units.Formatter() with the formatting information
to be used.
"""
self.time_ts = time_ts if time_ts else time.time()
self.time_djd = timestamp_to_djd(self.time_ts)
self.lat = lat
self.lon = lon
self.altitude = altitude if altitude is not None else 0.0
self.temperature = temperature if temperature is not None else 15.0
self.pressure = pressure if pressure is not None else 1010.0
self.horizon = horizon if horizon is not None else 0.0
self.moon_phases = moon_phases
self.formatter = formatter
(y,m,d) = time.localtime(self.time_ts)[0:3]
(self.moon_index, self._moon_fullness) = weeutil.Moon.moon_phase(y, m, d)
self.moon_phase = self.moon_phases[self.moon_index]
# Check to see whether the user has module 'ephem'.
if 'ephem' in sys.modules:
self.hasExtras = True
else:
# No ephem package. Use the weeutil algorithms, which supply a minimum of functionality
(sunrise_utc, sunset_utc) = weeutil.Sun.sunRiseSet(y, m, d, self.lon, self.lat)
# The above function returns its results in UTC hours. Convert
# to a local time tuple
sunrise_tt = weeutil.weeutil.utc_to_local_tt(y, m, d, sunrise_utc)
sunset_tt = weeutil.weeutil.utc_to_local_tt(y, m, d, sunset_utc)
self._sunrise = time.strftime("%H:%M", sunrise_tt)
self._sunset = time.strftime("%H:%M", sunset_tt)
self.hasExtras = False
# Shortcuts, used for backwards compatibility
@property
def sunrise(self):
return self.sun.rise if self.hasExtras else self._sunrise
@property
def sunset(self):
return self.sun.set if self.hasExtras else self._sunset
@property
def moon_fullness(self):
return int(self.moon.moon_phase+0.5) if self.hasExtras else self._moon_fullness
# What follows is a bit of Python wizardry to allow syntax such as:
# almanac(horizon=-0.5
|
rogeriofalcone/treeio
|
sales/migrations/0003_treeiocurrency.py
|
Python
|
mit
| 30,418
| 0.007594
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from treeio.finance.models import Currency
from treeio.sales.models import SaleOrder, Opportunity, OrderedProduct
class Migration(DataMigration):
def forwards(self, orm):
"Add currencies to financial items"
try:
currency = Currency.objects.get(is_default=True)
except:
currency = Currency.objects.create()
currency.code = "USD"
currency.name = "USD United States of America, Dollars"
currency.symbol = u"$"
currency.is_default = True
currency.save()
for obj in SaleOrder.objects.all():
obj.currency = currency
obj.save()
for obj in Opportunity.objects.all():
obj.amount_currency = currency
obj.amount_display = obj.amount
obj.save()
for obj in OrderedProduct.objects.all():
obj.rate = obj.product.sell_price
obj.rate_display = obj.rate
obj.save()
for obj in SaleOrder.objects.all():
obj.update_total()
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.accessentity': {
'Meta': {'object_name': 'AccessEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.comment': {
'Meta': {'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']", 'null': 'True', 'blank': 'True'}),
'body': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dislikes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments_disliked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments_liked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"})
},
'core.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'accessentity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.AccessEntity']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_set'", 'null': 'True', 'to': "orm['core.Group']"})
},
'core.object': {
'Meta': {'object_name': 'Object'},
            'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Comment']"}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'objects_created'", 'null': 'True', 'to': "orm['core.User']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dislikes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_disliked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'full_access': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_full_access'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.AccessEntity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_liked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'links': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'links_rel_+'", 'null': 'True', 'to': "orm['core.Object']"}),
'nuvius_resource': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'object_name': ('dj
|
artem-smotrakov/httpooh
|
config.py
|
Python
|
gpl-3.0
| 285
| 0.003509
|
#!/usr/bin/python
# contains configuration, parameters can be accessed as attributes
class Config:
# init from argparse.ArgumentParser
def __init__(self, parser):
self.args = vars(parser.parse_args())
def __getattr__(self, name):
return self.args[name]
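# A minimal usage sketch (names are illustrative, not from the original file):
#   parser = argparse.ArgumentParser()
#   parser.add_argument('--port', default=8080)
#   config = Config(parser)
#   config.port  # resolved through __getattr__ via the parsed-args dict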
|
vlttnv/scrimfinder2
|
scrim2/views/home.py
|
Python
|
gpl-2.0
| 2,491
| 0.00562
|
from flask import Blueprint, render_template, json, g, current_app, redirect, url_for, session
from datetime import datetime as dt
from scrim2.extensions import oid, db, lm
from scrim2.models import User
from sqlalchemy.orm.exc import NoResultFound
from flask.ext.login import login_user, logout_user, current_user
import requests, re
home_bp = Blueprint('home_bp', __name__)
@home_bp.route('/')
def index():
if current_user.is_authenticated():
return redirect(url_for('live_bp.live'))
return render_template('/home/index.html')
@home_bp.route('/login')
@oid.loginhandler
def login():
"""Log in via Steam OpenID
"""
if g.user is not None:
return redirect(oid.get_next_url())
else:
return oid.try_login('http://steamcommunity.com/openid')
@oid.after_login
def after_login(resp):
"""
"""
steam_id_regex = re.compile('steamcommunity.com/openid/id/(.*?)$')
steam_id = steam_id_regex.search(resp.identity_url).group(1)
try:
g.user = User.query.filter_by(steam_id=steam_id).one()
user_info = get_user_info(g.user.steam_id)
login_user(g.user)
return redirect(oid.get_next_url())
except NoResultFound:
print "CREATIN USER"
g.user = User()
steam_data = get_user_info(steam_id)
g.user.steam_id = steam_id
g.user.nickname = steam_data['personaname']
g.user.avatar_url = steam_data['avatar']
g.user.avatar_url_full = steam_data['avatarfull']
g.user.join_date = dt.utcnow()
db.session.add(g.user)
db.session.commit()
login_user(g.user)
return redirect(url_for('home_bp.index'))
@home_bp.route('/logout')
def logout():
logout_user()
return redirect(url_for('home_bp.index'))
def get_user_info(steam_id):
"""
Return player summaries of the user that has the steam_id.
Example:
{
u'steamid': u'steamid',
u'personaname': u'personaname',
...
}
    See: https://developer.valvesoftware.com/wiki/Steam_Web_API#GetPlayerSummaries_.28v0002.29
"""
api = 'http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?'
params = {
'key': current_app.config['STEAM_API_KEY'],
'steamids': steam_id,
'format': json
}
user_info = requests.get(url=api, params=params)
    user_info_json = user_info.json()
    # guard against an empty player list ([0] alone would raise IndexError)
    players = user_info_json['response']['players']
    return players[0] if players else {}
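# Hypothetical usage: get_user_info('76561197960435530') returns a dict with
# keys such as 'personaname', 'avatar' and 'avatarfull' (consumed in after_login).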
|
caioserra/apiAdwords
|
examples/adspygoogle/dfa/v1_19/get_user_roles.py
|
Python
|
apache-2.0
| 2,451
| 0.008568
|
#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example displays user role name, ID, subnetwork ID, number of assigned
users, and assigned permissions for the given search criteria. Results are
limited to the first 10 records.
Tags: userrole.getUserRoles
"""
__author__ = 'api.jdilallo@gmail.com (Joseph DiLallo)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfaClient
def main(client):
# Initialize appropriate service.
user_role_service = client.GetUserRoleService(
'https://advertisersapitest.doubleclick.net', 'v1.19')
# Set user role search criteria.
user_role_search_criteria = {
'pageSize': '10'
}
# Get user roles that match the search criteria.
results = user_role_service.GetUserRoles(user_role_search_criteria)[0]
# Display user role names, IDs, subnetwork IDs, number of assigned users, and
# assigned permissions.
if results['userRoles']:
for user_role in results['userRoles']:
print ('User role with name \'%s\', ID \'%s\', subnetwork ID \'%s\', and '
'assigned to \'%s\' users was found.'
% (user_role['name'], user_role['id'], user_role['subnetworkId'],
user_role['totalAssignedUsers']))
if user_role['permissions']:
print ' The above user role has the following permissions:'
for permission in user_role['permissions']:
print (' Permission with name \'%s\' and ID \'%s\'.'
% (permission['name'], permission['id']))
else:
print ' The above user role has no permissions assigned.'
else:
print 'No user roles found for your criteria.'
if __name__ == '__main__':
# Initialize client object.
client = DfaClient(path=os.path.join('..', '..', '..', '..'))
main(client)
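
# A paging sketch (assumption: like other DFA search criteria objects, the user
# role criteria also accept a 'pageNumber' field) for walking past the first
# 10 records returned above:
#
#   page_number = 1
#   while True:
#       criteria = {'pageSize': '10', 'pageNumber': str(page_number)}
#       page = user_role_service.GetUserRoles(criteria)[0]
#       if not page.get('userRoles'):
#           break
#       # ... process page['userRoles'] as in main() above ...
#       page_number += 1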
|
cherepaha/PyDLV
|
demos/plot_compare_dlv_three_blocks.py
|
Python
|
gpl-3.0
| 2,266
| 0.01015
|
import pandas as pd
from matplotlib import cm
import matplotlib.pyplot as plt
from pydlv import dl_model_3, data_reader, data_analyser, dl_generator, dl_plotter, trajectory_plotter
'''
This script demonstrates how to plot 3d surfaces of decision landscapes fitted
to blocks of trajectories, using coefficients from the previously generated csv file.
'''
def plot_surfaces(dlg, fit_params, subj_id, blocks, colors, labels):
dlp = dl_plotter.DLPlotter(elev=10, azim=69)
for i, block_no in enumerate(blocks):
x, y, dl = dlg.get_model_dl(fit_params.loc[subj_id, block_no][2:2+dlg.model.n_params])
dlp.plot_surface(x, y, dl, color=colors[i], alpha=0.8)
dlp.add_legend(colors, labels)
plt.savefig('figures/blocks_%i_dlv.pdf' % (subj_id))
def plot_trajectories(data, subj_id, blocks, colors, labels):
    tp = trajectory_plotter.TrajectoryPlotter()
for i, block_no in enumerate(blocks):
block_trajectories = data[(data.subj_id==subj_id) & (data.block_no==block_no)]
tp.plot_mean_trajectories(block_trajectories, colors[i], labels[i])
block_info = block_trajectories.groupby('trial_no').first().groupby('high_chosen') \
.mean()[['motion_time', 'max_d']]
print('\n %s\n' % (labels[i]))
print(block_info)
tp.add_legend_mean_traj(colors, labels)
plt.savefig('figures/blocks_%i_traj.pdf' % (subj_id))
def compare_dlv(subj_id, blocks):
fit_params = pd.read_csv('csv/fit_params_by_block_method_9.csv',
index_col=['subj_id', 'block_no'], header=0)
labels = ['Block %i' % (block) for block in blocks]
cmap = cm.viridis
colors = [cmap(0.7), cmap(0.35), cmap(0.1)]
model = dl_model_3.DLModel3()
dlg = dl_generator.DLGenerator(model)
plot_surfaces(dlg, fit_params, subj_id, blocks, colors, labels)
dr = data_reader.DataReader()
data = dr.get_processed_data(path='csv/processed_data_high_low.csv')
plot_trajectories(data, subj_id, blocks, colors, labels)
da = data_analyser.DataAnalyser()
stats = da.get_block_stats(data)
print('\n %s\n' % ('Block stats'))
print(stats.loc[subj_id])
#subj_id = 233
subj_id = 1334
blocks = [1, 2, 3]
compare_dlv(subj_id, blocks)
|
timkpaine/lantern
|
lantern/plotting/plot_bokeh.py
|
Python
|
apache-2.0
| 5,812
| 0.001721
|
import copy
import pandas as pd
from bokeh.plotting import figure, show, output_notebook
from bokeh.models import Legend, Span
# from bokeh.models import HoverTool
from ..utils import in_ipynb
from .plotobj import BasePlot
from .plotutils import get_color
_INITED = False
class BokehPlot(BasePlot):
def __init__(self, size=None, theme=None):
        global _INITED
        if not _INITED:
            if in_ipynb():
                output_notebook(hide_banner=True)
            _INITED = True
size = size or (800, 500)
self.width = size[0]
self.height = size[1]
self.figure = figure(toolbar_location="below",
toolbar_sticky=False,
x_axis_type='datetime',
plot_width=self.width,
plot_height=self.height) # TODO remove
self.legend = []
def show(self, title='', xlabel='', ylabel='', xaxis=True, yaxis=True, xticks=True, yticks=True, legend=True, grid=True, **kwargs):
# self.figure.add_tools(*[HoverTool(
# tooltips=[('x', '@x{%F}'), ('y', '@y')],
# formatters={'x': 'datetime'},
# mode='vline'
# ) for _ in data])
self.figure.outline_line_color = None
# vline = Span(location=0, dimension='height', line_color='red', line_width=3)
hline = Span(location=0, dimension='width', line_color='black', line_width=1)
self.figure.renderers.append(hline)
        if xlabel:
            self.figure.xaxis.axis_label = xlabel
        if ylabel:
            self.figure.yaxis.axis_label = ylabel
        if title:
            self.figure.title.text = title
if legend:
self.figure.legend.location = (self.width + 10, self.height + 10)
            legend = Legend(items=self.legend, location=(10, 100))
legend.click_policy = "mute"
self.figure.add_layout(legend, 'right')
else:
self.figure.legend.location = None
if not grid:
self.figure.xgrid.grid_line_color = None
self.figure.ygrid.grid_line_color = None
# FIXME
if not yaxis:
for ax in self.figure.yaxis:
ax.axis_line_color = 'white'
if not xaxis:
for ax in self.figure.xaxis:
ax.axis_line_color = 'white'
# Turn off labels:
# self.figure.xaxis.major_label_text_font_size = '0pt'
show(self.figure)
return self.figure
def area(self, data, color=None, y_axis='left', stacked=False, **kwargs):
data2 = data.append(data.iloc[-1] * 0)
data2 = data2.append(data2.iloc[0] * 0)
        data2 = data2.sort_index()
x, y = copy.deepcopy(data2.iloc[0]), copy.deepcopy(data2.iloc[1])
data2.iloc[0], data2.iloc[1] = y, x
for i, col in enumerate(data):
c = get_color(i, col, color)
fig = self.figure.patch(x=data2.index, y=data2[col].values, legend=col, fill_alpha=.2, color=c, **kwargs)
self.legend.append((col, [fig]))
# for stacked: https://bokeh.pydata.org/en/latest/docs/gallery/brewer.html
# p.patches([x2] * areas.shape[1], [areas[c].values for c in areas], color=colors, alpha=0.8, line_color=None)
def _stacked(df):
df_top = df.cumsum(axis=1)
df_bottom = df_top.shift(axis=1).fillna({'y0': 0})[::-1]
df_stack = pd.concat([df_bottom, df_top], ignore_index=True)
return df_stack
def bar(self, data, color=None, y_axis='left', stacked=False, **kwargs):
# stacked bar: https://bokeh.pydata.org/en/latest/docs/gallery/bar_stacked.html
# stacked bar: https://bokeh.pydata.org/en/latest/docs/gallery/bar_stacked_split.html
c = []
for i, col in enumerate(data):
c.append(get_color(i, col, color))
fig = self.figure.vbar(x=data.index, top=data[col].values, width=.9, color=c, **kwargs)
self.legend.append((col, [fig]))
def hist(self, data, color=None, y_axis='left', stacked=False, **kwargs):
raise NotImplementedError()
def hline(self, y, color=None, **kwargs):
raise NotImplementedError()
def hspan(self, yhigh, ylow=0, color=None, **kwargs):
raise NotImplementedError()
# def candlestick(self, data):
# # https://bokeh.pydata.org/en/latest/docs/gallery/candlestick.html
def line(self, data, color=None, y_axis='left', **kwargs):
for i, col in enumerate(data):
c = get_color(i, col, color)
fig = self.figure.line(x=data.index, y=data[col].values, legend=col, color=c, **kwargs)
self.legend.append((col, [fig]))
def scatter(self, data, color=None, y_axis='left', **kwargs):
for i, col in enumerate(data):
            if i == 0:
                continue  # don't scatter against self
x = data.columns[0]
y = data.columns[i]
c = get_color(i, col, color)
fig = self.figure.scatter(x=data[x],
y=data[y],
legend='%s vs %s' % (x, y),
fill_color=c,
fill_alpha=0.6,
line_color=None,
**kwargs)
self.legend.append(('%s vs %s' % (x, y), [fig]))
def step(self, data, color=None, y_axis='left', **kwargs):
raise NotImplementedError()
def vline(self, x, color=None, **kwargs):
raise NotImplementedError()
def vspan(self, xhigh, xlow=0, color=None, **kwargs):
raise NotImplementedError()
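
# A minimal usage sketch (meant for a notebook, where output_notebook() is
# active; the DataFrame here is made up for illustration):
#
#   import numpy as np
#   df = pd.DataFrame(np.random.randn(100, 2).cumsum(axis=0),
#                     index=pd.date_range('2018-01-01', periods=100),
#                     columns=['a', 'b'])
#   plot = BokehPlot(size=(800, 400))
#   plot.line(df)
#   plot.show(title='Random walk', xlabel='date', ylabel='value')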
|
|
fstagni/DIRAC
|
FrameworkSystem/scripts/dirac-status-component.py
|
Python
|
gpl-3.0
| 1,235
| 0.008097
|
#!/usr/bin/env python
"""
Status of DIRAC components using runsvstat utility
"""
#
from __future__ import print_function
from DIRAC.Core.Base import Script
Script.disableCS()
Script.setUsageMessage('\n'.join([__doc__.split('\n')[1],
'Usage:',
' %s [option|cfgfile] ... [system [service|agent]]' % Script.scriptName,
'Arguments:',
' system: Name of the system for the component (default *: all)',
                                  '  service|agent: Name of the particular component (default *: all)']))
Script.parseCommandLine()
args = Script.getPositionalArgs()
from DIRAC.FrameworkSystem.Client.ComponentInstaller import gComponentInstaller
__RCSID__ = "$Id$"
if len(args) > 2:
Script.showHelp()
exit(-1)
system = '*'
component = '*'
if len(args) > 0:
system = args[0]
if system != '*':
if len(args) > 1:
component = args[1]
#
gComponentInstaller.exitOnError = True
#
result = gComponentInstaller.getStartupComponentStatus([system, component])
if not result['OK']:
print('ERROR:', result['Message'])
exit(-1)
gComponentInstaller.printStartupStatus(result['Value'])
|
tfroehlich82/saleor
|
saleor/product/migrations/0021_add_hstore_extension.py
|
Python
|
bsd-3-clause
| 312
| 0
|
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.postgres.operations import HStoreExtension
class Migration(migrations.Migration):
dependencies = [
        ('product', '0020_attribute_data_to_class'),
]
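    # HStoreExtension issues CREATE EXTENSION IF NOT EXISTS hstore, which
    # typically requires superuser privileges on the target database.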
operations = [
HStoreExtension(),
]
|
tyagow/AdvancingTheBlog
|
src/comments/forms.py
|
Python
|
mit
| 304
| 0.016447
|
from django import forms
class CommentForm(forms.Form):
content_type = forms.CharField(widget=forms.HiddenInput)
object_id = forms.CharField(widget=forms.HiddenInput)
    parent_id = forms.IntegerField(widget=forms.HiddenInput, required=False)
    content = forms.CharField(label='', widget=forms.Textarea)
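
# A minimal usage sketch (hypothetical view code; `obj` is the model instance
# being commented on):
#
#   from django.contrib.contenttypes.models import ContentType
#   form = CommentForm(initial={
#       'content_type': ContentType.objects.get_for_model(obj).model,
#       'object_id': obj.id,
#   })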
|