Dataset schema (one record per entry below; each record's prefix, middle, and
suffix fields are concatenated here into a single code listing):

  repo_name: stringlengths 5 to 100
  path:      stringlengths 4 to 231
  language:  stringclasses (1 value)
  license:   stringclasses (15 values)
  size:      int64, 6 to 947k
  score:     float64, 0 to 0.34
  prefix:    stringlengths 0 to 8.16k
  middle:    stringlengths 3 to 512
  suffix:    stringlengths 0 to 8.17k
repo_name: haikuginger/beekeeper
path: test/test_variable_handers.py
language: Python
license: mit
size: 3,412
score: 0.005569
content:
from __future__ import unicode_literals

import unittest
from functools import partial

from beekeeper.variable_handlers import render
import beekeeper.variable_handlers

class VariableReceiver(object):

    def execute(self, var_type, **kwargs):
        render(self, var_type, **kwargs)

    def receive(self, expected, *args, **kwargs):
        if isinstance(expected, list):
            if kwargs:
                self.assertIn(kwargs, expected)
            else:
                self.assertIn(args[0], expected)
        elif kwargs:
            self.assertEqual(expected, kwargs)
        else:
            self.assertEqual(expected, args[0])

class fakeuuid:

    def __init__(self):
        self.hex = 'xxx'

class VariableHandlerTest(VariableReceiver, unittest.TestCase):

    def test_data(self):
        self.set_headers = partial(self.receive, {'Content-Type': 'text/plain'})
        self.set_data = partial(self.receive, b'this is text')
        self.execute('data', variable={'mimetype': 'text/plain', 'value': 'this is text'})

    def test_http_auth(self):
        self.set_headers = partial(self.receive, {'Authorization': 'Basic dXNlcm5hbWU6cGFzc3dvcmQ='})
        username = dict(value='username')
        password = dict(value='password')
        self.execute('http_basic_auth', username=username, password=password)

    def test_bearer_auth(self):
        self.set_headers = partial(self.receive, {'Authorization': 'Bearer PUT_YOUR_TOKEN_HERE'})
        var = dict(value='PUT_YOUR_TOKEN_HERE')
        self.execute('bearer_token', var=var)

    def test_multiple_bearer(self):
        self.set_headers = partial(self.receive, {'Authorization': 'Nope'})
        with self.assertRaises(Exception):
            self.execute('bearer_token', var1='thing', var2='otherthing')

    def test_http_form(self):
        expected = [
            b'y=thing&x=whatever',
            b'x=whatever&y=thing'
        ]
        self.set_headers = partial(self.receive, {'Content-Type': 'application/x-www-form-urlencoded'})
        self.set_data = partial(self.receive, expected)
        var = dict(x={'value': 'whatever'}, y={'value': 'thing'})
        self.execute('http_form', **var)

    def test_multipart(self):
        self.old_uuid4 = beekeeper.variable_handlers.uuid4
        beekeeper.variable_handlers.uuid4 = fakeuuid
        should = '\n--xxx\nContent-Disposition: form-data; name="x"\n\nwhatever\n--xxx\nContent-Disposition: form-data; name="y"; filename="thing.name"\nContent-Type: text/plain\n\nplaintexthere\n--xxx--'.encode('utf-8')
        othershould = '\n--xxx\nContent-Disposition: form-data; name="y"; filename="thing.name"\nContent-Type: text/plain\n\nplaintexthere\n--xxx\nContent-Disposition: form-data; name="x"\n\nwhatever\n--xxx--'.encode('utf-8')
        options = [should, othershould]
        self.set_headers = partial(self.receive, {'Content-Type': 'multipart/form-data; boundary=xxx'})
        self.set_data = partial(self.receive, options)
        var = {'x': {'value': 'whatever'}, 'y': {'value': 'plaintexthere', 'mimetype': 'text/plain', 'filename': 'thing.name'}}
        self.execute('multipart', **var)

    def test_cookies(self):
        expected = [{'Cookie': 'thing1; thing2'}, {'Cookie': 'thing2; thing1'}]
        var = {'a': {'value': 'thing1'}, 'b': {'value': 'thing2'}}
        self.set_headers = partial(self.receive, expected)
        self.execute('cookie', **var)
repo_name: liqd/a4-meinberlin
path: tests/ideas/rules/test_rules_view.py
language: Python
license: agpl-3.0
size: 4,529
score: 0
content:
import pytest
import rules

from adhocracy4.projects.enums import Access
from adhocracy4.test.helpers import freeze_phase
from adhocracy4.test.helpers import freeze_post_phase
from adhocracy4.test.helpers import freeze_pre_phase
from adhocracy4.test.helpers import setup_phase
from adhocracy4.test.helpers import setup_users
from meinberlin.apps.ideas import phases

perm_name = 'meinberlin_ideas.view_idea'

def test_perm_exists():
    assert rules.perm_exists(perm_name)

@pytest.mark.django_db
def test_pre_phase(phase_factory, idea_factory, user):
    phase, _, project, item = setup_phase(phase_factory, idea_factory,
                                          phases.CollectPhase)
    anonymous, moderator, initiator = setup_users(project)

    assert project.access == Access.PUBLIC
    with freeze_pre_phase(phase):
        assert rules.has_perm(perm_name, anonymous, item)
        assert rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)

@pytest.mark.django_db
def test_phase_active(phase_factory, idea_factory, user):
    phase, _, project, item = setup_phase(phase_factory, idea_factory,
                                          phases.CollectPhase)
    anonymous, moderator, initiator = setup_users(project)

    assert project.access == Access.PUBLIC
    with freeze_phase(phase):
        assert rules.has_perm(perm_name, anonymous, item)
        assert rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)

@pytest.mark.django_db
def test_phase_active_project_private(phase_factory, idea_factory,
                                      user, user2):
    phase, _, project, item = setup_phase(
        phase_factory, idea_factory, phases.CollectPhase,
        module__project__access=Access.PRIVATE)
    anonymous, moderator, initiator = setup_users(project)

    participant = user2
    project.participants.add(participant)

    assert project.access == Access.PRIVATE
    with freeze_phase(phase):
        assert not rules.has_perm(perm_name, anonymous, item)
        assert not rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, participant, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)

@pytest.mark.django_db
def test_phase_active_project_semipublic(phase_factory, idea_factory,
                                         user, user2):
    phase, _, project, item = setup_phase(
        phase_factory, idea_factory, phases.CollectPhase,
        module__project__access=Access.SEMIPUBLIC)
    anonymous, moderator, initiator = setup_users(project)

    participant = user2
    project.participants.add(participant)

    assert project.access == Access.SEMIPUBLIC
    with freeze_phase(phase):
        assert rules.has_perm(perm_name, anonymous, item)
        assert rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, participant, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)

@pytest.mark.django_db
def test_phase_active_project_draft(phase_factory, idea_factory, user):
    phase, _, project, item = setup_phase(phase_factory, idea_factory,
                                          phases.CollectPhase,
                                          module__project__is_draft=True)
    anonymous, moderator, initiator = setup_users(project)

    assert project.is_draft
    with freeze_phase(phase):
        assert not rules.has_perm(perm_name, anonymous, item)
        assert not rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)

@pytest.mark.django_db
def test_post_phase_project_archived(phase_factory, idea_factory, user):
    phase, _, project, item = setup_phase(phase_factory, idea_factory,
                                          phases.CollectPhase,
                                          module__project__is_archived=True)
    anonymous, moderator, initiator = setup_users(project)

    assert project.is_archived
    with freeze_post_phase(phase):
        assert rules.has_perm(perm_name, anonymous, item)
        assert rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)
repo_name: VoiDeD/Sick-Beard
path: sickbeard/processTV.py
language: Python
license: gpl-3.0
size: 9,409
score: 0.004251
content:
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os
import shutil
import time

import sickbeard
from sickbeard import common
from sickbeard import postProcessor
from sickbeard import db, helpers, exceptions
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard import logger

def delete_folder(folder, check_empty=True):
    # check if it's a folder
    if not ek.ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek.ek(os.listdir, folder)
        if check_files:
            return False

    # try deleting folder
    try:
        logger.log(u"Deleting folder: " + folder)
        shutil.rmtree(folder)
    except (OSError, IOError), e:
        logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
        return False

    return True

def logHelper(logMessage, logLevel=logger.MESSAGE):
    logger.log(logMessage, logLevel)
    return logMessage + u"\n"

def processDir(dirName, nzbName=None, method=None, recurse=False, pp_options={}):
    """
    Scans through the files in dirName and processes whatever media files it finds

    dirName: The folder name to look in
    nzbName: The NZB name which resulted in this folder being downloaded
    method: The method of postprocessing: Automatic, Script, Manual
    recurse: Boolean for whether we should descend into subfolders or not
    """

    returnStr = u""

    returnStr += logHelper(u"Processing folder: " + dirName, logger.DEBUG)

    # if they passed us a real dir then assume it's the one we want
    if ek.ek(os.path.isdir, dirName):
        dirName = ek.ek(os.path.realpath, dirName)

    # if they've got a download dir configured then use it
    elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
            and ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
        dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
        returnStr += logHelper(u"Trying to use folder: " + dirName, logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek.ek(os.path.isdir, dirName):
        returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG)
        return returnStr

    # TODO: check if it's failed and deal with it if it is
    if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
        returnStr += logHelper(u"The directory name indicates it failed to extract, cancelling", logger.DEBUG)
        return returnStr
    elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
        returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized, cancelling", logger.DEBUG)
        return returnStr
    elif ek.ek(os.path.basename, dirName).upper().startswith('_UNPACK'):
        returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked, skipping", logger.DEBUG)
        return returnStr

    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")
    for sqlShow in sqlResults:
        if dirName.lower().startswith(ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(os.path.realpath, sqlShow["location"]).lower():
            returnStr += logHelper(u"You're trying to post process an existing show directory: " + dirName, logger.ERROR)
            returnStr += u"\n"
            return returnStr

    fileList = ek.ek(os.listdir, dirName)

    # split the list into video files and folders
    folders = filter(lambda x: ek.ek(os.path.isdir, ek.ek(os.path.join, dirName, x)), fileList)

    # videoFiles, sorted by size, process biggest file first. Leaves smaller same named file behind
    mediaFiles = filter(lambda x: ek.ek(os.path.exists, ek.ek(os.path.join, dirName, x)), filter(helpers.isMediaFile, fileList))
    videoFiles = sorted(mediaFiles, key=lambda x: ek.ek(os.path.getsize, ek.ek(os.path.join, dirName, x)), reverse=True)
    remaining_video_files = list(videoFiles)
    num_videoFiles = len(videoFiles)

    # if there are no videofiles in parent and only one subfolder, pass the nzbName to child
    if num_videoFiles == 0 and len(folders) == 1:
        parent_nzbName = nzbName
    else:
        parent_nzbName = None

    # recursively process all the folders
    for cur_folder in folders:
        returnStr += u"\n"
        # use full path
        cur_folder = ek.ek(os.path.join, dirName, cur_folder)
        if helpers.is_hidden_folder(cur_folder):
            returnStr += logHelper(u"Ignoring hidden folder: " + cur_folder, logger.DEBUG)
        else:
            returnStr += logHelper(u"Recursively processing a folder: " + cur_folder, logger.DEBUG)
            returnStr += processDir(cur_folder, nzbName=parent_nzbName, recurse=True, method=method, pp_options=pp_options)

    remainingFolders = filter(lambda x: ek.ek(os.path.isdir, ek.ek(os.path.join, dirName, x)), fileList)

    if num_videoFiles == 0:
        returnStr += u"\n"
        returnStr += logHelper(u"There are no videofiles in folder: " + dirName, logger.DEBUG)

        # if there a no videofiles, try deleting empty folder
        if method != 'Manual':
            if delete_folder(dirName, check_empty=True):
                returnStr += logHelper(u"Deleted empty folder: " + dirName, logger.DEBUG)

    # if there's more than one videofile in the folder, files can be lost (overwritten) when nzbName contains only one episode.
    if num_videoFiles >= 2:
        nzbName = None

    # process any files in the dir
    for cur_video_file in videoFiles:
        cur_video_file_path = ek.ek(os.path.join, dirName, cur_video_file)

        if method == 'Automatic':
            # check if we processed this video file before
            cur_video_file_path_size = ek.ek(os.path.getsize, cur_video_file_path)

            myDB = db.DBConnection()
            search_sql = "SELECT tv_episodes.tvdbid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
            search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
            search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
            search_sql += " and history.resource LIKE ? and tv_episodes.file_size = ?"
            sql_results = myDB.select(search_sql, [cur_video_file_path, cur_video_file_path_size])

            if len(sql_results):
                returnStr += logHelper(u"Ignoring file: " + cur_video_file_path + " looks like it's been processed already", logger.DEBUG)
                continue

        try:
            returnStr += u"\n"
            processor = postProcessor.Po
repo_name: ThrawnCA/ckanext-qgov
path: ckanext/qgov/common/authenticator.py
language: Python
license: agpl-3.0
size: 2,108
score: 0.009013
content:
from ckan.lib.authenticator import UsernamePasswordAuthenticator
from ckan.model import User, Session

from sqlalchemy import Column, types, MetaData, DDL
from sqlalchemy.ext.declarative import declarative_base

from zope.interface import implements
from repoze.who.interfaces import IAuthenticator

Base = declarative_base()

import logging
log = logging.getLogger(__name__)

def intercept_authenticator():
    meta = MetaData(bind=Session.get_bind(), reflect=True)
    if not 'login_attempts' in meta.tables['user'].columns:
        log.warn("'login_attempts' field does not exist, adding...")
        DDL("ALTER TABLE public.user ADD COLUMN login_attempts SMALLINT DEFAULT 0").execute(Session.get_bind())
    UsernamePasswordAuthenticator.authenticate = QGOVAuthenticator().authenticate

class QGOVAuthenticator(UsernamePasswordAuthenticator):
    implements(IAuthenticator)

    def authenticate(self, environ, identity):
        if not 'login' in identity or not 'password' in identity:
            return None
        user = User.by_name(identity.get('login'))
        if user is None:
            log.debug('Login failed - username %r not found', identity.get('login'))
            return None

        qgovUser = Session.query(QGOVUser).filter_by(name=identity.get('login')).first()
        if qgovUser.login_attempts >= 10:
            log.debug('Login as %r failed - account is locked', identity.get('login'))
        elif user.validate_password(identity.get('password')):
            # reset attempt count to 0
            qgovUser.login_attempts = 0
            Session.commit()
            return user.name
        else:
            log.debug('Login as %r failed - password not valid', identity.get('login'))

        qgovUser.login_attempts += 1
        Session.commit()
        return None

class QGOVUser(Base):
    __tablename__ = 'user'
    __mapper_args__ = {'include_properties': ['id', 'name', 'login_attempts']}
    id = Column(types.UnicodeText, primary_key=True)
    name = Column(types.UnicodeText, nullable=False, unique=True)
    login_attempts = Column(types.SmallInteger)
repo_name: rchristie/mapclientplugins.meshgeneratorstep
path: mapclientplugins/meshgeneratorstep/model/mastermodel.py
language: Python
license: apache-2.0
size: 6,058
score: 0.007098
content:
import os
import json

from PySide2 import QtCore

from opencmiss.zinc.context import Context
from opencmiss.zinc.material import Material

from mapclientplugins.meshgeneratorstep.model.meshgeneratormodel import MeshGeneratorModel
from mapclientplugins.meshgeneratorstep.model.meshannotationmodel import MeshAnnotationModel
from mapclientplugins.meshgeneratorstep.model.segmentationdatamodel import SegmentationDataModel
from scaffoldmaker.scaffolds import Scaffolds_decodeJSON, Scaffolds_JSONEncoder

class MasterModel(object):

    def __init__(self, location, identifier):
        self._location = location
        self._identifier = identifier
        self._filenameStem = os.path.join(self._location, self._identifier)
        self._context = Context("MeshGenerator")
        self._timekeeper = self._context.getTimekeepermodule().getDefaultTimekeeper()
        self._timer = QtCore.QTimer()
        self._current_time = 0.0
        self._timeValueUpdate = None
        self._frameIndexUpdate = None
        self._initialise()
        self._region = self._context.createRegion()
        self._generator_model = MeshGeneratorModel(self._context, self._region, self._materialmodule)
        self._segmentation_data_model = SegmentationDataModel(self._region, self._materialmodule)
        self._annotation_model = MeshAnnotationModel()
        self._settings = {
            'segmentation_data_settings': self._segmentation_data_model.getSettings()
        }
        self._makeConnections()
        # self._loadSettings()

    def printLog(self):
        logger = self._context.getLogger()
        for index in range(logger.getNumberOfMessages()):
            print(logger.getMessageTextAtIndex(index))

    def _initialise(self):
        self._filenameStem = os.path.join(self._location, self._identifier)
        tess = self._context.getTessellationmodule().getDefaultTessellation()
        tess.setRefinementFactors(12)
        # set up standard materials and glyphs so we can use them elsewhere
        self._materialmodule = self._context.getMaterialmodule()
        self._materialmodule.defineStandardMaterials()
        solid_blue = self._materialmodule.createMaterial()
        solid_blue.setName('solid_blue')
        solid_blue.setManaged(True)
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.0, 0.2, 0.6])
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.0, 0.7, 1.0])
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.1, 0.1, 0.1])
        solid_blue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
        trans_blue = self._materialmodule.createMaterial()
        trans_blue.setName('trans_blue')
        trans_blue.setManaged(True)
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.0, 0.2, 0.6])
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.0, 0.7, 1.0])
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.1, 0.1, 0.1])
        trans_blue.setAttributeReal(Material.ATTRIBUTE_ALPHA, 0.3)
        trans_blue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
        glyphmodule = self._context.getGlyphmodule()
        glyphmodule.defineStandardGlyphs()

    def _makeConnections(self):
        pass

    def getIdentifier(self):
        return self._identifier

    def getOutputModelFilename(self):
        return self._filenameStem + '.exf'

    def getGeneratorModel(self):
        return self._generator_model

    def getMeshAnnotationModel(self):
        return self._annotation_model

    def getSegmentationDataModel(self):
        return self._segmentation_data_model

    def getScene(self):
        return self._region.getScene()

    def getContext(self):
        return self._context

    def registerSceneChangeCallback(self, sceneChangeCallback):
        self._generator_model.registerSceneChangeCallback(sceneChangeCallback)

    def done(self):
        self._saveSettings()
        self._generator_model.done()
        self._generator_model.writeModel(self.getOutputModelFilename())
        self._generator_model.writeAnnotations(self._filenameStem)
        self._generator_model.exportToVtk(self._filenameStem)

    def _getSettings(self):
        '''
        Ensures master model settings includes current settings for sub models.
        :return: Master setting dict.
        '''
        settings = self._settings
        settings['generator_settings'] = self._generator_model.getSettings()
        settings['segmentation_data_settings'] = self._segmentation_data_model.getSettings()
        return settings

    def loadSettings(self):
        try:
            settings = self._settings
            with open(self._filenameStem + '-settings.json', 'r') as f:
                savedSettings = json.loads(f.read(), object_hook=Scaffolds_decodeJSON)
                settings.update(savedSettings)
            if not 'generator_settings' in settings:
                # migrate from old settings before named generator_settings
                settings = {'generator_settings': settings}
        except:
            # no settings saved yet, following gets defaults
            settings = self._getSettings()
        self._generator_model.setSettings(settings['generator_settings'])
        self._segmentation_data_model.setSettings(settings['segmentation_data_settings'])
        self._annotation_model.setScaffoldTypeByName(self._generator_model.getEditScaffoldTypeName())
        self._getSettings()

    def _saveSettings(self):
        self._generator_model.updateSettingsBeforeWrite()
        settings = self._getSettings()
        with open(self._filenameStem + '-settings.json', 'w') as f:
            f.write(json.dumps(settings, cls=Scaffolds_JSONEncoder, sort_keys=True, indent=4))

    def setSegmentationDataFile(self, data_filename):
        self._segmentation_data_model.setDataFilename(data_filename)
repo_name: lbarahona/UdacityProject4
path: conference.py
language: Python
license: apache-2.0
size: 37,472
score: 0.005444
content:
#!/usr/bin/env python

"""
conference.py -- Udacity conference server-side Python App Engine API;
    uses Google Cloud Endpoints

$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $

created by wesc on 2014 apr 21
"""

__author__ = 'wesc+api@google.com (Wesley Chun)'

from datetime import datetime

import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote

from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import ndb

from models import ConflictException
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import StringMessage
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForm
from models import ConferenceQueryForms
from models import TeeShirtSize
from models import Session, SessionForm, SessionForms, SessionTypes
from models import Speaker, SpeakerForm, SpeakerForms

from settings import WEB_CLIENT_ID
from settings import ANDROID_CLIENT_ID
from settings import IOS_CLIENT_ID
from settings import ANDROID_AUDIENCE

from utils import getUserId

EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
MEMCACHE_FEATURED_SPEAKER_KEY = "FEATURED_SPEAKER"
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
ANNOUNCEMENT_TPL = ('Last chance to attend! The following conferences '
                    'are nearly sold out: %s')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

CONFERENCE_DEFAULTS = {
    "city": "Default City",
    "maxAttendees": 0,
    "seatsAvailable": 0,
    "topics": ["Default", "Topic"],
}

SESSION_DEFAULTS = {
    'highlights': 'To be announced',
    'duration': 60,
}

OPERATORS = {
    'EQ': '=',
    'GT': '>',
    'GTEQ': '>=',
    'LT': '<',
    'LTEQ': '<=',
    'NE': '!='
}

FIELDS = {
    'CITY': 'city',
    'TOPIC': 'topics',
    'MONTH': 'month',
    'MAX_ATTENDEES': 'maxAttendees',
}

CONF_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
)

CONF_POST_REQUEST = endpoints.ResourceContainer(
    ConferenceForm,
    websafeConferenceKey=messages.StringField(1),
)

SESSION_POST_REQUEST = endpoints.ResourceContainer(
    SessionForm,
    websafeConferenceKey=messages.StringField(1),
)

SESSIONS_BY_SPEAKER = endpoints.ResourceContainer(
    message_types.VoidMessage,
    speakerKey=messages.StringField(1),
)

SESSIONS_BY_TYPE = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
    type=messages.StringField(2),
)

SESSION_WISH_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeSessionKey=messages.StringField(1),
)

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

@endpoints.api(name='conference', version='v1', audiences=[ANDROID_AUDIENCE],
               allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID, ANDROID_CLIENT_ID, IOS_CLIENT_ID],
               scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
    """Conference API v0.1"""

    # - - - Conference objects - - - - - - - - - - - - - - - - -

    def _copyConferenceToForm(self, conf, displayName):
        """Copy relevant fields from Conference to ConferenceForm."""
        cf = ConferenceForm()
        for field in cf.all_fields():
            if hasattr(conf, field.name):
                # convert Date to date string; just copy others
                if field.name.endswith('Date'):
                    setattr(cf, field.name, str(getattr(conf, field.name)))
                else:
                    setattr(cf, field.name, getattr(conf, field.name))
            elif field.name == "websafeKey":
                setattr(cf, field.name, conf.key.urlsafe())
        if displayName:
            setattr(cf, 'organizerDisplayName', displayName)
        cf.check_initialized()
        return cf

    def _createConferenceObject(self, request):
        """Create or update Conference object, returning ConferenceForm/request."""
        # preload necessary data items
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)

        if not request.name:
            raise endpoints.BadRequestException("Conference 'name' field required")

        # copy ConferenceForm/ProtoRPC Message into dict
        data = {field.name: getattr(request, field.name) for field in request.all_fields()}
        del data['websafeKey']
        del data['organizerDisplayName']

        # add default values for those missing (both data model & outbound Message)
        for df in CONFERENCE_DEFAULTS:
            if data[df] in (None, []):
                data[df] = CONFERENCE_DEFAULTS[df]
                setattr(request, df, CONFERENCE_DEFAULTS[df])

        # convert dates from strings to Date objects; set month based on start_date
        if data['startDate']:
            data['startDate'] = datetime.strptime(data['startDate'][:10], "%Y-%m-%d").date()
            data['month'] = data['startDate'].month
        else:
            data['month'] = 0
        if data['endDate']:
            data['endDate'] = datetime.strptime(data['endDate'][:10], "%Y-%m-%d").date()

        # set seatsAvailable to be same as maxAttendees on creation
        if data["maxAttendees"] > 0:
            data["seatsAvailable"] = data["maxAttendees"]
        # generate Profile Key based on user ID and Conference
        # ID based on Profile key get Conference key from ID
        p_key = ndb.Key(Profile, user_id)
        c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
        c_key = ndb.Key(Conference, c_id, parent=p_key)
        data['key'] = c_key
        data['organizerUserId'] = request.organizerUserId = user_id

        # create Conference, send email to organizer confirming
        # creation of Conference & return (modified) ConferenceForm
        Conference(**data).put()
        taskqueue.add(params={'email': user.email(), 'conferenceInfo': repr(request)},
                      url='/tasks/send_confirmation_email')
        return request

    @ndb.transactional()
    def _updateConferenceObject(self, request):
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)

        # copy ConferenceForm/ProtoRPC Message into dict
        data = {field.name: getattr(request, field.name) for field in request.all_fields()}

        # update existing conference
        conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        # check that conf.key is a Conference key and it exists
        self._checkKey(conf.key, request.websafeConferenceKey, 'Conference')

        # check that user is owner
        if user_id != conf.organizerUserId:
            raise endpoints.ForbiddenException('Only the owner can update the conference.')

        # Not getting all the fields, so don't create a new object; just
        # copy relevant fields from ConferenceForm to Conference object
        for field in request.all_fields():
            data = getattr(request, field.name)
            # only copy fields where we get data
            if data not in (None, []):
                # special handling for dates (convert string to Date)
                if field.name in ('startDate', 'endDate'):
                    data = datetime.strptime(data, "%Y-%m-%d").date()
                    if field.name == 'startDate':
                        conf.month = data.month
                # write to Conference object
                setattr(conf, field.name, data)
        conf.put()
        prof = ndb.Key(Profile, user_id).get()
        return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))

    @endpoints.method(ConferenceForm, ConferenceForm, path=
repo_name: ntt-sic/heat
path: heat/tests/test_ceilometer_alarm.py
language: Python
license: apache-2.0
size: 15,354
score: 0.000065
content:
# vim: tabstop=4 shiftwidth=4 softtabstop=4

#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import copy
import json

import mox
import testtools

from oslo.config import cfg

from heat.tests import fakes
from heat.tests import generic_resource
from heat.tests.common import HeatTestCase
from heat.tests import utils

from heat.common import exception
from heat.common import template_format
from heat.openstack.common.importutils import try_import
from heat.engine import clients
from heat.engine import parser
from heat.engine import resource
from heat.engine import scheduler
from heat.engine.properties import schemata
from heat.engine.resources.ceilometer import alarm

ceilometerclient = try_import('ceilometerclient.v2')

alarm_template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "Alarm Test",
  "Parameters" : {},
  "Resources" : {
    "MEMAlarmHigh": {
      "Type": "OS::Ceilometer::Alarm",
      "Properties": {
        "description": "Scale-up if MEM > 50% for 1 minute",
        "meter_name": "MemoryUtilization",
        "statistic": "avg",
        "period": "60",
        "evaluation_periods": "1",
        "threshold": "50",
        "alarm_actions": [],
        "matching_metadata": {},
        "comparison_operator": "gt"
      }
    },
    "signal_handler" : {
      "Type" : "SignalResourceType"
    }
  }
}
'''

not_string_alarm_template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "Alarm Test",
  "Parameters" : {},
  "Resources" : {
    "MEMAlarmHigh": {
      "Type": "OS::Ceilometer::Alarm",
      "Properties": {
        "description": "Scale-up if MEM > 50% for 1 minute",
        "meter_name": "MemoryUtilization",
        "statistic": "avg",
        "period": 60,
        "evaluation_periods": 1,
        "threshold": 50,
        "alarm_actions": [],
        "matching_metadata": {},
        "comparison_operator": "gt"
      }
    },
    "signal_handler" : {
      "Type" : "SignalResourceType"
    }
  }
}
'''

combination_alarm_template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "Combination Alarm Test",
  "Resources" : {
    "CombinAlarm": {
      "Type": "OS::Ceilometer::CombinationAlarm",
      "Properties": {
        "description": "Do stuff in combination",
        "alarm_ids": ["alarm1", "alarm2"],
        "operator": "and",
        "alarm_actions": [],
      }
    }
  }
}
'''

class FakeCeilometerAlarm(object):
    alarm_id = 'foo'

class FakeCeilometerAlarms(object):

    def create(self, **kwargs):
        pass

    def update(self, **kwargs):
        pass

    def delete(self, alarm_id):
        pass

class FakeCeilometerClient(object):
    alarms = FakeCeilometerAlarms()

@testtools.skipIf(ceilometerclient is None, 'ceilometerclient unavailable')
class CeilometerAlarmTest(HeatTestCase):

    def setUp(self):
        super(CeilometerAlarmTest, self).setUp()
        utils.setup_dummy_db()
        resource._register_class('SignalResourceType', generic_resource.SignalResource)
        cfg.CONF.set_default('heat_waitcondition_server_url', 'http://server.test:8000/v1/waitcondition')

        self.fc = fakes.FakeKeystoneClient()
        self.fa = FakeCeilometerClient()

    # Note tests creating a stack should be decorated with @stack_delete_after
    # to ensure the stack is properly cleaned up
    def create_stack(self, template=None):
        if template is None:
            template = alarm_template
        temp = template_format.parse(template)
        template = parser.Template(temp)
        ctx = utils.dummy_context()
        ctx.tenant_id = 'test_tenant'
        stack = parser.Stack(ctx, utils.random_name(), template, disable_rollback=True)
        stack.store()

        self.m.StubOutWithMock(resource.Resource, 'keystone')
        resource.Resource.keystone().MultipleTimes().AndReturn(self.fc)

        self.m.StubOutWithMock(alarm.CeilometerAlarm, 'ceilometer')
        alarm.CeilometerAlarm.ceilometer().MultipleTimes().AndReturn(self.fa)

        al = copy.deepcopy(temp['Resources']['MEMAlarmHigh']['Properties'])
        al['description'] = mox.IgnoreArg()
        al['name'] = mox.IgnoreArg()
        al['alarm_actions'] = mox.IgnoreArg()
        self.m.StubOutWithMock(self.fa.alarms, 'create')
        self.fa.alarms.create(**al).AndReturn(FakeCeilometerAlarm())
        return stack

    @utils.stack_delete_after
    def test_mem_alarm_high_update_no_replace(self):
        '''
        Make sure that we can change the update-able properties
        without replacing the Alarm rsrc.
        '''
        # short circuit the alarm's references
        t = template_format.parse(alarm_template)
        properties = t['Resources']['MEMAlarmHigh']['Properties']
        properties['alarm_actions'] = ['signal_handler']
        properties['matching_metadata'] = {'a': 'v'}

        self.stack = self.create_stack(template=json.dumps(t))
        self.m.StubOutWithMock(self.fa.alarms, 'update')
        schema = schemata(alarm.CeilometerAlarm.properties_schema)
        al2 = dict((k, mox.IgnoreArg()) for k, s in schema.items() if s.update_allowed)
        al2['alarm_id'] = mox.IgnoreArg()
        self.fa.alarms.update(**al2).AndReturn(None)

        self.m.ReplayAll()
        self.stack.create()
        rsrc = self.stack['MEMAlarmHigh']

        snippet = copy.deepcopy(rsrc.parsed_template())
        snippet['Properties']['comparison_operator'] = 'lt'
        snippet['Properties']['description'] = 'fruity'
        snippet['Properties']['evaluation_periods'] = '2'
        snippet['Properties']['period'] = '90'
        snippet['Properties']['enabled'] = 'true'
        snippet['Properties']['repeat_actions'] = True
        snippet['Properties']['statistic'] = 'max'
        snippet['Properties']['threshold'] = '39'
        snippet['Properties']['insufficient_data_actions'] = []
        snippet['Properties']['alarm_actions'] = []
        snippet['Properties']['ok_actions'] = ['signal_handler']
        scheduler.TaskRunner(rsrc.update, snippet)()

        self.m.VerifyAll()

    @utils.stack_delete_after
    def test_mem_alarm_high_update_replace(self):
        '''
        Make sure that the Alarm resource IS replaced when non-update-able
        properties are changed.
        '''
        t = template_format.parse(alarm_template)
        properties = t['Resources']['MEMAlarmHigh']['Properties']
        properties['alarm_actions'] = ['signal_handler']
        properties['matching_metadata'] = {'a': 'v'}

        self.stack = self.create_stack(template=json.dumps(t))
        self.m.ReplayAll()
        self.stack.create()
        rsrc = self.stack['MEMAlarmHigh']
        snippet = copy.deepcopy(rsrc.parsed_template())
        snippet['Properties']['meter_name'] = 'temp'
        updater = scheduler.TaskRunner(rsrc.update, snippet)
        self.assertRaises(resource.UpdateReplace, updater)

        self.m.VerifyAll()

    @utils.stack_delete_after
    def test_mem_alarm_suspend_resume(self):
        """
        Make sure that the Alarm resource gets disabled on suspend
        and reenabled on resume.
        """
        self.stack = self.create_stack()
        self.m.StubOutWithMock(self.fa.alarms, 'update')
        al_suspend = {'alarm_id': mox.IgnoreArg(), 'enabled': False}
        self.fa.alarms.update(**al_suspend).AndReturn(None)
        al_resume = {'alarm_id': mox.IgnoreArg(), 'enabled': True}
        self.fa.alarms.update(**al_resume).AndReturn(None)
repo_name: jzaremba/sima
path: sima/motion/dftreg.py
language: Python
license: gpl-2.0
size: 31,079
score: 0.000064
content:
""" Motion correction of image sequences by 'efficient subpixel image registration by cross correlation'. A reference image is iteratively computed by aligning and averaging a subset of images/frames. 2015 Lloyd Russell, Christoph Schmidt-Hieber ******************************************************************************* Credit to Marius Pachitariu for concept of registering to aligned mean image. Credit to Olivier Dupont-Therrien, Doric Lenses Inc., for concept of applying Gaussian blur & Laplacian to eliminate static inhomogeneities. Parts of the code are based on: skimage.feature.register_translation, which is a port of MATLAB code by Manuel Guizar-Sicairos, Samuel T. Thurman, and James R. Fienup, "Efficient subpixel image registration algorithms," Optics Letters 33, 156-158 (2008). Relating to implementation of skimage.feature.register_translation: Copyright (C) 2011, the scikit-image team All rights reserved. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************* @author: llerussell """ from __future__ import absolute_import, division from builtins import map, range from functools import partial import multiprocessing import numpy as np from scipy.ndimage.interpolation import shift from scipy.ndimage import laplace from scipy.ndimage import gaussian_filter import time from . import motion try: from pyfftw.interfaces.numpy_fft import fftn, ifftn except ImportError: from numpy.fft import fftn, ifftn class DiscreteFourier2D(motion.MotionEstimationStrategy): """ Motion correction of image sequences by 'efficient subpixel image registration by cross correlation'. A reference image is iteratively computed by aligning and averaging a subset of images/frames. Parameters ---------- upsample_factor : int, optional upsample factor. final pixel alignment has resolution of 1/upsample_factor. if 1 only pixel level shifts are made - faster - and no interpolation. Default: 1. max_displacement : array of int, optional The maximum allowed displacement magnitudes in [y,x]. Default: None. num_images_for_mean : int, optional number of images to use to make the aligned mean image. Default: 100. randomise_frames : bool, optional randomise the images selected to make the mean image? if false the first 'num_frames_for_mean' frames will be used. Default: True. err_thresh : float, optional the threshold of mean pixel offset at which to stop aligning the mean image. Default: 0.01. max_iterations : int, optional the maximum number of iterations to compute the aligned mean image. Default: 5. rotation_scaling : bool, optional not yet implemented. Default: False. save_name : string, optional the file name for saving the final registered array of images to disk from within method. If None or 'none', the array will not be saved. Default: None. save_fmt : string, optional the tiff format to save as. 
options include 'mptiff', 'bigtiff', 'singles'. Default: 'mptiff'. n_processes : int, optional number of workers to use (multiprocessing). Default: 1. verbose : bool, optional enable verbose mode. Default: False. return_registered : bool, optional return registered frames? Default: False. laplace : float, optional Sigma of Gaussian. If positive, apply Gaussian blur & laplacian to all images before computing the cross correlation. This step is useful to eliminate static inhomogeneities (such as vignetting) from images. Typical use case includes single-photon widefield microendoscope imaging through a GRIN lens. Default: 0.0 References ---------- Parts of the code are based on: skimage.feature.register_translation, which is a port of MATLAB code by Manuel Guizar-Sicairos, Samuel T. Thurman, and James R. Fienup, "Efficient subpixel image registration algorithms," Optics Letters 33, 156-158 (2008). """ def __init__(self, upsample_factor=1, max_displacement=None, num_images_for_mean=100, randomise_frames=True, err_thresh=0.01, max_iterations=5, rotation_scaling=False, save_fmt='mptiff', save_name=None, n_processes=1, verbose=False, return_registered=False, laplace=0.0): self._params = dict(locals()) del self._params['self'] def _estimate(self, dataset): """ Parameters ---------- Returns ------- displacements : array (2, num_frames*num_cycles)-array of integers giving the estimated displacement of each frame """ params = self._params verbose = params['verbose'] n_processes = params['n_processes'] if verbose: print('Using ' + str(n_processes) + ' worker(s)') displacements = [] for sequence in dataset: num_planes = sequence.shape[1]
num_channels = sequence.shape
[4] if num_channels > 1: raise NotImplementedError("Error: only one colour channel \ can be used for DFT motion correction. Using channel 1.") for plane_idx in range(num_planes): # load into memory... need to pass numpy array to dftreg. # could(should?) rework it to instead accept tiff array if verbose: print('Loading plane ' + str(plane_idx + 1) + ' of ' + str(num_planes) + ' into numpy array') t0 = time.time() # reshape, one plane at a time frames = np.array(sequence[:, plane_idx, :, :, 0]) frames = np.squeeze(frames) e1 = time.time() - t0 if verbose: print(' Loaded in: ' + str(e1) + ' s') # do the registering # registered_frames return is useless, sima later uses the # displacements to shift the image (apply_displacements in # sima/sequence.py: _align method of _MotionCorrectedSequence # class) but this shifting is only pixel-level, much better # results if sub-pixel were possible - replace sima's way of # shifting? this may run into problems when sima then crops the # final image so no empty rows/columns at edge of any frame in # the video (trim_criterion) if params['laplace'] > 0: framesl = np.array([ np.abs(laplace(gaussian_filter(frame, params['laplace']))) for frame in frames]) else: framesl = frames output = _register( framesl, upsample_factor=params['upsample_factor'], max_displacement=params['max_displacement'], num_images_for_mean=params['num_images_for_mean'], randomise_frames=params['randomise_frames'], err_thresh=params['err_thresh'], max_iterations=params['max_iterations'], n_processes=params['n_processes'], save_fmt=params['save_fmt'], save_na
repo_name: jayceyxc/hue
path: apps/search/src/search/conf.py
language: Python
license: apache-2.0
size: 1,443
score: 0
content:
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from desktop.lib.conf import Config, coerce_bool

SOLR_URL = Config(
    key="solr_url",
    help=_("URL of the Solr Server."),
    default="http://localhost:8983/solr/")

EMPTY_QUERY = Config(
    key="empty_query",
    help=_("Query sent when no term is entered."),
    default="*:*")

SECURITY_ENABLED = Config(
    key="security_enabled",
    help=_("Whether Solr requires client to perform Kerberos authentication."),
    default=False,
    type=coerce_bool)

# Unused: deprecated by dashboard
LATEST = Config(
    key="latest",
    help=_("Use latest Solr 5.2+ features."),
    default=False,
    type=coerce_bool)
repo_name: sgiavasis/nipype
path: nipype/interfaces/tests/test_auto_MeshFix.py
language: Python
license: bsd-3-clause
size: 3,030
score: 0.024092
content:
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ...testing import assert_equal
from ..meshfix import MeshFix

def test_MeshFix_inputs():
    input_map = dict(
        args=dict(argstr='%s'),
        cut_inner=dict(argstr='--cut-inner %d'),
        cut_outer=dict(argstr='--cut-outer %d'),
        decouple_inin=dict(argstr='--decouple-inin %d'),
        decouple_outin=dict(argstr='--decouple-outin %d'),
        decouple_outout=dict(argstr='--decouple-outout %d'),
        dilation=dict(argstr='--dilate %d'),
        dont_clean=dict(argstr='--no-clean'),
        environ=dict(nohash=True, usedefault=True),
        epsilon_angle=dict(argstr='-a %f'),
        finetuning_distance=dict(argstr='%f', requires=['finetuning_substeps']),
        finetuning_inwards=dict(argstr='--fineTuneIn ', requires=['finetuning_distance', 'finetuning_substeps']),
        finetuning_outwards=dict(argstr='--fineTuneIn ', requires=['finetuning_distance', 'finetuning_substeps'], xor=['finetuning_inwards']),
        finetuning_substeps=dict(argstr='%d', requires=['finetuning_distance']),
        ignore_exception=dict(nohash=True, usedefault=True),
        in_file1=dict(argstr='%s', mandatory=True, position=1),
        in_file2=dict(argstr='%s', position=2),
        join_closest_components=dict(argstr='-jc', xor=['join_closest_components']),
        join_overlapping_largest_components=dict(argstr='-j', xor=['join_closest_components']),
        laplacian_smoothing_steps=dict(argstr='--smooth %d'),
        number_of_biggest_shells=dict(argstr='--shells %d'),
        out_filename=dict(argstr='-o %s', genfile=True),
        output_type=dict(usedefault=True),
        quiet_mode=dict(argstr='-q'),
        remove_handles=dict(argstr='--remove-handles'),
        save_as_freesurfer_mesh=dict(argstr='--fsmesh', xor=['save_as_vrml', 'save_as_stl']),
        save_as_stl=dict(argstr='--stl', xor=['save_as_vmrl', 'save_as_freesurfer_mesh']),
        save_as_vmrl=dict(argstr='--wrl', xor=['save_as_stl', 'save_as_freesurfer_mesh']),
        set_intersections_to_one=dict(argstr='--intersect'),
        terminal_output=dict(nohash=True),
        uniform_remeshing_steps=dict(argstr='-u %d', requires=['uniform_remeshing_vertices']),
        uniform_remeshing_vertices=dict(argstr='--vertices %d', requires=['uniform_remeshing_steps']),
        x_shift=dict(argstr='--smooth %d'),
    )
    inputs = MeshFix.input_spec()

    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(inputs.traits()[key], metakey), value

def test_MeshFix_outputs():
    output_map = dict(mesh_file=dict())
    outputs = MeshFix.output_spec()

    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
repo_name: MadManRises/Madgine
path: shared/bullet3-2.89/examples/pybullet/gym/pybullet_envs/deep_mimic/learning/ppo_agent.py
language: Python
license: mit
size: 15,708
score: 0.006175
content:
import numpy as np
import copy as copy
import tensorflow as tf

from pybullet_envs.deep_mimic.learning.pg_agent import PGAgent
from pybullet_envs.deep_mimic.learning.solvers.mpi_solver import MPISolver
import pybullet_envs.deep_mimic.learning.tf_util as TFUtil
import pybullet_envs.deep_mimic.learning.rl_util as RLUtil
from pybullet_utils.logger import Logger
import pybullet_utils.mpi_util as MPIUtil
import pybullet_utils.math_util as MathUtil
from pybullet_envs.deep_mimic.env.env import Env

'''
Proximal Policy Optimization Agent
'''

class PPOAgent(PGAgent):
    NAME = "PPO"
    EPOCHS_KEY = "Epochs"
    BATCH_SIZE_KEY = "BatchSize"
    RATIO_CLIP_KEY = "RatioClip"
    NORM_ADV_CLIP_KEY = "NormAdvClip"
    TD_LAMBDA_KEY = "TDLambda"
    TAR_CLIP_FRAC = "TarClipFrac"
    ACTOR_STEPSIZE_DECAY = "ActorStepsizeDecay"

    def __init__(self, world, id, json_data):
        super().__init__(world, id, json_data)
        return

    def _load_params(self, json_data):
        super()._load_params(json_data)

        self.epochs = 1 if (self.EPOCHS_KEY not in json_data) else json_data[self.EPOCHS_KEY]
        self.batch_size = 1024 if (self.BATCH_SIZE_KEY not in json_data) else json_data[self.BATCH_SIZE_KEY]
        self.ratio_clip = 0.2 if (self.RATIO_CLIP_KEY not in json_data) else json_data[self.RATIO_CLIP_KEY]
        self.norm_adv_clip = 5 if (self.NORM_ADV_CLIP_KEY not in json_data) else json_data[self.NORM_ADV_CLIP_KEY]
        self.td_lambda = 0.95 if (self.TD_LAMBDA_KEY not in json_data) else json_data[self.TD_LAMBDA_KEY]
        self.tar_clip_frac = -1 if (self.TAR_CLIP_FRAC not in json_data) else json_data[self.TAR_CLIP_FRAC]
        self.actor_stepsize_decay = 0.5 if (self.ACTOR_STEPSIZE_DECAY not in json_data) else json_data[self.ACTOR_STEPSIZE_DECAY]

        num_procs = MPIUtil.get_num_procs()
        local_batch_size = int(self.batch_size / num_procs)
        min_replay_size = 2 * local_batch_size  # needed to prevent buffer overflow
        assert (self.replay_buffer_size > min_replay_size)

        self.replay_buffer_size = np.maximum(min_replay_size, self.replay_buffer_size)
        return

    def _build_nets(self, json_data):
        assert self.ACTOR_NET_KEY in json_data
        assert self.CRITIC_NET_KEY in json_data

        actor_net_name = json_data[self.ACTOR_NET_KEY]
        critic_net_name = json_data[self.CRITIC_NET_KEY]
        actor_init_output_scale = 1 if (self.ACTOR_INIT_OUTPUT_SCALE_KEY not in json_data) else json_data[self.ACTOR_INIT_OUTPUT_SCALE_KEY]

        s_size = self.get_state_size()
        g_size = self.get_goal_size()
        a_size = self.get_action_size()

        # setup input tensors
        self.s_tf = tf.placeholder(tf.float32, shape=[None, s_size], name="s")
        self.a_tf = tf.placeholder(tf.float32, shape=[None, a_size], name="a")
        self.tar_val_tf = tf.placeholder(tf.float32, shape=[None], name="tar_val")
        self.adv_tf = tf.placeholder(tf.float32, shape=[None], name="adv")
        self.g_tf = tf.placeholder(tf.float32, shape=([None, g_size] if self.has_goal() else None), name="g")
        self.old_logp_tf = tf.placeholder(tf.float32, shape=[None], name="old_logp")
        self.exp_mask_tf = tf.placeholder(tf.float32, shape=[None], name="exp_mask")

        with tf.variable_scope('main'):
            with tf.variable_scope('actor'):
                self.a_mean_tf = self._build_net_actor(actor_net_name, actor_init_output_scale)
            with tf.variable_scope('critic'):
                self.critic_tf = self._build_net_critic(critic_net_name)

        if (self.a_mean_tf != None):
            Logger.print2('Built actor net: ' + actor_net_name)

        if (self.critic_tf != None):
            Logger.print2('Built critic net: ' + critic_net_name)

        self.norm_a_std_tf = self.exp_params_curr.noise * tf.ones(a_size)
        norm_a_noise_tf = self.norm_a_std_tf * tf.random_normal(shape=tf.shape(self.a_mean_tf))
        norm_a_noise_tf *= tf.expand_dims(self.exp_mask_tf, axis=-1)
        self.sample_a_tf = self.a_mean_tf + norm_a_noise_tf * self.a_norm.std_tf
        self.sample_a_logp_tf = TFUtil.calc_logp_gaussian(x_tf=norm_a_noise_tf, mean_tf=None, std_tf=self.norm_a_std_tf)

        return

    def _build_losses(self, json_data):
        actor_weight_decay = 0 if (self.ACTOR_WEIGHT_DECAY_KEY not in json_data) else json_data[self.ACTOR_WEIGHT_DECAY_KEY]
        critic_weight_decay = 0 if (self.CRITIC_WEIGHT_DECAY_KEY not in json_data) else json_data[self.CRITIC_WEIGHT_DECAY_KEY]

        norm_val_diff = self.val_norm.normalize_tf(self.tar_val_tf) - self.val_norm.normalize_tf(self.critic_tf)
        self.critic_loss_tf = 0.5 * tf.reduce_mean(tf.square(norm_val_diff))

        if (critic_weight_decay != 0):
            self.critic_loss_tf += critic_weight_decay * self._weight_decay_loss('main/critic')

        norm_tar_a_tf = self.a_norm.normalize_tf(self.a_tf)
        self._norm_a_mean_tf = self.a_norm.normalize_tf(self.a_mean_tf)

        self.logp_tf = TFUtil.calc_logp_gaussian(norm_tar_a_tf, self._norm_a_mean_tf, self.norm_a_std_tf)
        ratio_tf = tf.exp(self.logp_tf - self.old_logp_tf)
        actor_loss0 = self.adv_tf * ratio_tf
        actor_loss1 = self.adv_tf * tf.clip_by_value(ratio_tf, 1.0 - self.ratio_clip, 1 + self.ratio_clip)
        self.actor_loss_tf = -tf.reduce_mean(tf.minimum(actor_loss0, actor_loss1))

        norm_a_bound_min = self.a_norm.normalize(self.a_bound_min)
        norm_a_bound_max = self.a_norm.normalize(self.a_bound_max)
        a_bound_loss = TFUtil.calc_bound_loss(self._norm_a_mean_tf, norm_a_bound_min, norm_a_bound_max)
        self.actor_loss_tf += a_bound_loss

        if (actor_weight_decay != 0):
            self.actor_loss_tf += actor_weight_decay * self._weight_decay_loss('main/actor')

        # for debugging
        self.clip_frac_tf = tf.reduce_mean(tf.to_float(tf.greater(tf.abs(ratio_tf - 1.0), self.ratio_clip)))

        return

    def _build_solvers(self, json_data):
        actor_stepsize = 0.001 if (self.ACTOR_STEPSIZE_KEY not in json_data) else json_data[self.ACTOR_STEPSIZE_KEY]
        actor_momentum = 0.9 if (self.ACTOR_MOMENTUM_KEY not in json_data) else json_data[self.ACTOR_MOMENTUM_KEY]
        critic_stepsize = 0.01 if (self.CRITIC_STEPSIZE_KEY not in json_data) else json_data[self.CRITIC_STEPSIZE_KEY]
        critic_momentum = 0.9 if (self.CRITIC_MOMENTUM_KEY not in json_data) else json_data[self.CRITIC_MOMENTUM_KEY]

        critic_vars = self._tf_vars('main/critic')
        critic_opt = tf.train.MomentumOptimizer(learning_rate=critic_stepsize, momentum=critic_momentum)
        self.critic_grad_tf = tf.gradients(self.critic_loss_tf, critic_vars)
        self.critic_solver = MPISolver(self.sess, critic_opt, critic_vars)

        self._actor_stepsize_tf = tf.get_variable(dtype=tf.float32, name='actor_stepsize', initializer=actor_stepsize, trainable=False)
        self._actor_stepsize_ph = tf.get_variable(dtype=tf.float32, name='actor_stepsize_ph', shape=[])
        self._actor_stepsize_update_op = self._actor_stepsize_tf.assign(self._actor_stepsize_ph)

        actor_vars = self._tf_vars('main/actor')
        actor_opt = tf.train.MomentumOptimizer(learning_rate=self._actor_stepsize_tf, momentum=actor_momentum)
        self.actor_grad_tf = tf.gradients(self.actor_loss_tf, actor_vars)
        self.actor_solver = MPISolver(self.sess, actor_opt, actor_vars)

        return

    def _decide_action(self, s, g):
        with self.sess.as_default(), self.graph.as_default():
            self._exp_action = self._enable_stoch_policy() and MathUtil.flip_coin(self.exp_params_curr.rate)
            #print("_decide_action._exp_action=",self._exp_action)
            a, logp = self._eval_actor(s, g, self._exp_action)
        return a[0], logp[0]

    def _eval_actor(self, s, g, enable_exp):
        s = np.reshape(s, [-1, self.get_state_size()])
        g = np.reshape(g, [-1, self.get
repo_name: cloudbase/nova-virtualbox
path: nova/api/openstack/compute/plugins/v3/flavors_extraspecs.py
language: Python
license: apache-2.0
size: 6,450
score: 0
content:
# Copyright 2010 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six import webob from nova.api.openstack.compute.schemas.v3 import flavors_extraspecs from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova.api import validation from nova import exception from nova.i18n import _ from nova import objects from nova import utils ALIAS = 'os-flavor-extra-specs' authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS) class FlavorExtraSpecsController(wsgi.Controller): """The flavor extra specs API controller for the OpenStack API.""" def __init__(self, *args, **kwargs): super(FlavorExtraSpecsController, self).__init__(*args, **kwargs) def _get_extra_specs(self, context, flavor_id): flavor = objects.Flavor.get_by_flavor_id(context, flavor_id) return dict(extra_specs=flavor.extra_specs) # NOTE(gmann): Max length for numeric value is being checked # explicitly as json schema cannot have max length check for numeric value def _check_extra_specs_value(self, specs): for key, value in specs.iteritems(): try: if isinstance(value, (six.integer_types, float)): value = six.text_type(value) utils.check_string_length(value, 'extra_specs value', max_length=255) except exception.InvalidInput as error: raise webob.exc.HTTPBadRequest( explanation=error.format_message()) @extensions.expected_errors(()) def index(self, req, flavor_id): """Returns the list of extra specs for a given flavor.""" context = req.environ['nova.context'] authorize(context, action='index') return self._get_extra_specs(context, flavor_id) # NOTE(gmann): Here should be 201 instead of 200 by v2.1 # +microversions because the flavor extra specs has been created # completely when returning a response. 
@extensions.expected_errors((400, 404, 409)) @validation.schema(flavors_extraspecs.create) def create(self, req, flavor_id, body): context = req.environ['nova.context'] authorize(context, action='create') specs = body['extra_specs'] self._check_extra_specs_value(specs) try: flavor = objects.Flavor.get_by_flavor_id(context, flavor_id) flavor.extra_specs = dict(flavor.extra_specs, **specs) flavor.save() except exception.FlavorExtraSpecUpdateCreateFailed as e: raise webob.exc.HTTPConflict(explanation=e.format_message()) except exception.FlavorNotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) return body @extensions.expected_errors((400, 404, 409)) @validation.schema(flavors_extraspecs.update) def update(self, req, flavor_id, id, body): context = req.environ['nova.context'] authorize(context, action='update') self._check_extra_specs_value(body) if id not in body: expl = _('Request body and URI mismatch') raise webob.exc.HTTPBadRequest(explanation=expl) try: flavor = objects.Flavor.get_by_flavor_id(context, flavor_id) flavor.extra_specs = dict(flavor.extra_specs, **body) flavor.save() except exception.FlavorExtraSpecUpdateCreateFailed as e: raise webob.exc.HTTPConflict(explanation=e.format_message()) except exception.FlavorNotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) return body @extensions.expected_errors(404) def show(self, req, flavor_id, id): """Return a single extra spec item.""" context = req.environ['nova.context'] authorize(context, action='show') try: flavor = objects.Flavor.get_by_flavor_id(context, flavor_id) return {id: flavor.extra_specs[id]} except exception.FlavorNotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) except KeyError: msg = _("Flavor %(flavor_id)s has no extra specs with " "key %(key)s.") % dict(flavor_id=flavor_id, key=id) raise webob.exc.HTTPNotFound(explanation=msg) # NOTE(gmann): Here should be 204(No Con
tent) instead of 200 by v2.1 # +microversions because the flavor extra specs has been deleted # completely when returning a response. @extensions.expected_errors(404) def delete(self, req, flavor_id, id): """Deletes an existing extra spec.""" context = req.environ['nova.context'] authorize(context, action='delete') try: flavor = objects.Flavor.get_by_flavor_id(context, flavor_id)
del flavor.extra_specs[id] flavor.save() except (exception.FlavorExtraSpecsNotFound, exception.FlavorNotFound) as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) except KeyError: msg = _("Flavor %(flavor_id)s has no extra specs with " "key %(key)s.") % dict(flavor_id=flavor_id, key=id) raise webob.exc.HTTPNotFound(explanation=msg) class FlavorsExtraSpecs(extensions.V3APIExtensionBase): """Flavors extra specs support.""" name = 'FlavorExtraSpecs' alias = ALIAS version = 1 def get_resources(self): extra_specs = extensions.ResourceExtension( 'os-extra_specs', FlavorExtraSpecsController(), parent=dict(member_name='flavor', collection_name='flavors')) return [extra_specs] def get_controller_extensions(self): return []
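The numeric-value note in the controller above is easiest to see with a concrete payload. A rough sketch of a request body the create() handler would accept — the spec keys are made up, and numeric values are stringified before the 255-character length check:

# Illustrative request body for POST /flavors/{flavor_id}/os-extra_specs.
# The keys below are hypothetical examples, not required names.
body = {
    'extra_specs': {
        'hw:cpu_policy': 'dedicated',
        'quota:disk_read_iops_sec': 10000,  # numeric: converted to text before the length check
    }
}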
zbeaver4/python-webpage-monitor-slackbot
rtmbot.py
Python
mit
7,767
0.007596
#!/usr/bin/env python import sys sys.dont_write_bytecode = True import glob import re import yaml import json import os import sys import time import logging import requests import platform import imp from argparse import ArgumentParser from slackclient import SlackClient def dbg(debug_string): if debug: logging.info(debug_string) class RtmBot(object): def __init__(self, token): self.last_ping = 0 self.token = token self.bot_plugins = [] self.slack_client = None def connect(self): """Convenience method that creates Server instance""" self.slack_client = SlackClient(self.token) self.slack_client.rtm_connect() def start(self): self.connect() self.load_plugins() repeat_reply = None while True: for reply in self.slack_client.rtm_read(): self.input(reply) if 'text' in reply: words = reply['text'].split() first_word = words[0].lower() #Make a repeater if first_word in ['monitor', 'monitor_id', 'monitor_text'] and len(words) > 1: try: webpage_response = requests.get(re.sub('<|>', '', words[1]).split('|')[0]).status_code if webpage_response == 200: repeat_reply = reply.copy() start_time = time.time() except: pass #stop the repeating if the user calls it quits elif first_word == 'quit_monitor': if repeat_reply is not None: repeat_reply = None self.crons() self.output() self.autoping() time.sleep(.1) #See if it's time to check the website again if repeat_reply is not None: time_diff = time.time() - start_time if time_diff > 30: self.input(repeat_reply) start_time = time.time() def autoping(self): #hardcode the interval to 3 seconds now = int(time.time()) if now > self.last_ping + 3: self.slack_client.server.ping() self.last_ping = now def input(self, data): if "type" in data: function_name = "process_" + data["type"] dbg("got {}".format(function_name)) for plugin in self.bot_plugins: plugin.register_jobs() plugin.do(function_name, data) def output(self): for plugin in self.bot_plugins: limiter = False for output in plugin.do_output(): channel = self.slack_client.server.channels.find(output[0]) if channel != None and output[1] != None: if limiter == True: time.sleep(.1) limiter = False message = output[1].encode('ascii','ignore') channel.send_message("{}".format(message)) limiter = True def crons(self): for plugin in self.bot_plugins: plugin.do_jobs() def load_plugins(self): for plugin in glob.glob(directory+'/plugins/*'): sys.path.insert(0, plugin) sys.path.insert(0, directory+'/plugins/') for plugin in glob.glob(directory+'/plugins/*.py') + glob.glob(directory+'/plugins/*/*.py'): logging.info(plugin) name = plugin.split('/')[-1][:-3] # try: self.bot_plugins.append(Plugin(name)) # except: # print "error loading plugin %s" % name class Plugin(object): def __init__(self, name, plugin_config={}): self.name = name self.jobs = [] if platform.system() == 'Windows': self.module = imp.load_source(name, name + '.py') else: self.module = __import__(name) self.register_jobs() self.outputs = [] if name in config: logging.info("config found for: " + name) self.module.config = config[name] if 'setup' in dir(self.module): self.module.setup() def register_jobs(self): if 'crontable' in dir(self.module): for interval, function in self.module.crontable: self.jobs.append(Job(interval, eval("self.module."+function))) logging.info(self.module.crontable) self.module.crontable = [] else: self.module.crontable = [] def do(self, function_name, data): if function_name in dir(self.module): #this makes the plugin fail with stack trace in debug mode if not debug: try: eval("self.module."+function_name)(data) except: dbg("problem 
in module {} {}".format(function_name, data)) else: eval("self.module."+function_name)(data) if "catch_all" in dir(self.module): try: self.module.catch_all(data) except: dbg("problem in catch all") def do_jobs(self): for job in self.jobs: job.check() def do_output(self): output = [] while True: if 'outputs' in dir(self.module): if len(self.module.outputs) > 0: logging.info("output from {}".format(self.module)) output.append(self.module.outputs.pop(0)) else: break else: self.module.outputs = [] return output class Job(object): def __init__(self, interval, function): self.function = function self.interval = interval self.lastrun = 0 def __str__(self): return "{} {} {}".format(self.function, self.interval, self.lastrun) def __repr__(self): return self.__str__() def check(self): if self.lastrun + self.interval < time.time(): if not debug: try: self.function() except: dbg("problem") else: self.function() self.lastrun = time.time() pass class UnknownChannel(Exception): pass def main_loop(): if "LOGFILE" in config: logging.basicConfig(filename=config["LOGFILE"], level=logging.INFO, format='%(asctime)s %(message)s') logging.info(directory) try: bot.start() except KeyboardInterrupt: sys.exit(0) except: logging.exception('OOPS') def parse_args(): parser = ArgumentParser() parser.add_argument( '-c', '--config', help='Full path to config file.', metavar='path' ) return parser.parse_args() if __name__ == "__main__": args = parse_args() directory = os.path.dirname(sys.argv[0]) if not directory.startswith('/'):
directory = os.path.abspath("{}/{}".format(os.getcwd(), directory )) config = yaml.load(file(args.config or 'rtmbot.conf', 'r')) debug = config["DEBUG"]
bot = RtmBot(config["SLACK_TOKEN"]) site_plugins = [] files_currently_downloading = [] job_hash = {} if config.has_key("DAEMON"): if config["DAEMON"]: import daemon with daemon.DaemonContext(): main_loop() main_loop()
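The loader above imports every module under plugins/ and drives it through module-level hooks. A minimal sketch of a plugin it would pick up, assuming the crontable/outputs conventions visible in Plugin.register_jobs() and Plugin.do_output() (the plugin name and reply text are invented):

# plugins/ping.py -- hypothetical plugin for the loader above.
crontable = []  # read by register_jobs() as [interval_seconds, 'function_name'] pairs
outputs = []    # drained by do_output() as [channel_id, message] pairs

def process_message(data):
    # RtmBot.input() dispatches events with type == "message" here.
    if data.get('text', '').lower() == 'ping':
        outputs.append([data['channel'], 'pong'])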
pakit/recipes
parallel.py
Python
bsd-3-clause
883
0
""" Formula for building parallel """ from pakit import Archive, Recipe class Parallel(Recipe): """ GNU parallel executes shell jobs in parallel """ def __init__(self): super(Parallel, self).__init__() self.homepage = 'http://www.gnu.org/software/parallel' self.repos = { 'unstable': Archive('https://ftp.gnu.org/gnu/parallel/'
'parallel-20181022.tar.bz2', hash='2e84dee3556cbb8f6a3794f5b21549faffb132' 'db3fc68e2e95922963adcbdbec') } self.repos['stable'] = self.repos['un
stable'] def build(self): self.cmd('./configure --prefix={prefix}') self.cmd('make install') def verify(self): lines = self.cmd('parallel --version').output() assert lines[0].find('GNU parallel') != -1
isi-nlp/bolinas
common/hgraph/amr_corpus_reader.py
Python
mit
4,973
0.019706
#!/usr/bin/env python2 from hgraph import Hgraph import amr_graph_description_parser #import tree import re import sys import string from collections import defaultdict as ddict def format_tagged(s): #return [tuple(p.split('/')) for p in s.split()] return [p.rsplit('-',1)[0] for p in s.split()] def format_amr(l): amr_s = ' '.join(l) amr_g = Hgraph.from_string(amr_s) return amr_g def read_to_empty(f): lines = [] while True: l = f.readline().strip() if not l: return lines lines.append(l) def format_constituents(l): return nltk.tree.ParentedTree("\n".join(l)) def format_alignments(l, amr): """ Parse alignment descriptions from file """ r = [] for a in l: m = re.match(r'(\S+)\s+:(\S+)\s+(\S+)\s+(.+)\-(\d+)', a) if m: var = m.group(1) role = m.group(2) filler = m.group(3).replace('"','') token = m.group(4) token_id = int(m.group(5)) - 1 else: m = re.match(r'ROOT\s+([^\-]+)\-(\d+)', a) if m: var = None role = "ROOT" filler = amr.roots[0].replace('"','') token = m.group(1) token_id = int(m.group(2)) - 1 else: sys.exit(1) amr_triple = (var, role, (filler,)) r.append((amr_triple, token_id)) return r textlinematcher = re.compile("^(\d+)\.(.*?)\((.*)\)?$") def format_text(l): match = textlinematcher.match(l.strip()) if not match: raise ValueError, "Not a valid text line in Ulf corpus: \n
%s \n"%l
s_no = int(match.group(1)) text = match.group(2).strip().split(" ") s_id = match.group(3).strip() return s_id, s_no, text def plain_corpus(f): while True: x = read_to_empty(f) if not x: raise StopIteration amr = format_amr(x) yield amr def aligned_corpus(f): """ Read the next parsed sentence from an input file using the aligned AMR/tagged string format. """ while True: l = f.readline() if not l: raise StopIteration while l.strip().startswith("#") or l.strip().startswith("==") or not l.strip(): l = f.readline() if not l: raise IOError, "AMR data file ended unexpectedly." sent_id = int(l) l = f.readline() amr = format_amr(read_to_empty(f)) tagged = format_tagged(f.readline()) l = f.readline() alignments = format_alignments(read_to_empty(f), amr) p = SentenceWithHgraph(sent_id, sent_id, amr, tagged, None, alignments) yield p def ulf_corpus(f): """ Read the next parsed sentence from an input file using Ulf's format. """ while True: l = f.readline() if not l: raise StopIteration while l.strip().startswith("#") or not l.strip(): l = f.readline() if not l: raise IOError, "AMR data file ended unexpectedly- sentence without AMR." sent_id, sent_no, tagged = format_text(l.strip()) l = f.readline() amr = format_amr(read_to_empty(f)) p = SentenceWithHgraph(sent_id, sent_no, amr, tagged, None, None) yield p def metadata_amr_corpus(f): """ Read the next parsed sentence from an input file using the AMR meta data format. """ metadata = [] sentence = "" sent_id = "" buff = [] idmatcher = re.compile("# ::id ([^ ]+) ") sentmatcher = re.compile("# ::snt (.*)") count = 1 parser = amr_graph_description_parser.GraphDescriptionParser() while True: l = f.readline() if not l: raise StopIteration l = l.strip() if not l: if buff: amr = parser.parse_string(" ".join(buff)) yield SentenceWithHgraph(sent_id, count, amr, sentence, metadata = metadata) count += 1 buff = [] metadata = [] sentence = "" sent_id = "" elif l.startswith("#"): metadata.append(l) match = idmatcher.match(l) if match: sent_id = match.group(1) match = sentmatcher.match(l) if match: sentence = match.group(1) else: buff.append(l) class SentenceWithHgraph(): """ A data structure to hold Hgraph <-> sentence pairs with PTB parses and token to Hgraph edge elignments. """ def __init__(self, sent_id, sent_no, amr, tagged, ptb = None, edge_alignments = None, metadata = None): self.sent_no = sent_no self.sent_id = sent_id self.amr = amr self.tagged = tagged self.ptb = ptb self.alignments = edge_alignments self.metadata = metadata #in_f = open(sys.argv[1],'r') #corpus = metadata_amr_corpus(in_f)
andrenam/Fogger
fogger/FoggerWindow.py
Python
gpl-3.0
10,132
0.002764
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*- ### BEGIN LICENSE # Copyright (C) 2012 Owais Lone <hello@owaislone.org> # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License version 3, as published # by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranties of # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR # PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program. If not, see <http://www.gnu.org/licenses/>. ### END LICENSE from os import path as op import re import requests import urlparse import tempfile import threading from BeautifulSoup import BeautifulSoup, SoupStrainer import gettext from gettext import gettext as _ gettext.textdomain('fogger') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, GObject, Gio # pylint: disable=E0611 import logging logger = logging.getLogger('fogger') from fogger_lib import Window, IconChooserDialog, ConfirmDialog from fogger_lib import FogAppManager from fogger_lib.exceptions import BaseFogAppException from fogger_lib.helpers import get_network_proxies from fogger_lib.consts import DEFAULT_APP_ICON from fogger_lib.BackgroundLoader import get_chameleonic_pixbuf_from_svg from fogger.AboutFoggerDialog import AboutFoggerDialog ICON_SIZE = Gtk.icon_size_register('FoggerIconSize', 80, 80) GLib.threads_init() # See fogger_lib.Window.py for more details about how this class works class FoggerWindow(Window): __gtype_name__ = "FoggerWindow" def finish_initializing(self, builder): # pylint: disable=E1002 """Set up the main window""" super(FoggerWindow, self).finish_initializing(builder) self.AboutDialog = AboutFoggerDialog self.url = self.builder.get_object('url_entry') self.name = self.builder.get_object('name_entry') self.image = self.builder.get_object('image') self.image_eb = self.builder.get_object('image_eb') self.create_button = self.builder.get_object('create_button') self.spinner = self.builder.get_object('spinner') self.error_message = self.builder.get_object('error') self.background_image = self.builder.get_object('bgimage') self.icon = DEFAULT_APP_ICON self.themed_icon =
None self.icon_selected = False self.icon_theme = Gtk.IconTheme.get_default() self.setup_drop_targets() self.background_image.set_from_pixbuf(get_chameleonic_pixbuf_from_svg( 'background-app.svg')) def validate_form(self, widget, data=None): url = self.url.get_text() name = self.name.g
et_text() sensitive = url and name self.create_button.set_sensitive(sensitive) def setup_drop_targets(self): self.drag_dest_set(Gtk.DestDefaults.ALL, [], Gdk.DragAction.MOVE) self.connect("drag-data-received", self.on_drag_data_received) self.drag_dest_add_uri_targets() def on_drag_data_received(self, widget, context, x, y, data, info, time): try: path = data.get_uris()[0] except IndexError: return else: path = path.replace('file://', '') self.setup_icon(path) def on_cancel(self, widget, data=None): self.destroy() def on_url_changed(self, widget, data=None): pass def on_icon_clicked(self, widget, data=None): icon_chooser = IconChooserDialog(self) response = icon_chooser.run() if response == Gtk.ResponseType.OK: path = icon_chooser.get_filename() self.setup_icon(path) icon_chooser.destroy() def on_name_changed(self, widget, data=None): if self.icon_selected: return name = self.name.get_text().lower().strip().replace(' ', '-') words = name.split('-') subnames = [] for i, word in enumerate(words): x = '-'.join(words[:(i + 1) * -1]) if x: subnames.append(x) search_strings = [name] + subnames icon = self.icon_theme.choose_icon(search_strings, 0, Gtk.IconLookupFlags.GENERIC_FALLBACK) if icon: filename = icon.get_filename() path, ext = op.splitext(filename) _, themed_icon = op.split(path) self.setup_icon(filename, themed_icon, False) else: self.setup_icon(DEFAULT_APP_ICON, None, False) def setup_icon(self, path, name=None, selected=True): pixbuf = GdkPixbuf.Pixbuf.new_from_file(path) self.image.props.pixbuf = pixbuf.scale_simple(80, 80, GdkPixbuf.InterpType.BILINEAR) self.icon = path self.themed_icon = name self.icon_selected = selected def on_create(self, widget, data=None): name = self.name.get_text() manager = FogAppManager() existing = manager.get_by_name(name) if existing: confirm = ConfirmDialog('Fogger', _('There\'s an app for that!'), _('A fog app already exists by that name. 
'\ 'Would you like to replace it with a new one?'), existing.icon, self, _('Replace')) response = confirm.run() confirm.destroy() if response != Gtk.ResponseType.YES: self.name.grab_focus() return self.set_loading_url(True) self.error_message.hide() thread = threading.Thread(target=self.verify_url) thread.daemon = True thread.start() def create_app(self, url, name): manager = FogAppManager() try: app = manager.create(name, url, self.icon, self.themed_icon) except BaseFogAppException: logger.error("Error creating App %s" % url) else: app = Gio.DesktopAppInfo.new_from_filename(app.desktop_file) app.launch([], Gio.AppLaunchContext()) self.destroy() def set_loading_url(self, loading): if loading: self.spinner.show() self.create_button.hide() self.url.set_sensitive(False) self.name.set_sensitive(False) else: self.spinner.hide() self.create_button.show() self.url.set_sensitive(True) self.name.set_sensitive(True) def set_error_message(self, message): self.error_message.set_markup('<tt><small>%s</small></tt>' % message) self.error_message.show() def verify_url(self): logger.debug('Fetching url') url = self.url.get_text() name = self.name.get_text() verified = False proxies = get_network_proxies() try: if url.startswith('file://'): GObject.idle_add(self.set_loading_url, False) GObject.idle_add(self.create_app, url, name) return elif not url.startswith(('http://', 'https://',)): url = 'http://%s' % url try: logger.debug('starting') response = requests.get(url, proxies=proxies) verified = True logger.debug('finishing') except requests.RequestException: logger.debug('Error downloading url %s' % url) GObject.idle_add(self.set_loading_url, False) GObject.idle_add(self.set_error_message, _('The URL %s could not be reached.\nPlease double check'\ ' the URL you provided and try again.' % url)) return SkipIcon = type('SkipIcon', (Exception,), {}) if self.icon != DEFAULT_APP_ICON: raise SkipIcon() # Try to find the apple-touch-icon logger.debug('parsing') soup = BeautifulSoup(response.content, parseOnlyThese=SoupStrainer('link')
aljim/deploymentmanager-samples
community/cloud-foundation/src/cloud_foundation_toolkit/dm_utils.py
Python
apache-2.0
3,240
0
from collections import namedtuple import io import re from six.moves.urllib.parse import urlparse from apitools.base.py import exceptions as apitools_exceptions from googlecloudsdk.api_lib.deployment_manager import dm_base from ruamel.yaml import YAML DM_OUTPUT_QUERY_REGEX = re.compile( r'!DMOutput\s+(?P<url>\bdm://[-/a-zA-Z0-9]+\b)|' r'\$\(out\.(?P<token>[-.a-zA-Z0-9]+)\)' ) DMOutputQueryAttributes = namedtuple( 'DMOutputQueryAttributes', ['project', 'deployment', 'resource', 'name'] ) @dm_base.UseDmApi(dm_base.DmApiVersion.V2) class DM_API(dm_base.DmCommand): """ Class representing the DM API This a proxy class only, so other modules in this project only import this local class instead of gcloud's. Here's the source: https://github.com/google-cloud-sdk/google-cloud-sdk/blob/master/lib/googlecloudsdk/api_lib/deployment_manager/dm_base.py """ API = DM_API() def get_deployment(project, deployment): try: return API.client.deployments.Get( API.messages.DeploymentmanagerDeploymentsGetRequest( project=project, deployment=deployment ) ) except apitools_exceptions.HttpNotFoundError as _: return None def get_manifest(project, deployment): deployment_rsp = get_deployment(project, deployment) return API.client.manifests.Get( API.messages.DeploymentmanagerManifestsGetRequest( project=project, deployment=deployment, manifest=deployment_rsp.manifest.split('/')[-1] ) ) def parse_dm_output_url(url, project=''): error_msg = ( 'The url must look like '
'"dm://${project}/${deployment}/${resource}/${name}" or' '"dm://${deployment}/${resource}/${name}"' ) parsed_url = urlparse(url) if parsed_url.scheme != 'dm': raise ValueError(error_msg) path = parsed_url.path.split('/')[1:] # path == 2 if project isn't specified in the URL # path == 3 if project is specified in the URL if len(path) == 2
: args = [project] + [parsed_url.netloc] + path elif len(path) == 3: args = [parsed_url.netloc] + path else: raise ValueError(error_msg) return DMOutputQueryAttributes(*args) def parse_dm_output_token(token, project=''): error_msg = ( 'The url must look like ' '$(out.${project}.${deployment}.${resource}.${name}" or ' '$(out.${deployment}.${resource}.${name}"' ) parts = token.split('.') # parts == 3 if project isn't specified in the token # parts == 4 if project is specified in the token if len(parts) == 3: return DMOutputQueryAttributes(project, *parts) elif len(parts) == 4: return DMOutputQueryAttributes(*parts) else: raise ValueError(error_msg) def get_deployment_output(project, deployment, resource, name): manifest = get_manifest(project, deployment) layout = YAML().load(manifest.layout) for r in layout.get('resources', []): if r['name'] != resource: continue for output in r.get('outputs', []): if output['name'] == name: return output['finalValue']
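A quick illustration of the two parsers above, with made-up project, deployment, and resource names; the comments show the resulting namedtuple fields:

# 3-part URL path: the project comes from the URL's netloc.
parse_dm_output_url('dm://my-project/my-deployment/my-vm/ip')
# -> DMOutputQueryAttributes('my-project', 'my-deployment', 'my-vm', 'ip')

# 3-part token: the project falls back to the keyword argument.
parse_dm_output_token('my-deployment.my-vm.ip', project='my-project')
# -> DMOutputQueryAttributes('my-project', 'my-deployment', 'my-vm', 'ip')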
ojarva/davis-weatherlink-scraper
setup.py
Python
bsd-3-clause
1,407
0.001421
from setuptools import setup, find_packages from codecs import open from os import path here = path.abspath(path.d
irname(__file__)) with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='davis-weatherlink-scraper', version='0.1.0', description='Scraper and parser for Davis Weatherlink data', long_description=long_description, url='https://github.com/ojarva/davis-weatherlink-scraper', author='Olli Jarva', author_email='olli@jarva.fi', license='BSD', classifiers=[ 'Development Status :: 4
- Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: PyPy', ], keywords='davis weatherlink weather', packages=["davis_weatherlink_scraper"], install_requires=['beautifulsoup4==4.4.1', 'requests==2.20.0', 'docopt==0.6.2', 'redis==2.10.5'], scripts=["davis_weatherlink_scraper/weatherlink_redis_publisher", "davis_weatherlink_scraper/weatherlink"], test_suite="tests", extras_require={ 'dev': ['twine', 'wheel'], }, )
javaarchive/PIDLE
ccode.py
Python
mit
846
0.049645
# Simple substitution cipher: two parallel alphabets define the mapping.
replacing='qwertyuiopasdfghjklzxcvbnm )([]\/{}!@#$%^&*'
a='\/abcdefghijklmnopqrstuvwxyz() }{][*%$&^#@!'
replacing=list(replacing)
a=list(a)
d={}
e={}
if len(replacing)==len(a):
    for x in range(len(a)):
        d[replacing[x]]=a[x]    # decode table: cipher char -> plain char
        e[a[x]]=replacing[x]    # encode table: plain char -> cipher char
def encrypt(mapping, string):
    'Encode string by substituting each character via mapping.'
    code=[]
    for x in string:
        code.append(mapping[x])
    return ''.join(code)
def decrypt(mapping, string):
    'Decode string by substituting each character via mapping.'
    decode=[]
    for x in string:
        decode.append(mapping[x])
    return ''.join(decode)
if __name__=='__main__':
    c=input('code:')
    code=encrypt(e,c)
    decode=decrypt(d,c)
    print('encrypts to',code)
    print('decrypts to',decode)
    input()
unnikrishnankgs/va
venv/lib/python3.5/site-packages/tensorflow/contrib/quantization/python/array_ops.py
Python
bsd-2-clause
1,156
0
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Quantized Array Operations.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function # pylint: disable=unused-import from tensorflow.python.ops import gen_array_ops as quantized_gen_array_ops from tensorflow.python.ops.gen_ar
ray_ops import dequantize from tensorflow.python.ops.gen_array_ops import quantize_v2 from tensorflow.python.ops.gen_array_ops import quantized_concat # pylint: enable=unu
sed-import
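A rough round-trip sketch using the re-exported ops; the argument names follow the classic quantize_v2/dequantize signatures (input, min_range, max_range[, T/mode]) and should be treated as illustrative rather than authoritative:

import tensorflow as tf

x = tf.constant([0.0, 0.25, 0.5, 1.0])
# quantize_v2 returns the quantized tensor plus the actual min/max it used.
q, q_min, q_max = quantize_v2(x, min_range=0.0, max_range=1.0, T=tf.quint8)
y = dequantize(q, q_min, q_max)  # approximately recovers x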
babyliynfg/cross
tools/project-creator/Python2.6.6/Lib/test/test_bigmem.py
Python
mit
39,354
0.001347
from test import test_support from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest import unittest import operator import string import sys # Bigmem testing houserules: # # - Try not to allocate too many large objects. It's okay to rely on # refcounting semantics, but don't forget that 's = create_largestring()' # doesn't release the old 's' (if it exists) until well after its new # value has been created. Use 'del s' before the create_largestring call. # # - Do *not* compare large objects using assertEquals or similar. It's a # lengty operation and the errormessage will be utterly useless due to # its size. To make sure whether a result has the right contents, better # to use the strip or count methods, or compare meaningful slices. # # - Don't forget to test for large indices, offsets and results and such, # in addition to large sizes. # # - When repeating an object (say, a substring, or a small list) to create # a large object, make the subobject of a length that is not a power of # 2. That way, int-wrapping problems are more easily detected. # # - While the bigmemtest decorator speaks of 'minsize', all tests will # actually be called with a much smaller number too, in the normal # test run (5Kb currently.) This is so the tests themselves get frequent # testing. Consequently, always make all large allocations based on the # passed-in 'size', and don't rely on the size being very large. Also, # memuse-per-size should remain sane (less than a few thousand); if your # test uses more, adjust 'size' upward, instead. class StrTest(unittest.TestCase): @bigmemtest(minsize=_2G, memuse=2) def test_capitalize(self, size): SUBSTR = ' abc def ghi' s = '-' * size + SUBSTR caps = s.capitalize() self.assertEquals(caps[-len(SUBSTR):], SUBSTR.capitalize()) self.assertEquals(caps.lstrip('-'), SUBSTR) @bigmemtest(minsize=_2G + 10, memuse=1) def test_center(self, size): SUBSTR = ' abc def ghi' s = SUBSTR.center(size) self.assertEquals(len(s), size) lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 if len(s) % 2: lpadsize += 1 self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) self.assertEquals(s.strip(), SUBSTR.strip()) @precisionbigmemtest(size=_2G - 1, memuse=1) def test_center_unicode(self, size): SUBSTR = u' abc def ghi' try: s = SUBSTR.center(size) except OverflowError: pass # acceptable on 32-bit else: self.assertEquals(len(s), size) lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 if len(s) % 2: lpadsize += 1 self.assertEquals(s[lpadsize:-rpadsize], SUBSTR) self.assertEquals(s.strip(), SUBSTR.strip()) del s @bigmemtest(minsize=_2G, memuse=2) def test_count(self, size): SUBSTR = ' abc def ghi' s = '.' * size + SUBSTR self.assertEquals(s.count('.'), size) s += '.' self.assertEquals(s.count('.'), size + 1) self.assertEquals(s.count(' '), 3) self.assertEquals(s.count('i'), 1) self.assertEquals(s.count('j'), 0) @bigmemtest(minsize=_2G + 2, memuse=3) def test_decode(self, size): s = '.' * size self.assertEquals(len(s.decode('utf-8')), size) def basic_encode_test(self, size, enc, c=u'.', expectedsize=None): if expectedsize is None: expectedsize = size s = c * size self.assertEquals(len(s.encode(enc)), expectedsize) @bigmemtest(minsize=_2G + 2, me
muse=3) def test_encode(self, size): return self.basic_encode_test(size, 'utf-8') @precisionbigmemtest(size=_4G // 6 + 2, memuse=2) def test_encode_raw_unicode_escape(self, size): try: return self.basic_encode_test(size, 'raw_unicode_escape') except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_4G // 5 + 70, memuse=3) def test_encode_utf7(self, size): try: return
self.basic_encode_test(size, 'utf7') except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_4G // 4 + 5, memuse=6) def test_encode_utf32(self, size): try: return self.basic_encode_test(size, 'utf32', expectedsize=4*size+4) except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_2G-1, memuse=2) def test_decodeascii(self, size): return self.basic_encode_test(size, 'ascii', c='A') @precisionbigmemtest(size=_4G // 5, memuse=6+2) def test_unicode_repr_oflw(self, size): try: s = u"\uAAAA"*size r = repr(s) except MemoryError: pass # acceptable on 32-bit else: self.failUnless(s == eval(r)) @bigmemtest(minsize=_2G, memuse=2) def test_endswith(self, size): SUBSTR = ' abc def ghi' s = '-' * size + SUBSTR self.failUnless(s.endswith(SUBSTR)) self.failUnless(s.endswith(s)) s2 = '...' + s self.failUnless(s2.endswith(s)) self.failIf(s.endswith('a' + SUBSTR)) self.failIf(SUBSTR.endswith(s)) @bigmemtest(minsize=_2G + 10, memuse=2) def test_expandtabs(self, size): s = '-' * size tabsize = 8 self.assertEquals(s.expandtabs(), s) del s slen, remainder = divmod(size, tabsize) s = ' \t' * slen s = s.expandtabs(tabsize) self.assertEquals(len(s), size - remainder) self.assertEquals(len(s.strip(' ')), 0) @bigmemtest(minsize=_2G, memuse=2) def test_find(self, size): SUBSTR = ' abc def ghi' sublen = len(SUBSTR) s = ''.join([SUBSTR, '-' * size, SUBSTR]) self.assertEquals(s.find(' '), 0) self.assertEquals(s.find(SUBSTR), 0) self.assertEquals(s.find(' ', sublen), sublen + size) self.assertEquals(s.find(SUBSTR, len(SUBSTR)), sublen + size) self.assertEquals(s.find('i'), SUBSTR.find('i')) self.assertEquals(s.find('i', sublen), sublen + size + SUBSTR.find('i')) self.assertEquals(s.find('i', size), sublen + size + SUBSTR.find('i')) self.assertEquals(s.find('j'), -1) @bigmemtest(minsize=_2G, memuse=2) def test_index(self, size): SUBSTR = ' abc def ghi' sublen = len(SUBSTR) s = ''.join([SUBSTR, '-' * size, SUBSTR]) self.assertEquals(s.index(' '), 0) self.assertEquals(s.index(SUBSTR), 0) self.assertEquals(s.index(' ', sublen), sublen + size) self.assertEquals(s.index(SUBSTR, sublen), sublen + size) self.assertEquals(s.index('i'), SUBSTR.index('i')) self.assertEquals(s.index('i', sublen), sublen + size + SUBSTR.index('i')) self.assertEquals(s.index('i', size), sublen + size + SUBSTR.index('i')) self.assertRaises(ValueError, s.index, 'j') @bigmemtest(minsize=_2G, memuse=2) def test_isalnum(self, size): SUBSTR = '123456' s = 'a' * size + SUBSTR self.failUnless(s.isalnum()) s += '.' self.failIf(s.isalnum()) @bigmemtest(minsize=_2G, memuse=2) def test_isalpha(self, size): SUBSTR = 'zzzzzzz' s = 'a' * size + SUBSTR self.failUnless(s.isalpha()) s += '.' self.failIf(s.isalpha()) @bigmemtest(minsize=_2G, memuse=2) def test_isdigit(self, size): SUBSTR = '123456' s = '9' * size + SUBSTR self.failUnless(s.isdigit()) s += 'z' self.failIf(s.isdigit()) @bigmemtest(minsize=_2G, memuse=2) def test_islower(self, size): chars = ''.join([ chr(c) for c in ran
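The houserules in the header are concrete enough to template a new test. A sketch of one written to those rules — not part of the original suite — using a non-power-of-2 unit and count/slice checks instead of whole-string comparison:

# Hypothetical addition following the houserules above.
@bigmemtest(minsize=_2G, memuse=3)  # peak: old string plus the replaced copy
def test_replace_sketch(self, size):
    SUBSTR = ' abc def ghi'            # length 12, not a power of 2
    s = '.' * size + SUBSTR
    s = s.replace('.', '-')            # old 's' is freed only after the copy exists
    self.assertEquals(s.count('-'), size)
    self.assertEquals(s[-len(SUBSTR):], SUBSTR)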
merfishtools/merfishtools-evaluation
scripts/fig-dataset-correlation.py
Python
mit
311
0
import svgutils.transform as sg from common import load_svg, label_plot
fig = sg.SVGFigure("4.1in", "1.8in") a = load_svg(snakemake.input[1]) b = load_svg(snakemake.input[0]) b.moveto(190, 0) la
= label_plot(5, 10, "a") lb = label_plot(185, 10, "b") fig.append([a, b, la, lb]) fig.save(snakemake.output[0])
r4rdsn/vk-raid-defender
vk_raid_defender/cli/cli.py
Python
mit
7,730
0.007869
from .. import __description__
from ..defender import VkRaidDefender, data, update_data


####################################################################################################

LOGO = '''\ _ _ _ _ __ _ __ _| | __ _ __ __ _(_) __| | __| | ___ / _| ___ _ __ __| | ___ _ __ \ \ / / |/ / | '__/ _` | |/ _` | / _` |/ _ \ |_ / _ \ '_ \ / _` |/ _ \ '__| \ V /| < | | | (_| | | (_| | | (_| | __/ _| __/ | | | (_| | __/ | \_/ |_|\_\ |_| \__,_|_|\__,_| \__,_|\___|_| \___|_| |_|\__,_|\___|_| by alfred richardsn'''

####################################################################################################

from ..logger import logger
from ..settings import CLIENT_ID

import re
import os
import sys
import webbrowser
from getpass import getpass
from argparse import ArgumentParser

from vk_api.exceptions import ApiError
from requests.exceptions import InvalidSchema, ProxyError


class CLIDefender(VkRaidDefender):
    def run(self, chat_ids, objectives):
        self._chat_ids = chat_ids
        self._objectives = objectives
        start_screen()
        logger.info('starting to receive messages')
        try:
            self.listen()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            start_screen()
            logger.critical('a critical error occurred, restarting', exc_info=True)
            self.listen()


def start_screen():
    os.system('cls' if os.name == 'nt' else 'clear')
    print(LOGO + '\n\n')


def ask_yes_or_no(question, true_answer='y', false_answer='n', default_answer='', default=True):
    true_answer = true_answer.lower()
    false_answer = false_answer.lower()
    default_answer = default_answer.lower()
    output = question.strip() + ' (' + (true_answer.upper() + '/' + false_answer if default else true_answer + '/' + false_answer.upper()) + '): '
    answer = None
    while answer not in (true_answer, false_answer, default_answer):
        answer = input(output).lower()
    if answer == true_answer:
        return True
    elif answer == false_answer:
        return False
    else:
        return default


def register():
    use_webbrowser = ask_yes_or_no('open the authorization link in the default web browser?')
    print()
    oauth_url = 'https://oauth.vk.com/authorize?client_id={}&display=page&redirect_uri=https://oauth.vk.com/blank.html&scope=69632&response_type=token'.format(CLIENT_ID)
    if use_webbrowser:
        webbrowser.open(oauth_url, new=2)
        print('the authorization link has just been opened in your web browser.')
    else:
        print(oauth_url + '\n')
        print('open the page at the link above in your web browser.')
    token = None
    while token is None:
        user_input = getpass('log in on the opened page if necessary and paste the address bar of the page you were redirected to: ')
        token = re.search(r'(?:.*access_token=)?([a-f0-9]+).*', user_input)
    return token.group(1)


def run(proxy=None, chat_ids=[], objectives=[], auto_login=False):
    token = data.get('token')
    proxies = data.get('proxies')
    if not token or (not auto_login and not ask_yes_or_no('use previously saved authorization data?')):
        token = register()
        proxies = None

    IP_ADDRESS = re.compile(r'((socks5://)|(?:https?://))?(localhost|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5})')
    if proxy:
        match = IP_ADDRESS.match(proxy)
    if not proxy or (not match and not auto_login):
        proxy = input('enter the proxy server address if you need to use one: ')
        while proxy:
            match = IP_ADDRESS.match(proxy)
            if match:
                break
            proxy = input('invalid server address format, try again: ')
        else:
            match = None
    if match:
        protocol, use_socks, ip = match.groups()
        if not protocol:
            use_socks = ask_yes_or_no('use the socks5 protocol instead of http?') if not auto_login else False
        if use_socks:
            proxies = {'http': 'socks5://' + ip, 'https': 'socks5://' + ip}
        else:
            proxies = {'http': 'http://' + ip, 'https': 'https://' + ip}

    if auto_login or ask_yes_or_no('save the entered data for future sessions?'):
        data['token'] = token
        data['proxies'] = proxies
        update_data()

    start_screen()

    if not chat_ids:
        chat_ids = data.get('chat_ids')
    if not objectives:
        objectives = data.get('objectives')
    if chat_ids is None or objectives is None or (not auto_login and not ask_yes_or_no('use previously saved working data?')):
        chat_ids = list(map(int, input('enter the ids of the chats where raiders should be protected, separated by spaces: ').split()))
        objectives = list(map(int, input('enter the ids of the raiders to protect: ').split()))
        if auto_login or ask_yes_or_no('save the entered data for future sessions?'):
            data['chat_ids'] = chat_ids
            data['objectives'] = objectives
            update_data()

    try:
        defender = CLIDefender(token, proxies=proxies)
    except InvalidSchema:
        sys.exit('additional dependencies must be installed to support the socks5 protocol')
    except ApiError:
        del data['token']
        update_data()
        sys.exit('the entered token is invalid')
    except ProxyError:
        del data['proxies']
        update_data()
        sys.exit('could not connect to the proxy server')

    defender.run(chat_ids, objectives)


def main():
    parser = ArgumentParser(prog='vk-raid-defender', description=__description__, usage='%(prog)s [options]', add_help=False)
    group = parser.add_argument_group('optional arguments')
    group.add_argument('-h', '--help', action='help', help='show this help message and exit')
    group.add_argument('-l', '--login', action='store_true', help='log in automatically')
    group.add_argument('-p', '--proxy', metavar='proxy_address', help='proxy server address')
    group.add_argument('-c', '--chats', type=int, nargs='+', metavar='chat', help='ids of the chats where raiders should be protected')
    group.add_argument('-u', '--users', type=int, nargs='+', metavar='user', help='ids of the raiders to protect')
    args = parser.parse_args()
    try:
        run(args.proxy, args.chats, args.users, args.login)
    except KeyboardInterrupt:
        print()
        sys.exit()


if __name__ == "__main__":
    main()
irlabs/BathroomTiles
GradientSliders.py
Python
mit
12,149
0.033254
# Gradient Sliders # Custom ControlP5 Compound Classes for percentage sliders # For Processing in Python import copy add_library('controlP5') class GradientController(object): def __init__(self, colorList, cp5, sliderClass): self.colorList = self.colorListFromColors(colorList) self.cp5 = cp5 self.Slider = sliderClass self.stopCounter = 0 self.controllerIdentity = 1 self.x = 0 self.y = 0 self.width = 100 self.height = 100 self.calculatedHeight = 0 self.sliderWidth = 50 self.sliderHeight = 10 self.backgroundColor = color(220) self.margin = 2 self.allStops = [] self.callbackActive = True self.testLerpSlider = None self.needsDisplay = True def setPosition(self, x, y): self.x = x self.y = y def setSize(self, w, h): self.width = w self.height = h def setSliderSize(self, w, h): self.sliderWidth = w self.sliderHeight = h def addOuterColorStops(self): beginStop = self.createColorstop(self.colorList, self.x, self.y, 0.0, False) self.allStops.append(beginStop) xAtEnd = (self.x + self.width) - self.sliderWidth endStop = self.createColorstop(self.colorList, xAtEnd, self.y, 1.0) self.allStops.append(endStop) def insertColorStop(self, position): for i, aStop in enumerate(self.allStops): if position < aStop['position']: insertX = self.positionOfSubStop(i, position, True)[0] newStop = self.createColorstop(self.colorList, insertX, self.y, position, False) self.allStops.insert(i, newStop) break self.recalcSubStopPositions() def addStopPositionSliders(self): # Calculate position x = self.x + self.sliderWidth + self.margin y = self.y + self.calculatedHeight + self.margin w = self.width - (2 * (self.sliderWidth + self.margin)) # Callback def positionSliderCallback(event): if self.callbackActive: if event.getAction() == 32: self.positionChanges(event.getController()) if event.getAction() == 8 or event.getAction() == 16: self.stopPositionDidChange(event.getController()) # Set the slider sliderName = "stop_position_%d_%d" % (self.stopCounter, self.controllerIdentity) pSlider = self.Slider(self.cp5, sliderName) pSlider.setCaptionLabel("") pSlider.setSliderMode(0) pSlider.setColorForeground(color(150)) pSlider.setColorBackground(color(70)) pSlider.setColorActive(color(220)) pSlider.setSize(w, self.sliderHeight) pSlider.setPosition(x, y) # For testing: (was 50) pSlider.setValue(40) pSlider.addCallback(positionSliderCallback) self.testLerpSlider = pSlider def positionChanges(self, aSlider): if self.callbackActive: # print "change %f" % aSlider.getValue() # Move stop self.allStops[1]['position'] = aSlider.getValue() / 100.0 self.recalcSubStopPositions() self.needsDisplay = True def stopPositionDidChange(self, aSlider): print "stopPositionDidChange" def display(self): # Sliders are drawn by cp5 # draw graph if self.needsDisplay: self.drawGraph(self.allStops) self.needsDisplay = False def getSliderValues(self): stopsData = [] for cStop in self.allStops: thisStop = {'position': cStop['position']} sliders = {} for i, slider in enumerate(cStop['sliders']): sliders[self.colorList[i]['code']] = slider.getValue() thisStop['values'] = sliders stopsData.append(thisStop) return {'stops': stopsData} def setSliderValues(self, stopsData): if len(stopsData) == len(self.allStops): self.callbackActive = False for i, stopValues in enumerate(stopsData): theStop = self.allStops[i] theStop['position'] = stopValues['position'] if stopValues.has_key('values'): if len(theStop['sliders']) != len(stopValues['values'].keys()): print "WARNING: Possible problem setting slider values - number of colors not matching" for key, value in 
stopValues['values'].iteritems(): indexOfSlider = next(index for (index, c) in enumerate(self.colorList) if c['code'] == key) slider = theStop['sliders'][indexOfSlider] slider.setValue(value) else: print "ERROR: Setting Slider Values Failed - 'values' key missing" self.callbackActive = True else: print "ERROR: Setting Slider Values Failed - number of stops not matching" def valueForKeyAtPosition(self, key, inFloat): # Find the index of the color with key colorIndex = 0 for i, c in enumerate(self.colorList): if key == c['code']: colorIndex = i break # Create allStopPositions stopPositions = [
] values = [] for i, cStop in enumerate(self.allStops): # collect stop positions v = cStop['sliders'][colorIndex].getValue() # set inbetween values # if len(stop
Positions) > 0: # # TODO: fix for right position (refactor testLerpSlider) # testLerpPosition = self.testLerpSlider.getValue() / 100.0 # prevStopPosition = stopPositions[-1] # nextStopPosition = cStop['position'] # stopPositions.append(lerp(prevStopPosition, nextStopPosition, testLerpPosition)) # # add inbetween value # values.append(lerp(values[-1], v, 0.5)) stopPositions.append(cStop['position']) # add value of slider with colorIndex values.append(v) # Find the two stop positions which are right and left of the given position prevStopPosition = 0.0 nextStopPosition = 0.0 prevValue = 0.0 nextValue = 0.0 relativePosition = 0.0 for i, p in enumerate(stopPositions): if inFloat <= p: prevP = stopPositions[i - 1] relativePosition = (inFloat - prevP) / (p - prevP) prevValue = values[i - 1] nextValue = values[i] break else: # inFloat is outside bounds of stopPosition range # Return the maximum stop position value return values[-1] return lerp(prevValue, nextValue, relativePosition) def recalcSubStopPositions(self): if len(self.allStops) > 2: for i, subStop in enumerate(self.allStops[1:-1]): pos = self.positionOfSubStop(i + 1) # Reposition sliders of substop for slider in subStop['sliders']: sliderPos = slider.getPosition() slider.setPosition(pos[0], sliderPos.y) def positionOfSubStop(self, indexOfStop, preInsertPosition = 0, preInsertionMode = False): w = self.sliderWidth numberOfStops = len(self.allStops) thePosition = self.allStops[indexOfStop]['position'] if preInsertionMode: numberOfStops += 1 thePosition = preInsertPosition availableWidth = self.width - ((numberOfStops * w) + ((numberOfStops - 1) * 2 * self.margin)) leadingSpace = availableWidth * thePosition precedingStopsWidth = indexOfStop * (w + (2 * self.margin)) x = self.x + leadingSpace + precedingStopsWidth return (int(x), int(w)) def colorListFromColors(self, colors): newList = copy.copy(colors) for c in newList: if c['code'] == "__": c['color'] = None else: c['color'] = color(c['r'], c['g'], c['b']) return newList def emptyDataStopSet(self): colorDataSet = [] for c in self.colorList: colorD = {} colorD['color'] = c['color'] colorD['name'] = c['name'] colorD['code'] = c['code'] colorD['values'] = [] colorD['hidden'] = True colorDataSet.append(colorD) return colorDataSet def drawGraph(self, stopsArray): # Collect the data # (Assume that every stop has the same amount and order of colors) # (The last stop has the color name) colorData = self.emptyDataStopSet() stopPositions = [] for cStop in stopsArray: # collect stop positions # # set inbetween values # if len(stopPositions) > 0: # # TODO: fix for right position (refactor testLerpSlider) # testLerpPosition = self.testLerpSlider.getValue() / 100.0 # prevStopPosition = stopPositions[-1] # nextStopPosition = cStop['position'] # stopPositions.append(lerp(prevStopPosition, nextStopPosition, testLerpPosition)) stopPositions.append(cStop['position']) # collect values and calculate inbetween values for i, slider in enumerate(cStop['sliders']): v = slider.getValue() # Make inbetween semi-stop # if len(colorData[i]['values']) > 0: # inbetween_v = lerp(colorData[i]['values'][-1], v, 0.5) # colorData[i]['values'].append(inbetween_v) colorData[i]['values'].append(v) if v > 0: colorData[i
pymber/algorithms
algorithms/sorting/bucket_sort.py
Python
mit
730
0.012329
#!/usr/bin/env python

from math import (sqrt, ceil)

from insertion_sort import insertion_sort


def bucket_sort(L=[]):
    '''
    Unstable implementation of bucket sort.
    Assumes non-negative, comparable numbers, since the bucket
    index is computed as value // interval.

    :param L: list of sortable elements.
    '''
    if len(L) < 2:
        return L

    # create roughly sqrt(n) buckets, each covering a value range of `interval`
    num_bucket = sqrt(len(L))
    interval = int(ceil(max(L) / num_bucket)) or 1  # guard against an all-zero input
    bucket = [[] for x in range(int(num_bucket) + 1)]
    if not bucket:
        return L

    # place each item in its respective bucket
    for i in range(len(L)):
        bucket[int(L[i] / interval)].append(L[i])

    # concatenate buckets into a single, nearly sorted array
    bucket = [x for y in bucket for x in y]

    # finish with insertion sort, which is cheap on nearly sorted input
    return insertion_sort(bucket)
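A quick sanity check of the function above, assuming insertion_sort is importable as in the module header; the sample data is arbitrary:

if __name__ == '__main__':
    data = [29, 25, 3, 49, 9, 37, 21, 43]
    print(bucket_sort(data))  # [3, 9, 21, 25, 29, 37, 43, 49]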
npoznans/python_etl
bayes_evening_workshop/Naive_Bayes_Evening_Workshop/classifiers.py
Python
mit
6,290
0.002226
from collections import Counter, defaultdict import math import pprint import functools # Advanced material here, feel free to ignore memoize if you like. # Long story short, it remembers the inputs and outputs to functions, # and if the same input is seen multiple times, then rather than # running the function multiple times, memoization just returns the # result of the function when it was first called with that input. # Memory expensive because it keeps track of past results, but # computationally nice because we don't recalculate the same thing # over and over. def memoize(obj): cache = obj.cache = {} @functools.wraps(obj) def memoizer(*args, **kwargs): if args not in cache: cache[args] = obj(*args, **kwargs) return cache[args] return memoizer def evaluate_classifier(classifier, class_of_interest, evaluation_data, verbose=False, progress=True): if verbose: print("Evaluating performance for class {}".format(class_of_interest)) tp, fp, tn, fn = 0, 0, 0, 0 # true positive, false positive, true negative, false negative count = 0 for dp in evaluation_data: count += 1 if progress: if count % 1000 == 0: print("progress: {} / {}".format(count, len(evaluation_data))) prediction = classifier.predict(dp) actual = dp.klass if actual == prediction: # we got it right!
if prediction == class_of_interest: tp += 1 else: tn += 1 else: # we got it wrong :( if prediction == class_of_interest: fp += 1 else: fn += 1 precision = float(tp) / (tp + fp) recall = float(tp) / (tp + fn) f1 = 2 * precision * recall / (precision + recall) if verbose: print("precision:"
, precision) print("recall:", recall) print("f1:", f1) return f1, precision, recall class NaiveBayesClassifier(object): def __init__(self, laplace_smoothing_constant=0.01): self.total_counter = 0 self.class_counter = Counter() self.feature_given_class_counter = defaultdict(Counter) # Hyperparameter that can be tuned via Cross Validation to improve performance self.laplace_smoothing_constant = laplace_smoothing_constant def _update_with_one_data_point(self, data_point): # Increment the total counter self.total_counter += 1 # Increment class_counter self.class_counter[data_point.klass] += 1 # Increment feature_given_class counter for each feature in featuredict for feature_name, feature_value in data_point.featuredict.items(): assert type(feature_value) == int, "only int typed feature values currently supported" # Bonus: can one extend Naive Bayes to real-valued features? (hint: yes) self.feature_given_class_counter[data_point.klass][feature_name] += feature_value def train(self, train_set, verbose=False): for data_point in train_set: self._update_with_one_data_point(data_point) if verbose: print("Training complete. Counters:") pprint.pprint(self.total_counter) pprint.pprint(self.class_counter) pprint.pprint(self.feature_given_class_counter) @memoize # Advanced material, see note on memoize above def _prior(self, klass): # Laplace smoothing numerator = self.laplace_smoothing_constant denominator = len(self.class_counter) * self.laplace_smoothing_constant # On top of the unsmoothed counts numerator += self.class_counter[klass] denominator += self.total_counter # Gives us our smoothed prior return float(numerator) / denominator @memoize # Advanced material, see note on memoize above def _vocabulary_size(self): vocab = set() for klass in self.class_counter: # for each class # get all the features in class and add them to total cross-class vocabulary vocab.update(set(self.feature_given_class_counter[klass])) return len(vocab) @memoize # Advanced material, see note on memoize above def _likelihood(self, klass, feature_name): # Laplace smoothing numerator = self.laplace_smoothing_constant denominator = self._vocabulary_size() * self.laplace_smoothing_constant # On top of the unsmoothed counts numerator += self.feature_given_class_counter[klass].get(feature_name, 0) denominator += sum(self.feature_given_class_counter[klass].values()) # Gives us our smoothed likelihood return float(numerator) / denominator def predict(self, data_point, verbose=False): # Where we'll store probabilities by class pseudo_probability_by_class = {} # Calculate the pseudo probability for each class for klass in self.class_counter: prior = self._prior(klass) # Aggregate likelihood likelihoods = [] for feature_name in data_point.featuredict: # for each feature # for each time the feature appeared for _ in range(data_point.featuredict[feature_name]): likelihoods.append(self._likelihood(klass, feature_name)) # Add prior and likelihoods in logspace to avoid floating point underflow. # The class with the highest log probability is still the most probable. 
numerator_terms = [prior] + likelihoods pseudo_probability_by_class[klass] = sum([math.log(t) for t in numerator_terms]) # Pick the class with the maximum probability and return it as our prediction sorted_probability_by_class = sorted(pseudo_probability_by_class.items(), # Sorts ascending by default, we want # biggest probability first => descending key=lambda x: x[1], reverse=True) prediction = sorted_probability_by_class[0][0] if verbose: print("Predicting: {}".format(prediction)) return prediction
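The classifier only assumes data points carrying a klass label and an integer-valued featuredict, so a namedtuple can stand in for whatever data point type the surrounding project uses. A minimal usage sketch with invented training data:

from collections import namedtuple

# Hypothetical stand-in for the project's data point type.
DataPoint = namedtuple('DataPoint', ['klass', 'featuredict'])

train = [
    DataPoint('spam', {'buy': 2, 'now': 1}),
    DataPoint('ham', {'meeting': 1, 'tomorrow': 1}),
]
nb = NaiveBayesClassifier()
nb.train(train)
# predict() ignores the label field, so None is fine here.
print(nb.predict(DataPoint(None, {'buy': 1})))  # -> 'spam'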
tartley/pyweek11-cube
source/model/collision.py
Python
bsd-3-clause
1,781
0.003369
from euclid import Vector3 from ..util.vectors import tuple_of_ints class Collision(object): ''' detects if any objects in the world collide ''' def __init__(self, w
orld): world.item_added += self.world_add_item world.item_removed += self.world_remove_item self.occupied = {}
def world_add_item(self, item): if hasattr(item, 'bounds'): position = (0, 0, 0) if hasattr(item, 'position'): position = item.position self.add_item(position, item) def world_remove_item(self, item): if hasattr(item, 'bounds'): position = (0, 0, 0) if hasattr(item, 'position'): position = item.position self.remove_item(position, item) def get_items(self, location): if location is None: return set() if isinstance(location, Vector3): location = tuple_of_ints(location) return self.occupied.get(location, set()) def add_item(self, location, item): if isinstance(location, Vector3): location = tuple_of_ints(location) existing = self.occupied.get(location, set()) existing.add(item) self.occupied[location] = existing def remove_item(self, location, item): if isinstance(location, Vector3): location = tuple_of_ints(location) existing = self.occupied.get(location, set()) existing.remove(item) self.occupied[location] = existing def can_move_to(self, location): return not [ item for item in self.get_items(location) if hasattr(item, 'collide') and item.collide ]
BumagniyPacket/mosaic
run.py
Python
apache-2.0
65
0
from mos
aic import app
if __name__ == '__main__': app.run()
CharlesGust/data_structures
sort_quick/test_quick_sort.py
Python
mit
794
0
import py.test import unittest from quick_sort import sort import random class testQuickSort(unittest.TestCase): def test__init__(self): pass def test_sort_inorder(self): arr = [i for i in xrange(0, 10000)] sortarr = sort
(arr) for i in xrange(1, 10000): self.assertGreaterEqual(sortarr[i], sortarr[i-1]) def test_sort_revorder(self): arr = [i for i in xrange(10000, 0, -1)] sortarr = sort(arr) for i in xrange(1, 10000): self.assertGreaterEqual(sortarr[i], sortarr[i-1]) def test_sortrandorder(self): arr = [random.randint(0, 10000) for i in xrange(0, 10000)] sortarr = sort(arr) for i in xrange(1, 1000
0): self.assertGreaterEqual(sortarr[i], sortarr[i-1])
deepmind/jraph
jraph/examples/lstm.py
Python
apache-2.0
5,350
0.006168
# Copyright 2020 DeepMind Technologies Limited. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing,
software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Example of how to use recurrent networks (e.g.`LSTM`s) with `GraphNetwork`s. Models can use the mechanism for specifying nested node, edge, or global features to simultaneously keep inputs/embeddings together with a
per-node, per-edge or per-graph recurrent state. In this example we show an `InteractionNetwork` that uses an LSTM to keep a memory of the inputs to the edge model at each step of message passing, by using separate "embedding" and "state" fields in the edge features. Following a similar procedure, an LSTM could be added to the `node_update_fn`, or even the `global_update_fn`, if using a full `GraphNetwork`. Note it is recommended to use immutable container types to store nested edge, node and global features to avoid unwanted side effects. In this example we use `namedtuple`s. """ import collections from absl import app import haiku as hk import jax import jax.numpy as jnp import jax.tree_util as tree import jraph import numpy as np NUM_NODES = 5 NUM_EDGES = 7 NUM_MESSAGE_PASSING_STEPS = 10 EMBEDDING_SIZE = 32 HIDDEN_SIZE = 128 # Immutable class for storing nested node/edge features containing an embedding # and a recurrent state. StatefulField = collections.namedtuple("StatefulField", ["embedding", "state"]) def get_random_graph() -> jraph.GraphsTuple: return jraph.GraphsTuple( n_node=np.asarray([NUM_NODES]), n_edge=np.asarray([NUM_EDGES]), nodes=np.random.normal(size=[NUM_NODES, EMBEDDING_SIZE]), edges=np.random.normal(size=[NUM_EDGES, EMBEDDING_SIZE]), globals=None, senders=np.random.randint(0, NUM_NODES, [NUM_EDGES]), receivers=np.random.randint(0, NUM_NODES, [NUM_EDGES])) def network_definition(graph: jraph.GraphsTuple) -> jraph.ArrayTree: """`InteractionNetwork` with an LSTM in the edge update.""" # LSTM that will keep a memory of the inputs to the edge model. edge_fn_lstm = hk.LSTM(hidden_size=HIDDEN_SIZE) # MLPs used in the edge and the node model. Note that in this instance # the output size matches the input size so the same model can be run # iteratively multiple times. In a real model, this would usually be achieved # by first using an encoder in the input data into a common `EMBEDDING_SIZE`. edge_fn_mlp = hk.nets.MLP([HIDDEN_SIZE, EMBEDDING_SIZE]) node_fn_mlp = hk.nets.MLP([HIDDEN_SIZE, EMBEDDING_SIZE]) # Initialize the edge features to contain both the input edge embedding # and initial LSTM state. Note for the nodes we only have an embedding since # in this example nodes do not use a `node_fn_lstm`, but for analogy, we # still put it in a `StatefulField`. graph = graph._replace( edges=StatefulField( embedding=graph.edges, state=edge_fn_lstm.initial_state(graph.edges.shape[0])), nodes=StatefulField(embedding=graph.nodes, state=None), ) def update_edge_fn(edges, sender_nodes, receiver_nodes): # We will run an LSTM memory on the inputs first, and then # process the output of the LSTM with an MLP. edge_inputs = jnp.concatenate([edges.embedding, sender_nodes.embedding, receiver_nodes.embedding], axis=-1) lstm_output, updated_state = edge_fn_lstm(edge_inputs, edges.state) updated_edges = StatefulField( embedding=edge_fn_mlp(lstm_output), state=updated_state, ) return updated_edges def update_node_fn(nodes, received_edges): # Note `received_edges.state` will also contain the aggregated state for # all received edges, which we may choose to use in the node update. node_inputs = jnp.concatenate( [nodes.embedding, received_edges.embedding], axis=-1) updated_nodes = StatefulField( embedding=node_fn_mlp(node_inputs), state=None) return updated_nodes recurrent_graph_network = jraph.InteractionNetwork( update_edge_fn=update_edge_fn, update_node_fn=update_node_fn) # Apply the model recurrently for 10 message passing steps. 
  # If instead we intended to use the LSTM to process a sequence of features
  # for each node/edge, here we would select the corresponding inputs from the
  # sequence along the sequence axis of the nodes/edges features to build the
  # correct input graph for each step of the iteration.
  for _ in range(NUM_MESSAGE_PASSING_STEPS):
    graph = recurrent_graph_network(graph)

  return graph


def main(_):
  network = hk.without_apply_rng(hk.transform(network_definition))
  input_graph = get_random_graph()
  params = network.init(jax.random.PRNGKey(42), input_graph)
  output_graph = network.apply(params, input_graph)
  print(tree.tree_map(lambda x: x.shape, output_graph))


if __name__ == "__main__":
  app.run(main)
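The docstring above notes that the same pattern extends to `node_update_fn`. As a minimal sketch of that variant (not part of the original example, and reusing the `StatefulField`, `HIDDEN_SIZE`, and `EMBEDDING_SIZE` definitions above), a per-node LSTM state can be threaded through the node update exactly as the edge update does:

# Hedged sketch: an LSTM in the node update, mirroring `update_edge_fn` above.
# Assumes the StatefulField namedtuple and constants from the example; nodes
# would also need `state=node_fn_lstm.initial_state(graph.nodes.shape[0])`
# in the `graph._replace(...)` call instead of `state=None`.
def make_stateful_node_update():
  node_fn_lstm = hk.LSTM(hidden_size=HIDDEN_SIZE)
  node_fn_mlp = hk.nets.MLP([HIDDEN_SIZE, EMBEDDING_SIZE])

  def update_node_fn(nodes, received_edges):
    node_inputs = jnp.concatenate(
        [nodes.embedding, received_edges.embedding], axis=-1)
    # Carry the per-node recurrent state through the LSTM, then project
    # the output back to the embedding size with the MLP.
    lstm_output, updated_state = node_fn_lstm(node_inputs, nodes.state)
    return StatefulField(embedding=node_fn_mlp(lstm_output),
                         state=updated_state)

  return update_node_fn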
ulfalizer/Kconfiglib
listnewconfig.py
Python
isc
2,619
0
#!/usr/bin/env python3 # Copyright (c) 2018-2019, Ulf Magnusson # SPDX-License-Identifier: ISC """ Lists all user-modifiable symbols that are not given a value in the configuration file. Usually, these are new symbols that have been added to the Kconfig files. The default configuration filename is '.config'. A different filename can be passed in the KCONFIG_CONFIG environment variable. """ from __future__ import print_function import argparse import sys from kconfiglib import Kconfig, BOOL, TRISTATE, INT, HEX, STRING, TRI_TO_STR def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__) parser.add_argument( "--show-help", "-l", action="store_true", help="Show any help texts as well") parser.add_argument( "kconfig", metavar="KCONFIG", nargs="?", default="Kconfig", help="Top-level Kconfig file (default: Kconfig)") args = parser.parse_args() kconf = Kconfig(args.kconfig, s
uppress_traceback=True) # Make it possible to filter this message out print(kconf.load_config(), file=sys.stderr) for sym in kconf.unique_defined_syms: # Only sho
w symbols that can be toggled. Choice symbols are a special # case in that sym.assignable will be (2,) (length 1) for visible # symbols in choices in y mode, but they can still be toggled by # selecting some other symbol. if sym.user_value is None and \ (len(sym.assignable) > 1 or (sym.visibility and (sym.orig_type in (INT, HEX, STRING) or sym.choice))): # Don't reuse the 'config_string' format for bool/tristate symbols, # to show n-valued symbols as 'CONFIG_FOO=n' instead of # '# CONFIG_FOO is not set'. This matches the C tools. if sym.orig_type in (BOOL, TRISTATE): s = "{}{}={}\n".format(kconf.config_prefix, sym.name, TRI_TO_STR[sym.tri_value]) else: s = sym.config_string print(s, end="") if args.show_help: for node in sym.nodes: if node.help is not None: # Indent by two spaces. textwrap.indent() is not # available in Python 2 (it's 3.3+). print("\n".join(" " + line for line in node.help.split("\n"))) break if __name__ == "__main__": main()
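The script above is a thin loop over the kconfiglib API, and the same check can be done interactively. A minimal sketch using only calls the script already relies on (file names are illustrative):

from kconfiglib import Kconfig

kconf = Kconfig("Kconfig")      # top-level Kconfig file
print(kconf.load_config())      # honors KCONFIG_CONFIG, like the script
unset = [sym.name for sym in kconf.unique_defined_syms
         if sym.user_value is None]
print("\n".join(unset))         # superset of what listnewconfig.py prints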
salas106/irc-ltl-framework
utils/queue.py
Python
mit
352
0.005682
# -*- coding: utf8 -*-
"""
The ``queue`` utils
===================

Some operations will require a queue. This utils file holds those helpers.
"""

__author__ = 'Salas'
__copyright__ = 'Copyright 2014 LTL'
__credits__ = ['Salas']
__license__ = 'MIT'
__version__ = '0.2.0'
__maintainer__ = 'Salas'
__email__ = 'Salas.106.212@gmail.com'
__status__ = 'Pre-Alpha'
eroicaleo/LearningPython
interview/leet/713_Subarray_Product_Less_Than_K.py
Python
mit
434
0.009217
#!/usr/bin/env python3

class Solution():
    def numSubarrayProductLessThanK(self, nums, k):
        # Sliding window: `prod` holds the product of nums[lo..hi]. For each
        # right end `hi`, every subarray ending at `hi` and starting at or
        # after `lo` has product < k, so there are (hi - lo + 1) of them.
        lo, prod = 0, 1
        ret = 0
        for hi, n in enumerate(nums):
            prod *= n
            # Shrink the window from the left until the product drops below k.
            while lo <= hi and prod >= k:
                prod //= nums[lo]
                lo += 1
            ret += (hi-lo+1)
        return ret

nums = [10,5,2,6]
k = 100
sol = Solution()
print(sol.numSubarrayProductLessThanK(nums, k))
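As a sanity check on the counting argument, the window count can be compared against the obvious quadratic enumeration (assuming positive integers, as in the LeetCode constraints; the early break relies on the running product being non-decreasing):

def brute_force(nums, k):
    count = 0
    for i in range(len(nums)):
        prod = 1
        for j in range(i, len(nums)):
            prod *= nums[j]
            if prod >= k:
                break       # products only grow for positive nums
            count += 1
    return count

# Both count the 8 qualifying subarrays of [10, 5, 2, 6] with k = 100.
assert brute_force(nums, k) == sol.numSubarrayProductLessThanK(nums, k) == 8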
extremoburo/django-jquery-file-upload
fileupload/serialize.py
Python
mit
1,030
0.001942
# encoding: utf-8
import mimetypes
import re

from django.core.urlresolvers import reverse


def order_name(name):
    """order_name -- Limit a text to 37 chars length; if necessary strip the
    middle of the text and substitute it with an ellipsis.

    name -- text to be limited.

    """
    name = re.sub(r'^.*/', '', name)
    if len(name) <= 37:
        return name
    return name[:37] + "..." + name[-7:]


def serialize(instance, file_attr='file'):
    """serialize -- Serialize
a File instance into a dict. instance -- File instance file_attr -- attribute name that contains the FileField or ImageField """ obj = getattr(instance, file_attr) return { 'url': obj.url, 'name': order_name(obj.name), #'type': mimetypes.guess_type(obj.path)[0] or 'image/png', 'type': mimetypes.guess_type(obj.path)[0], 'thumbnailUrl': obj.url, 'size': obj.size, 'deleteUrl': reverse('upload-delete', args=[instance.pk]), 'deleteType': 'DELETE', }
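order_name keeps short names intact and elides the middle of long ones, keeping the first 37 and last 7 characters. A quick illustration with invented file names:

print(order_name("uploads/photos/holiday.png"))   # -> 'holiday.png'
long_name = "x" * 40 + ".tar.gz"                  # 47 chars, over the limit
print(order_name(long_name))                      # -> 37 x's + '...' + '.tar.gz'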
sunqm/psi4-cc
psi4/__init__.py
Python
gpl-2.0
434
0
''' ps = psi4.Solver with psi4.quit
e_run(): ps.prepare_chkpt(mo_coeff, fock_on_mo, nelec, e_scf, nuclear_repulsion) ecc = ps.energy('CCSD', c.shape[1], hcore_on_mo, eri_on_mo) rdm1, rdm2 =
ps.density(mo_coeff.shape[1]) eccsdt = ps.energy('CCSD(T)', c.shape[1], hcore_on_mo, eri_on_mo) rdm1, rdm2 = ps.density(mo_coeff.shape[1]) ''' from wrapper import * __all__ = filter(lambda s: not s.startswith('_'), dir())
sbarbett/ip_intelligence
src/check_json.py
Python
apache-2.0
945
0.012698
# Copyright 2017 NeuStar, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class CheckJSON:
    def __init__(self, key, obj, ip_info=False):
        self.key_error = 'Field is not applicable to this license.'
        if ip_info is True:
            self.key_error = 'No IP info returned.'
        self.key = key
        self.obj = obj

    def key_valid(self):
        if self.key not in self.obj:
            raise KeyError(self.key_error)
        else:
            return self.obj[self.key]
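Usage is construct-and-validate; a short hedged example (the response dict and field names are made up):

obj = {"ip": "203.0.113.7", "carrier": "example-carrier"}

print(CheckJSON("ip", obj).key_valid())       # -> 203.0.113.7
try:
    CheckJSON("asn", obj, ip_info=True).key_valid()
except KeyError as err:
    print(err)                                # -> 'No IP info returned.'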
crmccreary/openerp_server
openerp/addons/product_manufacturer/__openerp__.py
Python
agpl-3.0
1,833
0.004364
############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { "name" : "Products Manufacturers", "version" : "1.0", "author" : "O
penERP SA", 'category': 'Purchase Management', 'complexity': "easy", "depends" : ["stock"], "init_xml" : [], "demo_xml" : [], "description": """ A module that adds manufacturers and attributes on the product form. ==================================================================== You can now define the following for a product: * Manufacturer * M
anufacturer Product Name * Manufacturer Product Code * Product Attributes """, "update_xml" : [ "security/ir.model.access.csv", "product_manufacturer_view.xml" ], "auto_install": False, "installable": True, "certificate" : "00720153953662760781", 'images': ['images/products_manufacturer.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
bgris/ODL_bgris
lib/python3.5/site-packages/odl/tomo/backends/scikit_radon.py
Python
gpl-3.0
4,830
0
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL.  If not, see <http://www.gnu.org/licenses/>.

"""Radon transform (ray transform) in 2d using skimage.transform."""

from odl.discr import uniform_discr_frompartition, uniform_partition
import numpy as np
try:
    from skimage.transform import radon, iradon
    SCIKIT_IMAGE_AVAILABLE = True
except ImportError:
    SCIKIT_IMAGE_AVAILABLE = False

__all__ = ('scikit_radon_forward', 'scikit_radon_back_projector',
           'SCIKIT_IMAGE_AVAILABLE')


def scikit_theta(geometry):
    """Calculate angles in degrees with ODL scikit conventions."""
    return np.asarray(geometry.motion_grid).squeeze() * 180.0 / np.pi


def scikit_sinogram_space(geometry, volume_space, sinogram_space):
    """Create a range adapted to the scikit radon geometry."""
    padded_size = int(np.ceil(volume_space.shape[0] * np.sqrt(2)))
    det_width = volume_space.domain.extent()[0] * np.sqrt(2)
    scikit_detector_part = uniform_partition(-det_width / 2.0,
                                             det_width / 2.0,
                                             padded_size)

    scikit_range_part = geometry.motion_partition.insert(
        1, scikit_detector_part)

    scikit_range = uniform_discr_frompartition(scikit_range_part,
                                               interp=sinogram_space.interp,
                                               dtype=sinogram_space.dtype)

    return scikit_range


def clamped_interpolation(scikit_range, sinogram):
    """Interpolate in a possibly smaller space.

    Sets all points that would be outside of the domain to match the
    boundary values.
    """
    min_x = scikit_range.domain.min()[1]
    max_x = scikit_range.domain.max()[1]

    def interpolation_wrapper(x):
        x = (x[0], np.maximum(min_x, np.minimum(max_x, x[1])))

        return sinogram.interpolation(x)
    return interpolation_wrapper


def scikit_radon_forward(volume, geometry, range, out=None):
    """Calculate forward projection using scikit.

    Parameters
    ----------
    volume : `DiscreteLpElement`
        The volume to project.
    geometry : `Geometry`
        The projection geometry to use.
    range : `DiscreteLp`
        range of this projection (sinogram space).
    out : ``range`` element, optional
        An element in range that the result should be written to.

    Returns
    -------
    sinogram : ``range`` element
        Sinogram given by the projection.
    """

    # Check basic requirements. Fully checking should be in wrapper
    assert volume.shape[0] == volume.shape[1]

    theta = scikit_theta(geometry)
    scikit_range = scikit_sinogram_space(geometry, volume.space, range)

    sinogram = scikit_range.element(radon(volume.asarray(), theta=theta).T)

    if out is None:
        out = range.element()

    out.sampling(clamped_interpolation(scikit_range, sinogram))

    scale = volume.space.cell_sides[0]

    out *= scale

    return out


def scikit_radon_back_projector(sinogram, geometry, range, out=None):
    """Calculate back projection using scikit.

    Parameters
    ----------
    sinogram : `DiscreteLpElement`
        Sinogram (projections) to backproject.
    geometry : `Geometry`
        The projection geometry to use.
    range : `DiscreteLp`
        range of this projection (volume space).
    out : ``range`` element, optional
        An element in range that the result should be written to.

    Returns
    -------
    volume : ``range`` element
        Volume given by the back projection.
    """
    theta = scikit_theta(geometry)
    scikit_range = scikit_sinogram_space(geometry, range, sinogram.space)

    scikit_sinogram = scikit_range.element()
    scikit_sinogram.sampling(clamped_interpolation(range, sinogram))

    if out is None:
        out = range.element()
    else:
        # Only do asserts here since these are backend functions
        assert out in range

    out[:] = iradon(scikit_sinogram.asarray().T, theta,
                    output_size=range.shape[0], filter=None)

    # Empirically determined value, gives correct scaling
    scale = 4.0 * float(geometry.motion_params.length) / (2 * np.pi)
    out *= scale

    return out
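For orientation, the `skimage` calls these wrappers adapt take angles in degrees and return the sinogram with one column per angle, which is why both wrappers transpose. A standalone round-trip sketch using only `skimage` and `numpy` (sizes are illustrative; `filter=` matches the older scikit-image keyword used above):

import numpy as np
from skimage.transform import radon, iradon

image = np.zeros((64, 64))
image[24:40, 24:40] = 1.0                              # simple square phantom
theta = np.linspace(0.0, 180.0, 90, endpoint=False)    # angles in degrees

sinogram = radon(image, theta=theta)                   # rows: detector, cols: angles
recon = iradon(sinogram, theta=theta, output_size=64, filter="ramp")
print(sinogram.shape, recon.shape)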
Monithon/Monithon-2.0
campaigns/migrations/0003_auto_20141130_0858.py
Python
gpl-2.0
1,412
0.003541
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('campaigns', '0002_auto_20141022_1840'), ] operations = [ migrations.AlterField( model_name='campaignform', name='form', field=models.ForeignKey(related_name='campaigns', to='customforms.Form'), preserve_default=True, ), migrations.AlterField( model_name='campaignproject', name='campaign', field=models.ForeignKey(related_name='projects', to='campaigns.Campaign'), preserve_default=True, ), migrations.AlterField( model_name='camp
aignproject', name='project', field=models.ForeignKey(related_name='campaigns', to='projects.Monitorable'), preserve_default=True, ), migrations.AlterField( model_name='campaignreport', name='campaign', field=models.ForeignKey(related_name='reports', to='campaigns.Campaign'), preserve_default=True, ), migrations.AlterField( model_name='campaignrepor
t', name='report', field=models.ForeignKey(related_name='campaigns', to='reports.Report'), preserve_default=True, ), ]
jimmyraywv/cloud-custodian
tests/test_offhours.py
Python
apache-2.0
21,903
0.000274
# Copyright 2015-2017 Capital One Services, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function, unicode_literals import datetime import json import os from dateutil import zoneinfo from mock import mock from .common import BaseTest, instance from c7n.filters import FilterValidationError from c7n.filters.offhours import OffHour, OnHour, ScheduleParser, Time # Per http://blog.xelnor.net/python-mocking-datetime/ # naive implementation has issues with pypy real_datetime_class = datetime.datetime def mock_datetime_now(tgt, dt): class DatetimeSubclassMeta(type): @classmethod def __instancecheck__(mcs, obj): return isinstance(obj, real_datetime_class) class BaseMockedDatetime(real_datetime_class): target = tgt @classmethod def now(cls, tz=None): return cls.target.replace(tzinfo=tz) @classmethod def utcnow(cls): return cls.target # Python2 & Python3 compatible metaclass MockedDatetime = DatetimeSubclassMeta( b'datetime' if str is bytes else 'datetime', # hack Python2/3 port (BaseMockedDatetime,), {}) return mock.patch.object(dt, 'datetime', MockedDatetime) class OffHoursFilterTest(BaseTest): """[off|on] hours testing""" def test_offhours_records(self): session_factory = self.replay_flight_data('test_offhours_records') t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=8, day=14, hour=19, minute=00) with mock_datetime_now(t, datetime): p = self.load_policy({ 'name': 'offhours-records', 'resource': 'ec2', 'filters': [ {'State.Name': 'running'}, {'type': 'offhour', 'offhour': 19, 'tag': 'custodian_downtime', 'default_tz': 'est', 'weekends': False}] }, session_factory=session_factory) resources = p.run() self.assertEqual(resources, []) with open(os.path.join( p.options['output_dir'], 'offhours-records', 'parse_errors.json')) as fh: data = json.load(fh) self.assertEqual(len(data), 1) self.assertEqual(data[0][0], 'i-0ee3a9bc2eeed269f') self.assertEqual(data[0][1], 'off=[m-f,8];on=[n-f,5];pz=est') with open(os.path.join( p.options['output_dir'], 'offhours-records', 'opted_out.json')) as fh: data = json.load(fh) self.assertEqual(len(data), 1) self.assertEqual(data[0]['InstanceId'], 'i-0a619b58a7e704a9f') def test_validate(self): self.assertRaises( FilterValidationError, OffHour({'default_tz': 'zmta'}).validate) self.assertRaises( FilterValidationError, OffHour({'offhour': 25}).validate) i = OffHour({}) self.assertEqual(i.validate(), i) def test_process(self): f = OffHour({'opt-out': True}) instances = [ instance(Tags=[]), instance( Tags=[{'Key': 'maid_offhours', 'Value': ''}]), instance( Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]), instance( Tags=[{'Key': 'maid_offhours', 'Value': 'off'}]), instance( Tags=[ {'Key': 'maid_offhours', 'Value': "off=(m-f,5);zebrablue,on=(t-w,5)"}])] t = datetime.datetime( year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) with mock_datetime_now(t, datetime): self.assertEqual( f.process(instances), [instances[0], instances[1], instances[2]] ) 
def test_opt_out_behavior(self): # Some users want to match based on policy filters to # a resource subset with default opt out behavior t = datetime.datetime( year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) f = OffHour({'opt-out': True}) with mock_datetime_now(t, datetime): i = instance(Tags=[]) self.assertEqual(f(i), True) i = instance( Tags=[{'Key': 'maid_offhours', 'Value': ''}]
) self.assertEqual(f(i), True) i = instance( Tags=[{'Key': 'maid_offhours', 'Value': 'on'}] ) self.assertEqual(f(i), True) i = instance( Tags=[{'Key': 'maid_offhours', 'Value': 'off'}]) self.assertEqual(f(i), False) self.assertEqual(f.opted_out, [i]) def test_opt_in_behavior(self): # Given the addition
of opt out behavior, verify if its # not configured that we don't touch an instance that # has no downtime tag i = instance(Tags=[]) i2 = instance(Tags=[{'Key': 'maid_offhours', 'Value': ''}]) i3 = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]) t = datetime.datetime( year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) f = OffHour({}) with mock_datetime_now(t, datetime): self.assertEqual(f(i), False) self.assertEqual(f(i2), True) self.assertEqual(f(i3), True) t = datetime.datetime( year=2015, month=12, day=1, hour=7, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) f = OnHour({}) with mock_datetime_now(t, datetime): self.assertEqual(f(i), False) self.assertEqual(f(i2), True) self.assertEqual(f(i3), True) def xtest_time_match_stops_after_skew(self): hour = 7 t = datetime.datetime( year=2015, month=12, day=1, hour=hour, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OnHour({'skew': 1}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(0, 4): dt.target = t.replace(hour=hour + n) results.append(f(i)) self.assertEqual(results, [True, True, False, False]) def test_resource_schedule_error(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2015, month=12, day=1, hour=19, minute=5) f = OffHour({}) f.process_resource_schedule = lambda: False with mock_datetime_now(t, datetime): i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) self.assertEqual(f(i), False) def test_time_filter_usage_errors(self): self.assertRaises(NotImplementedError, Time, {}) def test_everyday_onhour(self): # weekends on means we match times on the weekend start_day = 14 # sunday t = datetime.datetime( year=2016, day=start_day, month=8, hour=7, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OnHour({'weekends': False}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(7): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual(results, [True] * 7) de
degiacom/assemble
ForceField.py
Python
gpl-3.0
4,345
0.023245
# Copyright (c) 2014-2018 Matteo Degiacomi and Valentina Erastova # # Assemble is free software ; # you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; # either version 2 of the License, or (at your option) any later version. # Assemble is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY ; # without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # You should have received a copy of the GNU General Public License along with Assemble ; # if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. # # Authors : Matteo Degiacomi, matteo.degiacomi@gmail.com, Valentina Erastova, valentina.erastova@gmail.com import numpy as np import logging class ForceField(object): def __init__(self): self.bonded={} self.nonbonded={} self.combination=[] self.fftype=[] #default return values self.default_bond=1.5 self.default_angle=114 self.default_dihedral=120 self.logger=logging.getLogger('assemble') def load(self,fffile): self.logger.info("\n> loading force field %s..."%fffile) f = open(fffile, 'r+') line = f.readline() while line: w=line.split() if "bondedtypes" in line: break line=f.readline() #extract equations type while line: w=line.split() if not line.isspace() and not ";" in w[0] and len(w)==4: self.fftype=np.array(w).astype(int) break line=f.readline() line=f.readline() #extract bonded potential constants while line: w=line.split() if "atomtypes" in line: break if not line.isspace() and not ";" in w[0]: self.bonded[w[0]]=np.array(w[1:]).astype(float) line=f.readline() line=f.readline() #non bonded potential while line: w=line.split() if "defaults" in line: break if not line.isspace() and not ";" in w[0]: self.nonbonded[w[0]]=np.array(w[1:]) line=f.readline() line=f.readline() #get combination rules while line: w=line.split() if not line.isspace() and not ";" in w[0]: self.combination=np.array(w) break line=f.readline() f.close() if len(self.fftype)==0: raise IOError("bond types not found in force field %s!"%fffile) if len(self.bonded)==0: raise IOError("bonded parameters not found in force field %s!"%fffile) if len(self.nonbonded)==0: raise IOError("non-bonded parameters not found in force field %s!"%fffile) if len(self.combination)==0: raise IOError("combination rules not found in force field %s!"%fffile) def get_bond(self,name): if self.fftype[0]>=1 and self.fftype[0]<=7: return self.bonded[name][0]*10 #tabulated potential, no way to
know where the minimum is. Return default value else: return self.default_bond def get_angle(self,name): if self.fftype[1]>=1 and self.fftype[1]<=2: return self.bonded[name][0] #no analytical minimum exists, return default else: return self.default_angle def get_dihedral(self,name): if self.fftype[2]>=1 and self.fftype
[2]<=2: return self.bonded[name][0] #no analytical minimum exists, return default else: return self.default_dihedral if __name__=="__main__": FF=ForceField() FF.load("./database/forcefield/trappe.ff.txt") #FF.load("C:\Users\Matteo\workspace\polymer\database\forcefield\trappe.ff") print(FF.bonded) #print FF.nonbonded #print FF.combination print(FF.fftype)
techinc/imagepusher
munch.py
Python
mit
651
0.058372
import imagepusher, random if __name__ == '__main__': host, port = '', 18002 pusher = imagepusher.ImagePusher( (host, port) ) width, height
= 12, 10 munch = [ [ [0,0,0] for x in xrange(width) ] for y in xrange(height) ] while True: for i in xrange(16): for j in xrange(i+1): for y in xrange(height): for x in xrange(width): if y == (x ^ j): munch[y][x]
[0] += 1 munch[y][x][0] %= 256 munch[y][x][1] += 5 munch[y][x][1] %= 256 munch[y][x][2] += 9 munch[y][x][2] %= 256 frame = [ [ [n/255., m/255., o/255.] for n,m,o in row ] for row in munch ] pusher.push_frame( frame )
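The animation above is the classic XOR "munching squares" pattern: on frame j, cell (x, y) lights up when y == x ^ j. A tiny hedged snippet that prints one frame of the mask without needing the image pusher:

# One frame of the munching-squares mask (j chosen arbitrarily).
width, height, j = 12, 10, 3
for y in range(height):
    print("".join("#" if y == (x ^ j) else "." for x in range(width)))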
jvictor0/TweetTracker
src/TwitterRank/CheckGraph.py
Python
mit
1,247
0.00401
import sys, os, re, time, resource, gc import ujson, boto import boto.s3.connection from collections import defaultdict access_key = os.environ["AWS_ACCESS_KEY"] secret_key = os.environ["AWS_SECRET_KEY"] def key_iterator(key): """ Iterator for line by line, for going through the whole contents of a key """ unfinished_line = "" for byte in key: byte = unfinished_line + byte lines = byte.split("\n") unfinished_line = lines.pop() for line in lines: yield line def main(args): """ Main method Rolling like it's 2006 """ conn = boto.connect_s3(
aws_access_key_id=access_key, aws_secret_access_key=secret_key) bucket = conn.get_bucket("tweettrack") count = 0 keys = bucket.list("Twitterrank_Full_Output/graph") for f in keys: print f f_iter = key_iterator(f) for line in f_iter: count += 1 if line.find("score") == -1: print "
fuck this shit, we fucked up" print line sys.exit(0) if count % 50000 == 0: print "count is: %d" % (count,) conn.close() if __name__ == "__main__": main(sys.argv)
SimpleGeometry/bisv-ml
tensorflow-2/tf_linreg.py
Python
mit
2,049
0.028306
#import libraries import tensorflow as tf import numpy as np def get_data(): #data is from the computer hardware dataset found on the UCI ML repository with open('data.txt', 'r') as fin: text_in = fin.read() split = text_in.splitlines() data = [] for line in split: data.append(line.split(',')) np_data = np.array(data) x = np_data[:, 2:8].astype('f4') y = np_data[:, 8].astype('f4') #normalize features of x x_mean = np.mean(x, 0) x_std = np.std(x, 0) x = (x - x_mean) / x_std return x, y def tf_summary(): if tf.gfile.Exists("summary"): tf.gfile.DeleteRecursively("summary") tf.summary.scalar('cost', cost) tf.summary.histogram('weights', w) tf.summary.histogram('bias', b) summary = tf.summary.merge_all() writer = tf.summary.FileWriter("summary") writer.add_graph(sess.graph) return summary, writer #get data x_data, y_data = get_data()
n_examples = np.shape(x_data)[0] n_features =
np.shape(x_data)[1] x_data = np.transpose(x_data) y_data = np.reshape(y_data, [1, n_examples]) ############################## YOUR CODE HERE ##################################### ''' Replace all the quotes/variables in quotes with the correct code ''' #declare graph #1: declare placeholders x and y (to hold data) x = 'x' y = 'y' #2: declare variables w (weights) and b (bias) w = 'w' b = 'b' #3: declare operations and output (multiplication) h = 'h' #declare cost function cost = 'cost' #declare optimizer and learning rate learning_rate = 'learning rate' optimizer = 'optimizer' #run graph with tf.Session() as sess: sess.run(tf.global_variables_initializer()) #tensorboard stuff summary, writer = tf_summary() #train model iterations = 'iterations' for i in range(iterations): #fill in var1, 2, 3 with the correct code sess.run('var1', feed_dict={x: 'var2', y: 'var3'}) #this is for logging the results to tensorboard so you can visualize them (i % 10 == 0 says to log the result every 10 iterations) if i % 10 == 0: writer.add_summary(sess.run(summary, feed_dict={x: 'var2', y: 'var3'}))
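For reference, one possible completion of the template above, written against the TF1-style API the file already uses (tf.Session, global_variables_initializer). The hyperparameter values are illustrative, not prescribed by the exercise:

# Hedged sketch of the blanks; shapes match x_data [n_features, n_examples]
# and y_data [1, n_examples] prepared above.
x = tf.placeholder(tf.float32, shape=[n_features, None])
y = tf.placeholder(tf.float32, shape=[1, None])

w = tf.Variable(tf.zeros([1, n_features]))
b = tf.Variable(tf.zeros([1, 1]))

h = tf.matmul(w, x) + b                     # linear hypothesis h = w.x + b
cost = tf.reduce_mean(tf.square(h - y))     # mean squared error

learning_rate = 0.01                        # illustrative value
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

iterations = 1000                           # illustrative value
# In the session loop: sess.run(optimizer, feed_dict={x: x_data, y: y_data})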
jupyterhub/oauthenticator
oauthenticator/mediawiki.py
Python
bsd-3-clause
4,178
0.000957
""" Custom Authenticator to use MediaWiki OAuth with JupyterHub Requires `mwoauth` package. """ import json import os from asyncio import wrap_future from concurrent.futures import ThreadPoolExecutor from jupyterhub.handlers import BaseHandler from jupyterhub.utils import url_path_join from mwoauth import ConsumerToken from mwoauth import Handshaker from mwoauth.tokens import RequestToken from traitlets import Any from traitlets import Integer from traitlets import Unicode from oauthenticator import OAuthCallbackHandler from oauthenticator import OAuthenticator # Name of cookie used to pass auth token between the oauth # login and authentication phase AUTH_REQUEST_COOKIE_NAME = 'mw_oauth_request_token_v2' # Helpers to jsonify/de-jsonify request_token # It is a named tuple with
bytestrings, json.dumps balks def jsonify(request_token): return json.dumps( [ request_token.key, request_to
ken.secret, ] ) def dejsonify(js): key, secret = json.loads(js) return RequestToken(key, secret) class MWLoginHandler(BaseHandler): async def get(self): consumer_token = ConsumerToken( self.authenticator.client_id, self.authenticator.client_secret, ) handshaker = Handshaker(self.authenticator.mw_index_url, consumer_token) redirect, request_token = await wrap_future( self.authenticator.executor.submit(handshaker.initiate) ) self.set_secure_cookie( AUTH_REQUEST_COOKIE_NAME, jsonify(request_token), expires_days=1, path=url_path_join(self.base_url, 'hub', 'oauth_callback'), httponly=True, ) self.log.info('oauth redirect: %r', redirect) self.redirect(redirect) class MWCallbackHandler(OAuthCallbackHandler): """ Override OAuthCallbackHandler to take out state parameter handling. mwoauth doesn't seem to support it for now! """ def check_arguments(self): pass def get_state_url(self): return None class MWOAuthenticator(OAuthenticator): login_service = 'MediaWiki' login_handler = MWLoginHandler callback_handler = MWCallbackHandler mw_index_url = Unicode( os.environ.get('MW_INDEX_URL', 'https://meta.wikimedia.org/w/index.php'), config=True, help='Full path to index.php of the MW instance to use to log in', ) executor_threads = Integer( 12, help="""Number of executor threads. MediaWiki OAuth requests happen in this thread, so it is mostly waiting for network replies. """, config=True, ) executor = Any() def normalize_username(self, username): """ Override normalize_username to avoid lowercasing usernames """ return username def _executor_default(self): return ThreadPoolExecutor(self.executor_threads) async def authenticate(self, handler, data=None): consumer_token = ConsumerToken( self.client_id, self.client_secret, ) handshaker = Handshaker(self.mw_index_url, consumer_token) request_token = dejsonify(handler.get_secure_cookie(AUTH_REQUEST_COOKIE_NAME)) handler.clear_cookie(AUTH_REQUEST_COOKIE_NAME) access_token = await wrap_future( self.executor.submit( handshaker.complete, request_token, handler.request.query ) ) identity = await wrap_future( self.executor.submit(handshaker.identify, access_token) ) if identity and 'username' in identity: # this shouldn't be necessary anymore, # but keep for backward-compatibility return { 'name': identity['username'].replace(' ', '_'), 'auth_state': { 'ACCESS_TOKEN_KEY': access_token.key, 'ACCESS_TOKEN_SECRET': access_token.secret, 'MEDIAWIKI_USER_IDENTITY': identity, }, } else: self.log.error("No username found in %s", identity)
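The jsonify/dejsonify helpers above exist because `RequestToken` is a named tuple, which `json.dumps` will not round-trip on its own. A quick hedged check (token values are made up):

from mwoauth.tokens import RequestToken

token = RequestToken("fake-key", "fake-secret")
assert dejsonify(jsonify(token)) == token    # named-tuple equality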
cmorgan/toyplot
toyplot/png.py
Python
bsd-3-clause
2,849
0.002457
# Copyright 2014, Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains certain
# rights in this software.

from __future__ import absolute_import
from __future__ import division


import toyplot.cairo.png


def render(canvas, fobj=None, width=None, height=None, scale=None):
    """Render the PNG bitmap representation of a canvas.

    By default, canvas dimensions in CSS pixels are mapped directly to pixels
    in the output PNG image.  Use one of `width`, `height`, or `scale` to
    override this behavior.

    Parameters
    ----------
    canvas: :class:`toyplot.canvas.Canvas`
      Canvas to be rendered.
    fobj: file-like object or string, optional
      The file to write.  Use a string filepath to write data directly to disk.
      If `None` (the default), the PNG data will be returned to the caller
      instead.
    width: number, optional
      Specify the width of the output image in pixels.
    height: number, optional
      Specify the height of the output image in pixels.
    scale: number, optional
      Ratio of output image pixels to `canvas` pixels.

    Returns
    -------
    png: PNG image data, or `None`
      PNG representation of `canvas`, or `None` if the caller specifies the
      `fobj` parameter.

    Notes
    -----
    The output PNG is currently rendered using
    :func:`toyplot.cairo.png.render()`.  This may change in the future.
    """
    return toyplot.cairo.png.render(canvas, fobj, width, height, scale)


def render_frames(canvas, width=None, height=None, scale=None):
    """Render a canvas as a sequence of PNG images.

    By default, canvas dimensions in CSS pixels are mapped directly to pixels
    in the output PNG images.  Use one of `width`, `height`, or `scale` to
    override this behavior.

    Parameters
    ----------
    canvas: :class:`toyplot.canvas.Canvas`
      Canvas to be rendered.
    width: number, optional
      Specify the width of the output image in pixels.
    height: number, optional
      Specify the height of the output image in pixels.
    scale: number, optional
      Ratio of output image pixels to `canvas` pixels.

    Returns
    -------
    frames: Python generator expression that returns each PNG image in the sequence.
      The caller must iterate over the returned frames and is responsible for
      all subsequent processing, including disk I/O, video compression, etc.

    Notes
    -----
    The output PNG images are currently rendered using
    :func:`toyplot.cairo.png.render_frames()`.  This may change in the future.

    Examples
    --------
    >>> for frame, png in enumerate(toyplot.png.render_frames(canvas)):
    ...   open("frame-%s.png" % frame, "wb").write(png)
    """
    return toyplot.cairo.png.render_frames(canvas, width, height, scale)
endlessm/chromium-browser
third_party/llvm/lldb/test/API/functionalities/data-formatter/data-formatter-stl/libcxx/list/loop/TestDataFormatterLibcxxListLoop.py
Python
bsd-3-clause
2,446
0.002044
""" Test that the debugger handles loops in std::list (which can appear as a result of e.g. memory corruption). """ import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class LibcxxListDataFormat
terTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) NO_DEBUG_INFO_TESTCASE = True @add_test_categories(["libc++"]) @expectedFailureAndroid(bugnumber="llvm.org/pr32592") def test_with_run_command(self): self.build() exe = self.getBuildArtifact("
a.out") target = self.dbg.CreateTarget(exe) self.assertTrue(target and target.IsValid(), "Target is valid") file_spec = lldb.SBFileSpec("main.cpp", False) breakpoint1 = target.BreakpointCreateBySourceRegex( '// Set break point at this line.', file_spec) self.assertTrue(breakpoint1 and breakpoint1.IsValid()) breakpoint2 = target.BreakpointCreateBySourceRegex( '// Set second break point at this line.', file_spec) self.assertTrue(breakpoint2 and breakpoint2.IsValid()) # Run the program, it should stop at breakpoint 1. process = target.LaunchSimple( None, None, self.get_process_working_directory()) self.assertTrue(process and process.IsValid(), PROCESS_IS_VALID) self.assertEqual( len(lldbutil.get_threads_stopped_at_breakpoint(process, breakpoint1)), 1) # verify our list is displayed correctly self.expect( "frame variable *numbers_list", substrs=[ '[0] = 1', '[1] = 2', '[2] = 3', '[3] = 4', '[5] = 6']) # Continue to breakpoint 2. process.Continue() self.assertTrue(process and process.IsValid(), PROCESS_IS_VALID) self.assertEqual( len(lldbutil.get_threads_stopped_at_breakpoint(process, breakpoint2)), 1) # The list is now inconsistent. However, we should be able to get the first three # elements at least (and most importantly, not crash). self.expect( "frame variable *numbers_list", substrs=[ '[0] = 1', '[1] = 2', '[2] = 3']) # Run to completion. process.Continue() self.assertEqual(process.GetState(), lldb.eStateExited, PROCESS_EXITED)
vivekanand1101/python-fedora
fedora/client/wiki.py
Python
gpl-2.0
9,280
0.000862
#!/usr/bin/python -tt # -*- coding: utf-8 -*- # # Copyright 2008-2009 Red Hat, Inc. # This file is part of python-fedora # # python-fedora is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # python-fedora is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with python-fedora; if not, see <http://www.gnu.org/licenses/> # ''' A Wiki Client This interface is *deprecated*. Please use resources recommended by upstream instead: https://www.mediawiki.org/wiki/API:Client_code#Python .. moduleauthor:: Luke Macken <lmacken@redhat.com> .. moduleauthor:: Toshio Kuratomi <tkuratom@redhat.com> .. moduleauthor:: Ian Weller <ian@ianweller.org> ''' from __future__ import print_function from datetime import datetime, timedelta import time import warnings from kitchen.text.converters import to_bytes from fedora.client import BaseClient, AuthError from fedora import _ MEDIAWIKI_DATEFORMAT = "%Y-%m-%dT%H:%M:%SZ" class Wiki(BaseClient): api_high_limits = False def __init__(self, base_url='https://fedoraproject.org/w/', *args, **kwargs): super(Wiki, self).__init__(base_url, *args, **kwargs) warnings.warn( "The Wiki client is deprecated. Please use resources " "recommended by upstream instead: https://www.mediawiki." "org/wiki/API:Client_code#Python") def get_recent_changes(self, now, then, limit=500): """ Get recent wiki changes from `now` until `then` """ data = self.send_request(
'api.php', req_params={ 'list': 'recentchanges', 'action': 'query', 'format': 'json', 'rcprop': 'user|title', 'rcend': then.isoformat().split('.')[0] + 'Z',
'rclimit': limit, }) if 'error' in data: raise Exception(data['error']['info']) return data['query']['recentchanges'] def login(self, username, password): data = self.send_request('api.php', req_params={ 'action': 'login', 'format': 'json', 'lgname': username, 'lgpassword': password, }) if 'lgtoken' not in data.get('login', {}): raise AuthError( 'Login failed: %(data)s' % { 'data': to_bytes(data) }) #self.session_id = data['login']['lgtoken'] #self.username = data['login']['lgusername'] self.check_api_limits() return data def check_api_limits(self): """ Checks whether you have the 'apihighlimits' right or not. """ data = self.send_request('api.php', req_params={ 'action': 'query', 'meta': 'userinfo', 'uiprop': 'rights', 'format': 'json', }) self.api_high_limits = "apihighlimits" in \ data['query']['userinfo']['rights'] return self.api_high_limits def print_recent_changes(self, days=7, show=10): now = datetime.utcnow() then = now - timedelta(days=days) print(_(u"From %(then)s to %(now)s") % {'then': then, 'now': now}) changes = self.get_recent_changes(now=now, then=then) num_changes = len(changes) print(_(u"%d wiki changes in the past week") % num_changes) if num_changes == 500: print(_( u"""Warning: Number of changes reaches the API return limit. You will not get the complete list of changes unless you run this script using a 'bot' account.""")) users = {} pages = {} for change in changes: users.setdefault(change['user'], []).append(change['title']) pages[change['title']] = pages.setdefault(change['title'], 0) + 1 print(_(u'\n== Most active wiki users ==')) for user, changes in sorted(users.items(), cmp=lambda x, y: cmp(len(x[1]), len(y[1])), reverse=True)[:show]: print(u' %-50s %d' % (('%s' % user).ljust(50, '.'), len(changes))) print(_(u'\n== Most edited pages ==')) for page, num in sorted(pages.items(), cmp=lambda x, y: cmp(x[1], y[1]), reverse=True)[:show]: print(u' %-50s %d' % (('%s' % page).ljust(50, '.'), num)) def fetch_all_revisions(self, start=1, flags=True, timestamp=True, user=True, size=False, comment=True, content=False, title=True, ignore_imported_revs=True, ignore_wikibot=False, callback=None): """ Fetch data for all revisions. This could take a long time. You can start at a specific revision by modifying the 'start' keyword argument. To ignore revisions made by "ImportUser" and "Admin" set ignore_imported_revs to True (this is the default). To ignore edits made by Wikibot set ignore_wikibot to True (False is the default). Modifying the remainder of the keyword arguments will return less/more data. 
""" # first we need to get the latest revision id change = self.send_request( 'api.php', req_params={ 'list': 'recentchanges', 'action': 'query', 'format': 'json', 'rcprop': 'ids', 'rclimit': 1, 'rctype': 'edit|new', } ) latest_revid = change['query']['recentchanges'][0]['revid'] # now we loop through all the revisions we want rvprop_list = { 'flags': flags, 'timestamp': timestamp, 'user': True, 'size': size, 'comment': comment, 'content': content, 'ids': True, } rvprop = '|'.join([key for key in rvprop_list if rvprop_list[key]]) revs_to_get = list(range(start, latest_revid)) all_revs = {} if self.api_high_limits: limit = 500 else: limit = 50 for i in range(0, len(revs_to_get), limit): revid_list = revs_to_get[i:i+limit] revid_str = '|'.join([str(rev) for rev in revid_list]) data = self.send_request( 'api.php', req_params={ 'action': 'query', 'prop': 'revisions', 'rvprop': rvprop, 'revids': revid_str, 'format': 'json', } ) if 'pages' not in data['query'].keys(): continue if 'badrevids' in data['query'].keys(): [revs_to_get.remove(i['revid']) for i in data['query']['badrevids'].values()] for pageid in data['query']['pages']: page = data['query']['pages'][pageid] for revision in page['revisions']: if ignore_imported_revs and \ revision['user'] in ['ImportUser', 'Admin'] or \ ignore_wikibot and revision['user'] == 'Wikibot': revs_to_get.remove(revision['revid']) continue this_rev = {} if flags: this_rev['minor'] = 'minor' in revision.keys() if timestamp: this_rev['time'] = time.strptime(revision['timestamp'], MEDIAWIKI_DATEFORMAT) if user: this_rev['user'] = re
Csega/pyTsai
windows_binary_2.4/windows_compiling/msvccompiler.py
Python
lgpl-2.1
22,127
0.004384
"""distutils.msvccompiler Contains MSVCCompiler, an implementation of the abstract CCompiler class for the Microsoft Visual Studio. """ # Written by Perry Stoll # hacked by Robin Becker and Thomas Heller to do a better job of # finding DevStudio (through the registry) # This module should be kept compatible with Python 2.1. __revision__ = "$Id: msvccompiler.py,v 1.64.2.4 2005/08/07 20:50:37 loewis Exp $" import sys, os, string from distutils.errors import \ DistutilsExecError, DistutilsPlatformError, \ CompileError, LibError, LinkError from distutils.ccompiler import \ CCompiler, gen_preprocess_options, gen_lib_options from distutils import log _can_read_reg = 0 try: import _winreg _can_read_reg = 1 hkey_mod = _winreg RegOpenKeyEx = _winreg.OpenKeyEx RegEnumKey = _winreg.EnumKey RegEnumValue = _winreg.EnumValue RegError = _winreg.error except ImportError: try: import win32api import win32con _can_read_reg = 1 hkey_mod = win32con RegOpenKeyEx = win32api.RegOpenKeyEx RegEnumKey = win32api.RegEnumKey RegEnumValue = win32api.RegEnumValue RegError = win32api.error except ImportError: log.info("Warning: Can't read registry to find the " "necessary compiler setting\n" "Make sure that Python modules _winreg, " "win32api or win32con are installed.") pass if _can_read_reg: HKEYS = (hkey_mod.HKEY_USERS, hkey_mod.HKEY_CURRENT_USER, hkey_mod.HKEY_LOCAL_MACHINE, hkey_mod.HKEY_CLASSES_ROOT) def read_keys(base, key): """Return list of registry keys.""" try: handle = RegOpenKeyEx(base, key) except RegError: return None L = [] i = 0 while 1: try: k = RegEnumKey(handle, i) except RegError: break L.append(k) i = i + 1 return L def read_values(base, key): """Return dict of registry keys and values. All names are converted to lowercase. """ try: handle = RegOpenKeyEx(base, key) except RegError: return None d = {} i = 0 while 1: try: name, value, type = RegEnumValue(handle, i) except RegError: break name = name.lower() d[convert_mbcs(name)] = convert_mbcs(value) i = i + 1 return d def convert_mbcs(s): enc = getattr(s, "encode", None) if enc is not None: try: s = enc("mbcs") except UnicodeError: pass return s class MacroExpander: def __init__(self, version): self.macros = {} self.load_macros(version) def set_macro(self, macro, path, key): for base in HKEYS: d = read_values(base, path) if d: self.macros["$(%s)" % macro] = d[key] break def load_macros(self, version): vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") net = r"Software\Microsoft\.NETFramework" self.set_macro("FrameworkDir", net, "installroot") try: if version > 7.0: try: self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") except KeyError: # likely using free Command-line compiler with free SDK freeSDK = r"SOFTWARE\Microsoft\MicrosoftSDK\InstalledSDKs\63DADB24-DC99-45EB-A748-EC93AB8A7497" # following should raise key error if not available... 
self.set_macro( "FrameworkSDKDir", freeSDK, 'install dir' ) else: self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") except KeyError, exc: # raise DistutilsPlatformError, \ ("The .NET Framework SDK needs to be installed before " "building extensions for Python.") p = r"Software\Microsoft\NET Framework Setup\Product" for base in HKEYS: try: h = RegOpenKeyEx(base, p) except RegError: continue key = RegEnumKey(h, 0) d = read_values(base, r"%s\%s" % (p, key)) self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): for k, v in self.macros.items(): s = string.replace(s, k, v) return s def get_build_version(): """Return the version of MSVC that was used to
build Python. For Python 2.3 and up, the version number is included in sys.version. For earlier versions, assume the compiler is MSVC 6. """ prefix = "MSC v." i = string.fin
d(sys.version, prefix) if i == -1: return 6 i = i + len(prefix) s, rest = sys.version[i:].split(" ", 1) majorVersion = int(s[:-2]) - 6 minorVersion = int(s[2:3]) / 10.0 # I don't think paths are affected by minor version in version 6 if majorVersion == 6: minorVersion = 0 if majorVersion >= 6: return majorVersion + minorVersion # else we don't know what version of the compiler this is return None class MSVCCompiler (CCompiler) : """Concrete class that implements an interface to Microsoft Visual C++, as defined by the CCompiler abstract class.""" compiler_type = 'msvc' # Just set this so CCompiler's constructor doesn't barf. We currently # don't use the 'set_executables()' bureaucracy provided by CCompiler, # as it really isn't necessary for this sort of single-compiler class. # Would be nice to have a consistent interface with UnixCCompiler, # though, so it's worth thinking about. executables = {} # Private class data (need to distinguish C from C++ source for compiler) _c_extensions = ['.c'] _cpp_extensions = ['.cc', '.cpp', '.cxx'] _rc_extensions = ['.rc'] _mc_extensions = ['.mc'] # Needed for the filename generation methods provided by the # base class, CCompiler. src_extensions = (_c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions) res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' shared_lib_extension = '.dll' static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' def __init__ (self, verbose=0, dry_run=0, force=0): CCompiler.__init__ (self, verbose, dry_run, force) self.__version = get_build_version() if self.__version >= 7: self.__root = r"Software\Microsoft\VisualStudio" self.__macros = MacroExpander(self.__version) else: self.__root = r"Software\Microsoft\Devstudio" self.initialized = False def initialize(self): self.__paths = self.get_msvc_paths("path") if len (self.__paths) == 0: raise DistutilsPlatformError, \ ("Python was built with version %s of Visual Studio, " "and extensions need to be built with the same " "version of the compiler, but it isn't installed." % self.__version) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") self.lib = self.find_exe("lib.exe") self.rc = self.find_exe("rc.exe") # resource compiler self.mc = self.find_exe("mc.exe") # message compiler self.set_path_env_var('lib') self.set_path_env_var('include') # extend the MSVC path with the current path try: for p in string.split(os.environ['path'], ';'): self.__paths.append(p) except KeyError: pass os.environ['path'] = string.join(self.__paths, ';') self.preprocess_options = None self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' , '/DNDEBUG'] self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
yahman72/robotframework
atest/testdata/standard_libraries/remote/specialerrors.py
Python
apache-2.0
689
0.001451
import sys from remoteserver import Direct
ResultRemoteServer class SpecialErrors(object): def
continuable(self, message, traceback): return self._special_error(message, traceback, continuable=True) def fatal(self, message, traceback): return self._special_error(message, traceback, fatal='this wins', continuable=42) def _special_error(self, message, traceback, continuable=False, fatal=False): return {'status': 'FAIL', 'error': message, 'traceback': traceback, 'continuable': continuable, 'fatal': fatal} if __name__ == '__main__': DirectResultRemoteServer(SpecialErrors(), *sys.argv[1:])
rereidy/SPSE
module 5 - Exploitation Techniques/5-1.py
Python
gpl-3.0
3,540
0.004237
#!/usr/bin/env python

import threading
import Queue
import ftplib
import getopt
import os
import sys
import time

DEF_THREAD_CNT = 5
DEF_NAP_TIME = 10

class FTPExcept(Exception):
    def __init__(self, v):
        self.value = v

    def __str__(self):
        return repr(self.value)

class FTPWT(threading.Thread):
    def __init__(self, q, lock, s=DEF_NAP_TIME):
        threading.Thread.__init__(self)
        self.queue = q
        self.sleep_time = s
        self.lock = lock

    def run(self):
        while True:
            # Fetch the next site inside the loop so each worker keeps pulling
            # jobs from the queue instead of reusing its first site forever.
            ftpsite = self.queue.get()
            f = None
            try:
                print "Connect to site: " + ftpsite + " ================================================================"
                f = ftplib.FTP(ftpsite)
                f.login()
                if self.lock:
                    self.lock.acquire()
                f.retrlines('LIST')
                if self.lock:
                    self.lock.release()
                print "Listing completed ================================================================"
            except Exception as e:
                print >> sys.stderr, "FTPWT exception: ", e
            finally:
                time.sleep(self.sleep_time)
                self.queue.task_done()
                if f is not None:
                    f.quit()

def usage():
    print >> sys.stderr, sys.argv[0] + ": list ftp site directory contents"
    print >> sys.stderr, "usage is: " + sys.argv[0] + " -s site_list -t thread_count -n sleep_time -l [-h]"
    print >> sys.stderr, "site_list can be a comma-delimited list of sites"

def process_args(argv):
    sites = []
    thread_count = -1
    nap_time = -1
    locking = False

    try:
        # 'n' takes an argument, so it needs a trailing colon in the optstring.
        opts, args = getopt.getopt(argv, 'hs:t:n:l')
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    for opt, arg in opts:
        if opt == '-h':
            usage()
            sys.exit(2)
        elif opt == '-s':
            for s in (arg.split(',')):
                sites.append(s)
        elif opt == '-t':
            thread_count = int(arg)
        elif opt == '-n':
            nap_time = int(arg)
        elif opt == '-l':
            locking = True

    return (sites, thread_count, nap_time, locking)

def queue_em(sites, locking, t=DEF_THREAD_CNT, s=DEF_NAP_TIME):
    queue = Queue.Queue()
    lock = None
    if locking:
        lock = threading.Lock()

    for i in range(1, t+1):
        worker = FTPWT(queue, lock, s=s)
        worker.setDaemon(True)
        worker.start()
        print "FTPWT worker %d created" %i

    for site in sites:
        queue.put(site.strip())

    queue.join()

if __name__ == "__main__":
    (sites, thread_count, nap_time, locking) = process_args(sys.argv[1:])

    try:
        if len(sites) < 1:
            raise FTPExcept("no sites specified")

        if thread_count < 1:
            thread_count = DEF_THREAD_CNT
            print >> sys.stderr, "warning: no thread count (-t) specified - using default %d" %DEF_THREAD_CNT

        if len(sites) < thread_count:
            print >> sys.stderr, "thread count exceeds number of sites to check - using number of sites as thread count"
            thread_count = len(sites)

        if nap_time < 1:
            print >> sys.stderr, "warning: no sleep time (-n) argument specified - using default %d" %DEF_NAP_TIME
            nap_time = DEF_NAP_TIME

        queue_em(sites, locking, t=thread_count, s=nap_time)
        print "all threads completed"
    except FTPExcept as e:
        print "fatal error: ", e.value
        sys.exit(3)
spektom/incubator-airflow
airflow/contrib/sensors/gcs_sensor.py
Python
apache-2.0
3,286
0.003652
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #
KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs`.""" import warnings from airflow.providers.google.cloud.sensors.gcs import ( GCSObjectExistenceSensor, GCSObjectsWtihPrefixExistenceSensor, GCSObjectUpdateSensor, GCSUploadSessionCompleteSensor, ) warnings.warn( "This module i
s deprecated. Please use `airflow.providers.google.cloud.sensors.gcs`.", DeprecationWarning, stacklevel=2 ) class GoogleCloudStorageObjectSensor(GCSObjectExistenceSensor): """ This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) class GoogleCloudStorageObjectUpdatedSensor(GCSObjectUpdateSensor): """ This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) class GoogleCloudStoragePrefixSensor(GCSObjectsWtihPrefixExistenceSensor): """ This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) class GoogleCloudStorageUploadSessionCompleteSensor(GCSUploadSessionCompleteSensor): """ This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs)
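Since every shim above only forwards to the new provider package, new DAGs should import from it directly. A hedged sketch (bucket, object, and task id are invented; note the `Wtih` spelling is the provider's actual class name at this point in its history):

from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor

wait_for_object = GCSObjectExistenceSensor(
    task_id="wait_for_object",      # illustrative task id
    bucket="example-bucket",        # hypothetical bucket
    object="data/ready.flag",       # hypothetical object path
)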
uranusjr/django
django/db/backends/base/introspection.py
Python
bsd-3-clause
7,497
0.001601
from collections import namedtuple # Structure returned by DatabaseIntrospection.get_table_list() TableInfo = namedtuple('TableInfo', ['name', 'type']) # Structure returned by the DB-API cursor.description interface (PEP 249) FieldInfo = namedtuple('FieldInfo', 'name type_code display_size internal_size precision scale null_ok default') class BaseDatabaseIntrospection: """Encapsulate backend-specific introspection utilities.""" data_types_reverse = {} def __init__(self, connection): self.connection = connection def get_field_type(self, data_type, description): """ Hook for a database backend to use the cursor description to match a Django field type to a database column. For Oracle, the column data_type on its own is insufficient to distinguish between a FloatField and IntegerField, for example. """ return self.data_types_reverse[data_type] def table_name_converter(self, name): """ Apply a conversion to the name for the purposes of comparison. The default table name converter is for case sensitive comparison. """ return name def column_name_converter(self, name): """ Apply a conversion to the column name for the purposes of comparison. Use table_name_converter() by default. """ return self.table_name_converter(name) def table_names(self, cursor=None, include_views=False): """ Return a list of names of all tables that exist in the database. Sort the returned table list by Python's default sorting. Do NOT use the database's ORDER BY here to avoid subtle differences in sorting order between databases. """ def get_names(cursor): return sorted(ti.name for ti in self.get_table_list(cursor) if include_views or ti.type == 't') if cursor is None: with self.connection.cursor() as cursor: return get_names(cursor) return get_names(cursor) def get_table_list(self, cursor): """ Return an unsorted list of TableInfo named tuples of all tables and views that exist in the database. """ raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_table_list() method') def django_table_names(self, only_existing=False, include_views=True): """ Return a list of all table names that have associated Django models and are in INSTALLED_APPS. If only_existing is True, include only the tables in the database. """ from django.apps import apps from django.db import router tables = set() for app_config in apps.get_app_configs(): for model in router.get_migratable_models(app_co
nfig, self.connection.alias): if not model._meta.managed: continue tables.add(model._meta.db_table) tables.update( f.m2m_db_table() for f in model._meta.local_many_to_
many if f.remote_field.through._meta.managed ) tables = list(tables) if only_existing: existing_tables = self.table_names(include_views=include_views) tables = [ t for t in tables if self.table_name_converter(t) in existing_tables ] return tables def installed_models(self, tables): """ Return a set of all models represented by the provided list of table names. """ from django.apps import apps from django.db import router all_models = [] for app_config in apps.get_app_configs(): all_models.extend(router.get_migratable_models(app_config, self.connection.alias)) tables = list(map(self.table_name_converter, tables)) return { m for m in all_models if self.table_name_converter(m._meta.db_table) in tables } def sequence_list(self): """ Return a list of information about all DB sequences for all models in all apps. """ from django.apps import apps from django.db import router sequence_list = [] with self.connection.cursor() as cursor: for app_config in apps.get_app_configs(): for model in router.get_migratable_models(app_config, self.connection.alias): if not model._meta.managed: continue if model._meta.swapped: continue sequence_list.extend(self.get_sequences(cursor, model._meta.db_table, model._meta.local_fields)) for f in model._meta.local_many_to_many: # If this is an m2m using an intermediate table, # we don't need to reset the sequence. if f.remote_field.through is None: sequence = self.get_sequences(cursor, f.m2m_db_table()) sequence_list.extend(sequence or [{'table': f.m2m_db_table(), 'column': None}]) return sequence_list def get_sequences(self, cursor, table_name, table_fields=()): """ Return a list of introspected sequences for table_name. Each sequence is a dict: {'table': <table_name>, 'column': <column_name>}. An optional 'name' key can be added if the backend supports named sequences. """ raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_sequences() method') def get_key_columns(self, cursor, table_name): """ Backends can override this to return a list of: (column_name, referenced_table_name, referenced_column_name) for all key columns in given table. """ raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_key_columns() method') def get_primary_key_column(self, cursor, table_name): """ Return the name of the primary key column for the given table. """ for constraint in self.get_constraints(cursor, table_name).values(): if constraint['primary_key']: return constraint['columns'][0] return None def get_constraints(self, cursor, table_name): """ Retrieve any constraints or keys (unique, pk, fk, check, index) across one or more columns. Return a dict mapping constraint names to their attributes, where attributes is a dict with keys: * columns: List of columns this covers * primary_key: True if primary key, False otherwise * unique: True if this is a unique constraint, False otherwise * foreign_key: (table, column) of target, or None * check: True if check constraint, False otherwise * index: True if index, False otherwise. * orders: The order (ASC/DESC) defined for the columns of indexes * type: The type of the index (btree, hash, etc.) Some backends may return special constraint names that don't exist if they don't name constraints of a certain type (e.g. SQLite) """ raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_constraints() method')
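A short usage sketch of the entry points above through a backend's introspection instance; assumes a configured Django project with a default database connection:

from django.db import connection

with connection.cursor() as cursor:
    # Sorted table names, per table_names(); views are excluded by default.
    tables = connection.introspection.table_names(cursor)
    if tables:
        # Dict of constraint attributes keyed by constraint name,
        # per get_constraints().
        constraints = connection.introspection.get_constraints(cursor, tables[0])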
sa2ajj/DistroTracker
pts/vendor/skeleton/rules.py
Python
gpl-2.0
11,857
0.000675
# Copyright 2013 The Distro Tracker Developers # See the COPYRIGHT file at the top-level directory of this distribution and # at http://deb.li/DTAuthors # # This file is part of Distro Tracker. It is subject to the license terms # in the LICENSE file found in the top-level directory of this # distribution and at http://deb.li/DTLicense. No part of Distro Tracker, # including this file, may be copied, modified, propagated, or distributed # except according to the terms contained in the LICENSE file. """ A skeleton of all vendor-specific functions that can be implemented. """ from __future__ import unicode_literals def get_keyword(local_part, msg): """ The function should return a keyword which matches the message or ``None`` if it does not match any keyword or the vendor does not provide any custom keyword matching. :param local_part: The local part of the email address to which the message was sent :type local_part: string :param msg
: The original received package message :type msg: :py:class:`Message <email.message.Message>` """ pass def add_new_headers(received_message, package_name, keyword): """ The function should return a list of two-tuples (header_name, header_value) which are extra headers that should be added to package messages before they are forwarded to subscrib
ers. If no extra headers are wanted return an empty list or ``None`` :param received_message: The original received package message :type received_message: :py:class:`email.message.Message` :param package_name: The name of the package for which the message was intended :type package_name: string :param keyword: The keyword with which the message is tagged. :type keyword: string """ pass def approve_default_message(msg): """ The function should return a ``Boolean`` indicating whether this message should be forwarded to subscribers which are subscribed to default keyword messages. :param msg: The original received package message :type msg: :py:class:`email.message.Message` """ pass def get_pseudo_package_list(): """ The function should return a list of pseudo-packages (their names) which are to be considered valid pseudo-packages. Any existing pseudo-packages which are no longer found in this list will be "demoted" to subscription-only packages, instead of being deleted. If there should be no update to the list, the function should return ``None``. """ pass def get_package_information_site_url(package_name, source_package=False, repository_name=None): """ The function should return a URL to a package information Web page for the given package and repository. The repository parameter is optional. If no URL exists for the given parameters, returns ``None``. :param package_name: The name of the package for which the URL of the package information Web page should be given. :type package_name: string :param source_package: If ``True`` the function should consider the given package a source package, otherwise it should be considered a binary package. :type source_package: ``Boolean`` :param repository_name: The name of the repository for which the package information should be provided. """ pass def get_developer_information_url(developer_email): """ The function should return a URL which displays extra information about a developer, given his email. The function should return ``None`` if the vendor does not provide additional developer information or if it does not have the information for the particular developer email. In this case, on the package page, a <mailto> link will be provided, instead of the additional information. .. note:: This function can be used by other modules apart from the general panel :param developer_email: The email of the developer for which a URL to a site with additional information should be given. :type developer_email: string """ pass def get_external_version_information_urls(package_name): """ The function should return a list of external Web resources which provide additional information about the versions of a package. Each element of the list should be a dictionary with the keys: - url - description The function should return ``None`` if the vendor does not want to provide extra version information URLs. :param package_name: The name of the package for which external version information URLs should be provided. :type package_name: string """ pass def get_maintainer_extra(developer_email, package_name=None): """ The function should return a list of additional items that are to be included in the general panel next to the maintainer. Each item needs to be a dictionary itself and can contain the following keys: - display - description - url .. note:: Only the ``display`` key is mandatory. The function should return ``None`` if the vendor does not wish to include any extra items. 
:param developer_email: The email of the maintainer for which extra information is requested. :param package_name: The name of the package where the contributor is the maintainer and for which extra information should be provided. This parameter is included in case vendors want to provide different information based on the package page where the information will be displayed. """ pass def get_uploader_extra(developer_email, package_name=None): """ The function should return a list of additional items that are to be included in the general panel next to an uploader. Each item needs to be a dictionary itself and can contain the following keys: - display - description - url .. note:: Only the ``display`` key is mandatory. The function should return ``None`` if the vendor does not wish to include any extra items. :param developer_email: The email of the uploader for which extra information is requested. :param package_name: The name of the package where the contributor is an uploader and for which extra information should be provided. This parameter is included in case vendors want to provide different information based on the package page where the information will be displayed. """ pass def allow_package(stanza): """ The function provides a way for vendors to exclude some packages from being saved in the database. :param stanza: The raw package entry from a ``Sources`` file. :type stanza: case-insensitive dict """ pass def get_bug_tracker_url(package_name, package_type, category_name): """ The function provides a way for vendors to give a URL to a bug tracker based on a package name, its type and the bug category name. This function is used by :class:`BugsPanel <pts.core.panels.BugsPanel>` to include a link to the bug tracking site on top of the known bug statistics. :param package_name: The name of the package for which the bug tracker URL should be provided. :param package_type: The type of the package for which the bug tracker URL should be provided. It is one of: ``source``, ``pseudo`` or ``binary``. :param category_name: The name of the bug tracker category for which the URL should be provided. :returns: The bug tracker URL for the package and given category. :rtype: string or ``None`` if the vendor does not have a bug tracker URL for the given parameters. """ pass def get_bug_panel_stats(package_name): """ The function provides a way for vendors to customize the bug categories displayed in the :class:`BugsPanel <pts.core.panels.BugsPanel>`. This is useful if the vendor does not want to have all categories which are stored
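A hedged sketch of how a concrete vendor module might implement one of the hooks above; the URL template is hypothetical:

def get_package_information_site_url(package_name, source_package=False,
                                     repository_name=None):
    # Hypothetical vendor package site; return None when no URL applies.
    kind = 'source' if source_package else 'binary'
    return 'https://packages.example.org/{}/{}'.format(kind, package_name)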
sysuccc/QiuDaBao
manage.py
Python
gpl-2.0
251
0
#!/usr/bin/env python
import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QiuDaBao.settin
gs") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
ArcherSys/ArcherSys
skulpt/test/run/t242.py
Python
mit
262
0.072519
class O(obje
ct): pass class A(O): pass class B(O): pass clas
s C(O): pass class D(O): pass class E(O): pass class K1(A,B,C): pass class K2(D,B,E): pass class K3(D,A): pass class Z(K1,K2,K3): pass print K1.__mro__ print K2.__mro__ print K3.__mro__ print Z.__mro__
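For reference, working the C3 merge by hand (this hierarchy mirrors the examples in the Python 2.3 MRO notes) gives the linearizations the prints above should show:

# K1: (K1, A, B, C, O, object)
# K2: (K2, D, B, E, O, object)
# K3: (K3, D, A, O, object)
# Z : (Z, K1, K2, K3, D, A, B, C, E, O, object)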
RyanSkraba/beam
sdks/python/apache_beam/examples/streaming_wordcount_it_test.py
Python
apache-2.0
4,580
0.001965
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """End-to-end test for the streaming wordcount example.""" from __future__ import absolute_import import logging import unittest import uuid from builtins import range from hamcrest.cor
e.core.allof import all_of from nose.plugins.attrib import attr from apache_beam.examples import streaming_wordcount from apache_beam.io.gcp.tests.pubsub_matcher import PubSubMessageMatcher from apache_beam.runners.runner import PipelineState from apache_beam.testing import test_utils from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher from apache_beam.testing.test_pipeline import TestPipeline INPUT_TOPIC = 'wc_topic_input' OUTPUT_TOPIC = 'wc_topic_output' INPUT_SUB = 'wc_subscription_input' OUTPUT_SUB = 'wc_subscription_output' DEFAULT_INPUT_NUMBERS = 500 WAIT_UNTIL_FINISH_DURATION = 6 * 60 * 1000 # in milliseconds class StreamingWordCountIT(unittest.TestCase): def setUp(self): self.test_pipeline = TestPipeline(is_integration_test=True) self.project = self.test_pipeline.get_option('project') self.uuid = str(uuid.uuid4()) # Set up PubSub environment. from google.cloud import pubsub self.pub_client = pubsub.PublisherClient() self.input_topic = self.pub_client.create_topic( self.pub_client.topic_path(self.project, INPUT_TOPIC + self.uuid)) self.output_topic = self.pub_client.create_topic( self.pub_client.topic_path(self.project, OUTPUT_TOPIC + self.uuid)) self.sub_client = pubsub.SubscriberClient() self.input_sub = self.sub_client.create_subscription( self.sub_client.subscription_path(self.project, INPUT_SUB + self.uuid), self.input_topic.name) self.output_sub = self.sub_client.create_subscription( self.sub_client.subscription_path(self.project, OUTPUT_SUB + self.uuid), self.output_topic.name, ack_deadline_seconds=60) def _inject_numbers(self, topic, num_messages): """Inject numbers as test data to PubSub.""" logging.debug('Injecting %d numbers to topic %s', num_messages, topic.name) for n in range(num_messages): self.pub_client.publish(self.input_topic.name, str(n).encode('utf-8')) def tearDown(self): test_utils.cleanup_subscriptions(self.sub_client, [self.input_sub, self.output_sub]) test_utils.cleanup_topics(self.pub_client, [self.input_topic, self.output_topic]) @attr('IT') def test_streaming_wordcount_it(self): # Build expected dataset. expected_msg = [('%d: 1' % num).encode('utf-8') for num in range(DEFAULT_INPUT_NUMBERS)] # Set extra options to the pipeline for test purpose state_verifier = PipelineStateMatcher(PipelineState.RUNNING) pubsub_msg_verifier = PubSubMessageMatcher(self.project, self.output_sub.name, expected_msg, timeout=400) extra_opts = {'input_subscription': self.input_sub.name, 'output_topic': self.output_topic.name, 'wait_until_finish_duration': WAIT_UNTIL_FINISH_DURATION, 'on_success_matcher': all_of(state_verifier, pubsub_msg_verifier)} # Generate input data and inject to PubSub. self._inject_numbers(self.input_topic, DEFAULT_INPUT_NUMBERS) # Get pipeline options from command argument: --test-pipeline-options, # and start pipeline job by calling pipeline main function. streaming_wordcount.run( self.test_pipeline.get_full_options_as_args(**extra_opts), save_main_session=False) if __name__ == '__main__': logging.getLogger().setLevel(logging.DEBUG) unittest.main()
pat1/autoradio
autoradio/mpris2/decorator/__init__.py
Python
gpl-2.0
323
0.003096
''' This is
not part of the specification. It is a helper module that re-exports the decorators so they can be used as a Python library. ''' from .attribute import DbusAttr from .interface import DbusInterface from .method import DbusMethod from .signal import DbusSignal from .utils import get_mainloop, get_uri, implements, \ list_all_interface, list
_interfaces, list_paths
mayfield/plexcli
plexcli/commands/activity.py
Python
mit
1,539
0
""" Activity logs. """ import asyncio import datetime import json import websockets from . import base from shellish.layout import Table class Log(base.PlexCommand): """ Show activity log """ name = 'log' type_map = { 'StatusNotification': 'Status', 'ProgressNotification': 'Progress' } @asyncio.coroutine def notifications(self, table): server = self.serverapi.uri.split('://', 1)[1] notif_url = 'ws://%s/:/websockets/notifications' % server feed = yield from websockets.connect(notif_url) while True: data = yield from feed.recv() if data is None: break table.print(json.loads(data).get('_children')) yield from feed.close() def get_ts(self, obj): return datetime.datetime.now().strftime('%I:%M:%S %p') def get_type(self, obj): return
self.type_map[obj['_elementType']] def get_msg(self, obj): if 'message' in obj: return obj['message'] return '%s: %s' % (obj['title'], obj['description']) def run(self, args): headers = ['Date', 'Type', 'Message'] accessors = [self.get_ts, self.get_type, self.get_msg] table = Table(headers=headers, accessors=accessors) evloop = asyncio.get_event_lo
op() evloop.run_until_complete(self.notifications(table)) activity = base.PlexCommand(name='activity', doc=__doc__) activity.add_subcommand(Log, default=True) __commands__ = [activity]
kenshay/ImageScript
ProgramData/SystemFiles/Python/Lib/site-packages/elan/Pools/AddRemove/1_Count_Security_Panels.py
Python
gpl-3.0
2,762
0.005069
from ImageScripter import * from elan.functions import Get_Device_List_Simple,Diff from elan import * #Past_List = ['2GIG GC3', '2GIG GC3', '2GIG GC3 (3.2 firmware)', 'Ademco VISTA-128BP,250BP,FA1660C', 'Ademco VISTA-128BPT,250BPT', 'Ademco VISTA-128FBP,250FBP', 'Bosch/Radionics D7412G,D9412G', 'DSC MAXSYS', 'DSC Power Series / 5401', 'DSC Power Series / IT-100', 'ELK-M1', 'GE Concord', 'GE NetworX NX-4,6,8,8E', 'HAI Omni Series', 'Napco Gemini GEM-X255, P9600', 'Paradox Digiplex', 'Texecom Premier Elite', 'Virtual Security Controller'] #Past_List = ['2GIG GC3', '2GIG GC3 (3.2 firmware)', 'Ademco VISTA-128BP,250BP,FA1660C', 'Ademco VISTA-128BPT,250BPT', 'Ademco VISTA-128FBP,250FBP', 'Bosch/Radionics D7412G,D9412G', 'DSC MAXSYS', 'DSC Power Series / 5401', 'DSC Power Series / IT-100', 'ELK-M1', 'GE Concord', 'GE NetworX NX-4,6,8,8E', 'HAI Omni Series', 'Napco Gemini GEM-X255, P9600', 'Paradox Digiplex', 'Texecom Premier Elite', 'Vario (IP)', 'Vario (RS-232)', 'Virtual Security Controller'] #Past_List = ['2GIG GC3', '2GIG GC3', '2GIG GC3 (3.2 firmware)', 'Ademco VISTA-128BP,250BP,FA1660C', 'Ademco VISTA-128BPT,250BPT', 'Ademco V
ISTA-128FBP,250FBP', 'Bosch/Radionics D7412G,D94
12G', 'DSC MAXSYS', 'DSC Power Series / 5401', 'DSC Power Series / IT-100', 'ELK-M1', 'GE Concord', 'GE NetworX NX-4,6,8,8E', 'HAI Omni Series', 'Napco Gemini GEM-X255, P9600', 'Paradox Digiplex', 'Texecom Premier Elite', 'Vario (IP)', 'Vario (RS-232)', 'Virtual Security Controller'] Past_List = ['2GIG GC3', '2GIG GC3 (3.2 firmware)', 'Ademco VISTA-128BP,250BP,FA1660C', 'Ademco VISTA-128BPT,250BPT', 'Ademco VISTA-128FBP,250FBP', 'Bosch/Radionics D7412G,D9412G', 'DSC MAXSYS', 'DSC Power Series / 5401', 'DSC Power Series / IT-100', 'ELK-M1', 'GE Concord', 'GE NetworX NX-4,6,8,8E', 'HAI Omni Series', 'Napco Gemini GEM-X255, P9600', 'Paradox Digiplex', 'Texecom Premier Elite', 'Vario (IP)', 'Vario (RS-232)', 'Virtual Security Controller'] One = Configurator.security Two = Configurator.securitypanels Current_List = Get_Device_List_Simple(One,Two) Configurator.system.Click() if Current_List != Past_List: Difference = Diff(Past_List,Current_List) error = 'List Changed\n' + str(Difference) raise ValueError(error) ''' from ImageScripter import * from elan.functions import Get_Device_Count from elan import * One = Configurator.security Two = Configurator.securitypanels count = 18 newcount = Get_Device_Count_Simple(One,Two) print("New Count is " + str(newcount)) if count != newcount: Say('Count for ' + Two.DisplayName + " is off") raise ValueError('Exception 11 -> Count for ' + Two.DisplayName + " is off, raising error") else: Say("The new count matches the old count. The test has passed") Configurator.system.Click() '''
volpino/Yeps-EURAC
tools/regVariation/substitutions.py
Python
mit
2,849
0.015444
#! /usr/bin/python #Guruprasad ANanda """ Fetches substitutions from pairwise alignments. """ from galaxy import eggs from galaxy.tools.util import maf_utilities import bx.align.maf import sys import os, fileinput def stop_err(msg): sys.stderr.write(msg) sys.exit() if len(sys.argv) < 3: stop_err("Incorrect number of arguments.") inp_file = sys.argv[1] out_file = sys.argv[2] fout = open(out_file, 'w') def fetchSubs(block): src1 = block.components[0].src sequence1 = block.components[0].text start1 = block.components[0].start end1 = block.components[0].end len1 = int(end1)-int(start1) len1_withgap = len(sequence1) for seq in range (1,len(block.components)): src2 = block.components[seq].src sequence2 = block.components[seq].text start2 = block.components[seq].start end2 = block.components[seq].end len2 = int(end2)-int(start2) sub_begin = None sub_end = None begin = False for nt in range(len1_withgap): if sequence1[nt] not in '-#$^*?' and sequence2[nt] not in '-#$^*?': #Not a gap or masked character if sequence1[nt].upper() != sequence2[nt].upper(): if not(begin): sub_begin = nt begin = True sub_end = nt else: if begin: print >>fout, "%s\t%s\t%s" %(src1,start1+sub_begin-sequence1[0:sub_begin].count('-'),start1+sub_end-sequence1[0:sub_end].cou
nt('-')) print >>fout, "%s\t%s\t%s" %(src2,start2+sub_be
gin-sequence2[0:sub_begin].count('-'),start2+sub_end-sequence2[0:sub_end].count('-')) begin = False else: if begin: print >>fout, "%s\t%s\t%s" %(src1,start1+sub_begin-sequence1[0:sub_begin].count('-'),start1+sub_end-sequence1[0:sub_end].count('-')) print >>fout, "%s\t%s\t%s" %(src2,start2+sub_begin-sequence2[0:sub_begin].count('-'),start2+sub_end-sequence2[0:sub_end].count('-')) begin = False def main(): skipped = 0 not_pairwise = 0 try: maf_reader = bx.align.maf.Reader( open(inp_file, 'r') ) except: stop_err("Your MAF file appears to be malformed.") print >>fout, "#Chr\tStart\tEnd" for block in maf_reader: if len(block.components) != 2: not_pairwise += 1 continue try: fetchSubs(block) except: skipped += 1 if not_pairwise: print "Skipped %d non-pairwise blocks" %(not_pairwise) if skipped: print "Skipped %d blocks" %(skipped) if __name__ == "__main__": main()
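A usage sketch for the script above; the file names are hypothetical:

# python substitutions.py alignment.maf substitutions.tsv
# The output is tab-separated (#Chr, Start, End) with one pair of rows,
# one per species, for each run of substituted, ungapped columns found
# in every pairwise block.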
dashee87/cluster-flag
clusterflag/__init__.py
Python
mit
23
0
__v
ersion__ = '0.1.
2'
Adventure-Inc/chachas-adventures
services/views.py
Python
apache-2.0
167
0
from django.shortcuts import render def services(request): return render(request, 'services/services.html', {})
bendudson/BOUT
examples/test-staggered/generate.py
Python
gpl-3.0
522
0.005747
#!/usr
/bin/env python # # Generate an input mesh # from boututils import DataFile # Wrapper around NetCDF4 libraries nx = 5 # Minimum is 5: 2 boundary, one evolved ny = 32 # Minimum 5. Should be divisible by number of processors (so powers of 2 nice) d
y = 1. # distance between points in y, in m/g22/lengthunit ixseps1 = -1 ixseps2 = -1 f = DataFile() f.open("test-staggered.nc", create=True) f.write("nx", nx) f.write("ny", ny) f.write("dy", dy) f.write("ixseps1", ixseps1) f.write("ixseps2", ixseps2) f.close()
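A hedged sketch of reading the mesh back; assumes boututils.DataFile can open an existing file and exposes a read() mirroring the write() calls above:

from boututils import DataFile

f = DataFile("test-staggered.nc")   # open the file written above
nx = f.read("nx")                   # assumed read-side counterpart of write()
f.close()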
reminisce/mxnet
tests/python/unittest/test_random.py
Python
apache-2.0
50,683
0.009569
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, #
software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os import math import itertools import mxnet as mx from mxnet.test_utils imp
ort verify_generator, gen_buckets_probs_with_ppf, retry import numpy as np import random as rnd from common import setup_module, with_seed, random_seed, teardown import scipy.stats as ss import unittest from mxnet.test_utils import * def same(a, b): return np.sum(a != b) == 0 def check_with_device(device, dtype): # The thresholds chosen for the tests are too loose. We will rely on the other tests to test the samples from the # generators. tol = 0.1 symbols = [ { 'name': 'normal', 'symbol': mx.sym.random.normal, 'ndop': mx.nd.random.normal, 'pdfsymbol': mx.sym.random_pdf_normal, 'pdffunc': ss.norm.pdf, 'discrete': False, 'params': { 'loc': 10.0, 'scale': 0.5 }, 'inputs': [ ('loc',[ [ 0.0, 2.5 ], [ -9.75, -7.0 ] ]) , ('scale',[ [ 1.0, 3.7 ], [ 4.2, 1.5 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64) - params['loc']), tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - params['scale'], tol) ] }, { 'name': 'normal_like', 'symbol': mx.sym.random.normal_like, 'ndop': mx.nd.random.normal_like, 'params': { 'loc': 10.0, 'scale': 0.5 }, 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64) - params['loc']), tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - params['scale'], tol) ] }, { 'name': 'randn', 'symbol': mx.sym.random.randn, 'ndop': mx.nd.random.randn, 'params': { 'loc': 10.0, 'scale': 0.5 }, 'inputs': [ ('loc',[ [ 0.0, 2.5 ], [ -9.75, -7.0 ] ]) , ('scale',[ [ 1.0, 3.7 ], [ 4.2, 1.5 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64) - params['loc']), tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - params['scale'], tol) ] }, { 'name': 'uniform', 'symbol': mx.sym.random.uniform, 'ndop': mx.nd.random.uniform, 'pdfsymbol': mx.sym.random_pdf_uniform, 'pdffunc': lambda x, low, high: ss.uniform.pdf(x, low, high-low), 'discrete': False, 'params': { 'low': -1.5, 'high': 3.0 }, 'inputs': [ ('low', [ [ 0.0, 2.5 ], [ -9.75, -1.0 ] ]) , ('high', [ [ 1.0, 3.7 ], [ 4.2, 10.5 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - (params['low'] + params['high']) / 2.0, tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - np.sqrt(1.0 / 12.0) * (params['high'] - params['low']), tol) ] }, { 'name': 'uniform_like', 'symbol': mx.sym.random.uniform_like, 'ndop': mx.nd.random.uniform_like, 'params': { 'low': -1.5, 'high': 3.0 }, 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - (params['low'] + params['high']) / 2.0, tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - np.sqrt(1.0 / 12.0) * (params['high'] - params['low']), tol) ] }, { 'name': 'gamma', 'symbol': mx.sym.random.gamma, 'ndop': mx.nd.random.gamma, 'pdfsymbol': mx.sym.random_pdf_gamma, 'pdffunc': lambda x, alpha, beta: ss.gamma.pdf(x, alpha, 0, 1/beta), 'discrete': False, 'params': { 'alpha': 9.0, 'beta': 0.5 }, 'inputs': [ ('alpha', [ [ 0.1, 2.5 ], [ 9.75, 11.0 ] ]) , ('beta', [ [ 1.0, 0.7 ], [ 0.5, 0.3 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - params['alpha'] * params['beta'], tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - np.sqrt(params['alpha'] * params['beta'] ** 2), tol) ] }, { 'name': 'gamma_like', 'symbol': mx.sym.random.gamma_like, 'ndop': mx.nd.random.gamma_like, 'params': { 'alpha': 9.0, 'beta': 0.5 }, 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - params['alpha'] * params['beta'], tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - np.sqrt(params['alpha'] * params['beta'] ** 2), tol) ] 
}, { 'name': 'exponential', 'symbol': mx.sym.random.exponential, 'ndop': mx.nd.random.exponential, 'pdfsymbol': mx.sym.random_pdf_exponential, 'pdffunc': lambda x, lam: ss.expon.pdf(x, 0, 1/lam), 'discrete': False, 'params': { 'scale': 1.0/4.0 }, 'inputs': [ ('scale', [ [ 1.0/1.0, 1.0/8.5 ], [ 1.0/2.7 , 1.0/0.5 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - params['scale'], tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - params['scale'], tol) ] }, { 'name': 'exponential_like', 'symbol': mx.sym.random.exponential_like, 'ndop': mx.nd.random.exponential_like, 'params': { 'lam': 4.0 }, 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - 1.0/params['lam'], tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - 1.0/params['lam'], tol) ] }, { 'name': 'poisson', 'symbol': mx.sym.random.poisson, 'ndop': mx.nd.random.poisson, 'pdfsymbol': mx.sym.random_pdf_poisson, 'pdffunc': ss.poisson.pmf, 'discrete': True, 'params': { 'lam': 4.0 }, 'inputs': [ ('lam', [ [ 25.0, 8.5 ], [ 2.7 , 0.5 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - params['lam'], tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - np.sqrt(params['lam']), tol) ] }, { 'name': 'poisson_like', 'symbol': mx.sym.random.poisson_like, 'ndop': mx.nd.random.poisson_like, 'params': { 'lam': 4.0 }, 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - params['lam'], tol), ('std', lambda x, params: np.std(x.astype(np.float64)) - np.sqrt(params['lam']), tol) ] }, { 'name': 'neg_binomial', 'symbol': mx.sym.random.negative_binomial, 'ndop': mx.nd.random.negative_binomial, 'pdfsymbol': mx.sym.random_pdf_negative_binomial, 'pdffunc': ss.nbinom.pmf, 'discrete': True, 'params': { 'k': 3, 'p': 0.4 }, 'inputs': [ ('k', [ [ 3, 4 ], [ 5 , 6 ] ]) , ('p', [ [ 0.4 , 0.77 ], [ 0.5, 0.84 ] ]) ], 'checks': [ ('mean', lambda x, params: np.mean(x.astype(np.float64)) - params['k'] * (1.0 - params['p']) / params['p'], tol),
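A minimal standalone sketch of what one 'checks' entry verifies, outside the harness, using the same loose 0.1 tolerance:

import mxnet as mx
import numpy as np

samples = mx.nd.random.normal(loc=10.0, scale=0.5, shape=(10000,)).asnumpy()
assert abs(np.mean(samples) - 10.0) < 0.1   # the 'mean' check
assert abs(np.std(samples) - 0.5) < 0.1     # the 'std' check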
punalpatel/st2
st2common/st2common/exceptions/db.py
Python
apache-2.0
1,337
0
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from st2common.exceptions import
StackStormBaseException class StackStormDBObjectNotFoundError(StackStormBaseException): pass class StackStormDBObjectMalformedError(StackStormBaseException): pass class StackStormDBObjectConflict
Error(StackStormBaseException): """ Exception that captures a DB object conflict error. """ def __init__(self, message, conflict_id, model_object): super(StackStormDBObjectConflictError, self).__init__(message) self.conflict_id = conflict_id self.model_object = model_object
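A short sketch of how the conflict error carries its extra context; the values are hypothetical:

from st2common.exceptions.db import StackStormDBObjectConflictError

try:
    raise StackStormDBObjectConflictError('object already exists',
                                          conflict_id='abc123',
                                          model_object=None)
except StackStormDBObjectConflictError as e:
    print(e.conflict_id)   # -> 'abc123'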
tanglei528/ceilometer
ceilometer/compute/notifications/__init__.py
Python
apache-2.0
1,338
0
# -*- encoding: utf-8 -*- # # Copyright © 2013 Intel # # Author: Shuangtai Tian <shuangtai.tian@intel.com> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agree
d to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg import oslo.messaging from ceilometer import plugin OPTS = [ cfg.StrOpt('nova_control_exchange',
default='nova', help="Exchange name for Nova notifications."), ] cfg.CONF.register_opts(OPTS) class ComputeNotificationBase(plugin.NotificationBase): @staticmethod def get_targets(conf): """Return a sequence of oslo.messaging.Target defining the exchange and topics to be connected for this plugin. """ return [oslo.messaging.Target(topic=topic, exchange=conf.nova_control_exchange) for topic in conf.notification_topics]
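A hedged sketch of a concrete plugin on top of the base class above; the event type is plausible nova traffic, and the event_types/process_notification contract is assumed from this codebase's plugin.NotificationBase:

from ceilometer.compute.notifications import ComputeNotificationBase

class InstanceCreated(ComputeNotificationBase):
    # Nova notification events this plugin consumes (assumed contract).
    event_types = ['compute.instance.create.end']

    def process_notification(self, message):
        # Would yield ceilometer Sample objects built from message['payload'];
        # returning an empty list keeps the sketch side-effect free.
        return []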
iphoting/healthchecks
hc/api/tests/test_notify_email.py
Python
bsd-3-clause
5,005
0
# coding: utf-8 from datetime import timedelta as td import json from django.core import mail from django.utils.timezone import now from hc.api.models import Channel, Check, Notification, Ping from hc.test import BaseTestCase class NotifyEmailTestCase(BaseTestCase): def setUp(self): super().setUp() self.check = Check(project=self.project) self.check.name = "Daily Backup" self.check.desc = "Line 1\nLine2" self.check.tags = "foo bar" self.check.status = "down" self.check.last_ping = now() - td(minutes=61) self.check.n_pings = 112233 self.check.save() self.ping = Ping(owner=self.check) self.ping.remote_addr = "1.2.3.4" self.ping.body = "Body Line 1\nBody Line 2" self.ping.save() self.channel = Channel(project=self.project) self.channel.kind = "email" self.channel.value = "alice@example.org" self.channel.email_verified = True self.channel.save() self.channel.checks.add(self.check) def test_email(self): self.channel.notify(self.check) n = Notification.objects.get() self.assertEqual(n.error, "") # And email should have been sent self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] self.assertEqual(email.to[0], "alice@example.org") self.assertEqual(email.extra_headers["X-Status-Url"], n.status_url()) self.assertTrue("List-Unsubscribe" in email.extra_headers) self.assertTrue("List-Unsubscribe-Post" in email.extra_headers) html = email.alternatives[0][0] self.assertIn("Daily Backup", html) self.assertIn("Line 1<br>Line2", html) self.assertIn("Alices Project", html) self.assertIn("foo</code>", html) self.assertIn("bar</code>", html) self.assertIn("1 day", html) self.assertIn("from 1.2.3.4", html) self.assertIn("112233", html) self.assertIn("Body Line 1<br>Body Line 2", html) # Check's code must not be in the html self.assertNotIn(str(self.check.code), html) # Check's code must not be in the plain text body self.assertNotIn(str(self.check.code), email.body) def test_it_shows_cron_schedule(self): self.check.kind = "cron" self.che
ck.schedule = "0 18-23,0-8 * * *" self.check.save() self.channel.notify(self.check) email = mail.outbox[0] html = email.alternatives[0][0] self.assertIn("<code>0 18-23,0-8 * * *</code>", html) def test_it_truncates_long_body(self): self.ping.body = "X" * 10000 + ", and the rest gets cut off" self.ping.save() self.channel.notify(self.check) email = mail.outbox[0] html = email.altern
atives[0][0] self.assertIn("[truncated]", html) self.assertNotIn("the rest gets cut off", html) def test_it_handles_missing_ping_object(self): self.ping.delete() self.channel.notify(self.check) email = mail.outbox[0] html = email.alternatives[0][0] self.assertIn("Daily Backup", html) def test_it_handles_missing_profile(self): self.channel.value = "alice+notifications@example.org" self.channel.save() self.channel.notify(self.check) email = mail.outbox[0] self.assertEqual(email.to[0], "alice+notifications@example.org") html = email.alternatives[0][0] self.assertIn("Daily Backup", html) self.assertNotIn("Projects Overview", html) def test_email_transport_handles_json_value(self): payload = {"value": "alice@example.org", "up": True, "down": True} self.channel.value = json.dumps(payload) self.channel.save() self.channel.notify(self.check) # And email should have been sent self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] self.assertEqual(email.to[0], "alice@example.org") def test_it_reports_unverified_email(self): self.channel.email_verified = False self.channel.save() self.channel.notify(self.check) # If an email is not verified, it should say so in the notification: n = Notification.objects.get() self.assertEqual(n.error, "Email not verified") def test_email_checks_up_down_flags(self): payload = {"value": "alice@example.org", "up": True, "down": False} self.channel.value = json.dumps(payload) self.channel.save() self.channel.notify(self.check) # This channel should not notify on "down" events: self.assertEqual(Notification.objects.count(), 0) self.assertEqual(len(mail.outbox), 0) def test_email_handles_amperstand(self): self.check.name = "Foo & Bar" self.check.save() self.channel.notify(self.check) email = mail.outbox[0] self.assertEqual(email.subject, "DOWN | Foo & Bar")
california-civic-data-coalition/django-calaccess-downloads-website
toolbox/apps.py
Python
mit
130
0
fro
m __future__ import unicode_literals from django.apps import AppConfig class ToolboxConfig(AppConfig):
name = 'toolbox'
fmarani/spam
spam/spamhaus.py
Python
lgpl-3.0
1,760
0.001705
#!/usr/bin/env python from urlparse import urlparse from socket import gethostbyname from spam import DomainInexistentException class SpamHausChecker(object): """spam checker using spamhaus""" IS_SPAM = 1 IS_NOT_SPAM = 2 def _query_spamhaus(self, spamhaus_zone): try: return gethostbyname(spamhaus_zone) except Exception: return None def _resolve(self, domain): try: return gethostbyname(domain) except Exception: return None def _build_spamhaus_zone(self, ip): ip_segments = ip.split(".") ip_segments.reverse() return ".".join(ip_segments) + ".zen.spamhaus.org" def _decode_spamhaus(self, spamhaus_result): if spamhaus_result: return self.IS_SPAM else: return self
.IS_NOT_SPAM def check_url(self, url): """check an url""" domain = urlparse(url).netloc return self.check_domain(domain) def check_domain(self, domain): """check a domain""" domain = domain[domain.find('@')+1:] # remove user info if domain.count(":") > 0: domain = domain[:domain.find(':')] # remove port info ip = self._resolve(domain) if not ip: raise Domai
nInexistentException spamhaus_zone = self._build_spamhaus_zone(ip) spamhaus_result = self._query_spamhaus(spamhaus_zone) return self._decode_spamhaus(spamhaus_result) def is_spam(self, url): """shortcut for check_url == IS_SPAM""" return self.check_url(url) == self.IS_SPAM def is_not_spam(self, url): """shortcut for check_url == IS_NOT_SPAM""" return self.check_url(url) == self.IS_NOT_SPAM
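A minimal usage sketch of the checker above (Python 2, like the module itself); the URL is hypothetical:

from spam.spamhaus import SpamHausChecker

checker = SpamHausChecker()
if checker.is_spam("http://example.com/"):
    print "listed in Spamhaus ZEN"
else:
    print "not listed"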
jehomez/pymeadmin
actualizacion_de_precios.py
Python
gpl-2.0
1,494
0.003347
import gtk import treetohtml from mensajes import info, yesno from datetime import date, datetime from articulos import DlgArticulo from articulos_produccion import ArticulosEnProduccion from modelo import Model from comunes import punto_coma, coma_punto, caracter_a_logico, logico_a_caracter, calcular_iva_venta, calcular_precio_neto, calcular_precio_venta, calcular_utilidad class ActualizarPrecios: def main(self): gtk.main() return 0 def __init__(self, padre=None): builder = gtk.Builder() builder.add_from_file('dlgActualizacionPrecios.glade') builder.connect_signals(self)
self.dialogo = builder.get_object('dialogo') self.scroll = builder.get_object('scroll_window') self.tree = builder.get_object('vista') self.lista = builder.get_object('lista') self.opcion_algunos = builder.get_object('algunos') s
elf.opcion_todos = builder.get_object('todos') self.dialogo.show() def on_todos_group_changed(self, *args): pass def on_algunos_group_changed(self, *args): if self.opcion_algunos.get_active() == 1: self.scroll.set_visible(True) self.tree.set_visible(True) def on_aceptar_clicked(self, *args): pass def on_salir_clicked(self, *args): self.on_dialogo_destroy() def on_dialogo_destroy(self, *args): self.dialogo.destroy() if __name__ == '__main__': ActualizarPrecios().main()
walteryang47/ovirt-engine
packaging/pythonlib/ovirt_engine/ticket.py
Python
apache-2.0
3,464
0
import base64 import datetime import json from M2Crypto import EVP, X509, Rand class TicketEncoder(): @staticmethod def _formatDate(d): return d.strftime("%Y%m%d%H%M%S") def __init__(self, cert, key, lifetime=5): self._lifetime = lifetime self._x509 = X509.load_cert(cert) self._pkey = EVP.load_key(key) def encode(self, data): d = { 'salt': base64.b64encode(Rand.rand_bytes(8)), 'digest': 'sha1', 'validFrom': self._formatDate(datetime.datetime.utcnow()), 'validTo': self._formatDate( datetime.datetime.utcnow() + datetime.timedelta( seconds=self._lifetime ) ), 'data': data } self._pkey.reset_context(md=d['digest']) self._pkey.sign_init() fields = [] for k, v in d.items(): fields.append(k) self._pkey.sign_update(v) d['signedFields'] = ','.join(fields) d['signature'] = base64.b64encode(self._pkey.sign_final()) d['certificate'] = self._x509.as_pem() return base64.b64encode(json.dumps(d)) class TicketDecoder(): _peer = None _ca = None @staticmethod def _parseDate(d): return datetime.datetime.strptime(d, '%Y%m%d%H%M%S') @staticmethod def _verifyCertificate(ca, x509): if x509.verify(ca.get_pubkey()) == 0: raise ValueError('Untrusted certificate') if not ( x509.get_not_before().get_datetime().replace(tzinfo=None) <= datetime.datetime.utcnow() <= x509.get_not_after().get_datetime().replace(tzinfo=None) ): raise ValueError('Certificate expired') def __init__(self, ca, eku, peer=None): self._eku = eku if peer is not None: self._peer = X509.load_cert_string(peer) if ca is not None: self._ca = X509.load_cert(ca) def decode(self, ticket): decoded = json.loads(base64.b64decode(ticket)) if self._peer is not None: x509 = self._peer else: x509 = X509.load_cert_string( decoded['certificate'].encode('utf8') ) if self._ca is not None: self._verifyCertificate(self._ca, x509) if self._eku is not None: if self._eku not in x509.get_ext( 'extendedKeyUsage' ).get_value().split(','): raise ValueError('Certificate is not authorized for action') signedFields = [s.strip() for s in decoded['signedFields'].split(',')] if len( set(['salt', 'data']) & set(signedFields) ) == 0: raise ValueError('Invalid ticket') pkey = x509.get_pubkey() pkey.reset_context(md=decoded['digest']) pkey.verify_init() for field in signedFields: pkey.verify_update(decoded[field].encode('utf8')) if pkey.verify_final( base64.b64decode(decoded['signature'])
) != 1: raise ValueError('Invalid ticket signature') if not ( self._parseDate(decoded['validFrom']) <= datetime.datetime.utcnow() <=
self._parseDate(decoded['validTo']) ): raise ValueError('Ticket life time expired') return decoded['data'] # vim: expandtab tabstop=4 shiftwidth=4
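A hedged round-trip sketch; the PEM paths are hypothetical, and eku=None skips the extended-key-usage check, per decode() above:

from ovirt_engine.ticket import TicketEncoder, TicketDecoder

encoder = TicketEncoder(cert='/etc/pki/cert.pem', key='/etc/pki/key.pem',
                        lifetime=120)
ticket = encoder.encode('some-payload')

decoder = TicketDecoder(ca='/etc/pki/ca.pem', eku=None)
assert decoder.decode(ticket) == 'some-payload'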
tobspr/LUI
Builtin/LUIInputField.py
Python
mit
8,056
0.002234
import re from LUIObject import LUIObject from LUISprite import LUISprite from LUILabel import LUILabel from LUIInitialState import LUIInitialState from L
UILayouts import LUIHorizontalStretchedLayout __all__ = ["LUIInputField"] class LUIInputField(LUIObject): """ Simple input field, accepting text input. This input field supports entering text and navigating. Selecting text is (currently) not supported. The input field also supports various keyboard shortcuts: [home] Move to the beginning of the text [end] Move to the end of the text
[arrow_left] Move one character to the left [arrow_right] Move one character to the right [ctrl] + [arrow_left] Move to the left, skipping over words [ctrl] + [arrow_right] Move to the right, skipping over words [escape] Un-focus input element """ re_skip = re.compile("\W*\w+\W") def __init__(self, parent=None, width=200, placeholder=u"Enter some text ..", value=u"", **kwargs): """ Constructs a new input field. An input field always needs a width specified """ LUIObject.__init__(self, x=0, y=0, solid=True) self.set_width(width) self._layout = LUIHorizontalStretchedLayout(parent=self, prefix="InputField", width="100%") # Container for the text self._text_content = LUIObject(self) self._text_content.margin = (5, 7, 5, 7) self._text_content.clip_bounds = (0,0,0,0) self._text_content.set_size("100%", "100%") # Scroller for the text, so we can move right and left self._text_scroller = LUIObject(parent=self._text_content) self._text_scroller.center_vertical = True self._text = LUILabel(parent=self._text_scroller, text="") # Cursor for the current position self._cursor = LUISprite(self._text_scroller, "blank", "skin", x=0, y=0, w=2, h=15) self._cursor.color = (0.5, 0.5, 0.5) self._cursor.margin.top = 2 self._cursor.z_offset = 20 self._cursor_index = 0 self._cursor.hide() self._value = value # Placeholder text, shown when out of focus and no value exists self._placeholder = LUILabel(parent=self._text_content, text=placeholder, shadow=False, center_vertical=True, alpha=0.2) # Various states self._tickrate = 1.0 self._tickstart = 0.0 self._render_text() if parent is not None: self.parent = parent LUIInitialState.init(self, kwargs) @property def value(self): """ Returns the value of the input field """ return self._value @value.setter def value(self, new_value): """ Sets the value of the input field """ self._value = new_value self._render_text() self.trigger_event("changed", self._value) def clear(self): """ Clears the input value """ self.value = u"" @property def cursor_pos(self): """ Set the cursor position """ return self._cursor_index @cursor_pos.setter def cursor_pos(self, pos): """ Set the cursor position """ if pos >= 0: self._cursor_index = max(0, min(len(self._value), pos)) else: self._cursor_index = max(len(self._value) + pos + 1, 0) self._reset_cursor_tick() self._render_text() def on_tick(self, event): """ Tick handler, gets executed every frame """ frame_time = globalClock.get_frame_time() - self._tickstart show_cursor = frame_time % self._tickrate < 0.5 * self._tickrate if show_cursor: self._cursor.color = (0.5, 0.5, 0.5, 1) else: self._cursor.color = (1, 1, 1, 0) def on_click(self, event): """ Internal on click handler """ self.request_focus() def on_mousedown(self, event): """ Internal mousedown handler """ local_x_offset = self._text.text_handle.get_relative_pos(event.coordinates).x self.cursor_pos = self._text.text_handle.get_char_index(local_x_offset) def _reset_cursor_tick(self): """ Internal method to reset the cursor tick """ self._tickstart = globalClock.get_frame_time() def on_focus(self, event): """ Internal focus handler """ self._cursor.show() self._placeholder.hide() self._reset_cursor_tick() self._layout.color = (0.9, 0.9, 0.9, 1) def on_keydown(self, event): """ Internal keydown handler. 
Processes the special keys, and if none are present, redirects the event """ key_name = event.message if key_name == "backspace": self._value = self._value[:max(0, self._cursor_index - 1)] + self._value[self._cursor_index:] self.cursor_pos -= 1 self.trigger_event("changed", self._value) elif key_name == "delete": post_value = self._value[min(len(self._value), self._cursor_index + 1):] self._value = self._value[:self._cursor_index] + post_value self.cursor_pos = self._cursor_index self.trigger_event("changed", self._value) elif key_name == "arrow_left": if event.get_modifier_state("alt") or event.get_modifier_state("ctrl"): self.cursor_skip_left() else: self.cursor_pos -= 1 elif key_name == "arrow_right": if event.get_modifier_state("alt") or event.get_modifier_state("ctrl"): self.cursor_skip_right() else: self.cursor_pos += 1 elif key_name == "escape": self.blur() elif key_name == "home": self.cursor_pos = 0 elif key_name == "end": self.cursor_pos = len(self.value) self.trigger_event(key_name, self._value) def on_keyrepeat(self, event): """ Internal keyrepeat handler """ self.on_keydown(event) def on_textinput(self, event): """ Internal textinput handler """ self._value = self._value[:self._cursor_index] + event.message + \ self._value[self._cursor_index:] self.cursor_pos = self._cursor_index + len(event.message) self.trigger_event("changed", self._value) def on_blur(self, event): """ Internal blur handler """ self._cursor.hide() if len(self._value) < 1: self._placeholder.show() self._layout.color = (1, 1, 1, 1) def _render_text(self): """ Internal method to render the text """ self._text.set_text(self._value) self._cursor.left = self._text.left + \ self._text.text_handle.get_char_pos(self._cursor_index) + 1 max_left = self.width - 15 if self._value: self._placeholder.hide() else: if not self.focused: self._placeholder.show() # Scroll if the cursor is outside of the clip bounds rel_pos = self.get_relative_pos(self._cursor.get_abs_pos()).x if rel_pos >= max_left: self._text_scroller.left = min(0, max_left - self._cursor.left) if rel_pos <= 0: self._text_scroller.left = min(0, - self._cursor.left - rel_pos) def cursor_skip_left(self): """ Moves the cursor to the left, skipping the previous word """ left_hand_str = ''.join(reversed(self.value[0:self.cursor_pos])) match = self.re_skip.match(left_hand_str) if match is not None: self.cursor_pos -= match.end() - 1 else: self.cursor_pos = 0 def cursor_skip_right(self): """ Moves the cursor to the right, skipping the next word """ right_hand_str = self.value[self.cursor_pos:] match = self.re_skip.match(right_hand_str) if match is not None: self.cursor_pos += match.end() - 1 else: self.cursor_pos = len(self.value)
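A hedged construction sketch; assumes an initialised LUI setup providing a parent object (`root` is hypothetical):

from LUIInputField import LUIInputField

field = LUIInputField(parent=root, width=250, placeholder=u"Search ..")
field.value = u"hello"
field.cursor_pos = -1    # negative values index from the end, per the setter
print(field.value)       # -> u'hello'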
anupamaloke/Dell-EMC-Ansible-Modules-for-iDRAC
library/dellemc_idrac_nic.py
Python
gpl-3.0
16,156
0.001795
#! /usr/bin/python # _*_ coding: utf-8 _*_ # # Dell EMC OpenManage Ansible Modules # # Copyright © 2017 Dell Inc. or its subsidiaries. All rights reserved. # Dell, EMC, and other trademarks are trademarks of Dell Inc. or its # subsidiaries. Other trademarks may be trademarks of their respective owners. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: dellemc_idrac_nic short_description: Configure iDRAC Network settings version_added: "2.3" description: - Configure iDRAC Network settings options: idrac_ip: required: True description: - iDRAC IP Address type: 'str' idrac_user: required: True description: - iDRAC user name type: 'str' idrac_pwd: required: False description: - iDRAC user password type: 'str' idrac_port: required: False description: - iDRAC port default: 443 type: 'int' share_name: required: True description: - CIFS or NFS Network share share_user: required: True description: -
Network share user in the format user@domain if user is part of a domain else 'user' type: 'str' share_pwd: required: True description: - Network share user password type: 'str' share_mnt: required: True description: - Local mount path of the network file share with read-write permission for ansibl
e user type: 'path' nic_selection: required: False description: - NIC Selection mode choices: ['Dedicated','LOM1','LOM2','LOM3','LOM4'] default: "Dedicated" nic_failover: required: False description: - Failover network if NIC selection fails choices: ["None", "LOM1", "LOM2", "LOM3", "LOM4", "All"] default: "None" nic_autoneg: required: False description: - if C(True), will enable auto negotiation - if C(False), will disable auto negotiation default: False nic_speed: required: False description: - Network Speed choices: ["10", "100", "1000"] default: "1000" nic_duplex: required: False description: - if C(Full), will enable the Full-Duplex mode - if C(Half), will enable the Half-Duplex mode choices: ["Full", "Half"] default: "Full" nic_autodedicated: required: False description: - if C(True), will enable the auto-dedicated NIC option - if C(False), will disable the auto-dedicated NIC option default: False requirements: ['omsdk'] author: "anupam.aloke@dell.com" ''' EXAMPLES = ''' # Configure NIC Selection using a CIFS Network share - name: Configure NIC Selection dellemc_idrac_nic: idrac_ip: "192.168.1.1" idrac_user: "root" idrac_pwd: "calvin" share_name: "\\\\192.168.10.10\\share" share_user: "user1" share_pwd: "password" share_mnt: "/mnt/share" nic_selection: "Dedicated" state: "enable" ''' RETURN = ''' ''' import traceback from ansible.module_utils.dellemc_idrac import iDRACConnection from ansible.module_utils.basic import AnsibleModule try: from omsdk.sdkcenum import TypeHelper from omdrivers.enums.iDRAC.iDRAC import ( AutoConfig_NICTypes, Autoneg_NICTypes, DHCPEnable_IPv4Types, DNSDomainFromDHCP_NICStaticTypes, DNSFromDHCP_IPv4StaticTypes, DNSRegister_NICTypes, Duplex_NICTypes, Enable_IPv4Types, Enable_NICTypes, Failover_NICTypes, Selection_NICTypes, Speed_NICTypes, VLanEnable_NICTypes ) HAS_OMSDK = True except ImportError: HAS_OMSDK = False def _setup_nic(idrac, module): """ Setup iDRAC NIC attributes Keyword arguments: idrac -- iDRAC handle module -- Ansible module """ # Get the current NIC settings curr_nic_selection = idrac.config_mgr._sysconfig.iDRAC.NIC.Selection_NIC curr_nic_failover = idrac.config_mgr._sysconfig.iDRAC.NIC.Failover_NIC curr_nic_autoneg = idrac.config_mgr._sysconfig.iDRAC.NIC.Autoneg_NIC idrac.config_mgr._sysconfig.iDRAC.NIC.Enable_NIC = \ TypeHelper.convert_to_enum(module.params['nic_enable'], Enable_NICTypes) idrac.config_mgr._sysconfig.iDRAC.NIC.Selection_NIC = \ TypeHelper.convert_to_enum(module.params['nic_selection'], Selection_NICTypes) # NIC Selection mode and failover mode should not be same if module.params['nic_selection'] == module.params['nic_failover']: module.fail_json(msg="NIC Selection mode and Failover mode cannot be same") elif curr_nic_selection != Selection_NICTypes.Dedicated and \ module.params['nic_selection'] != 'Dedicated': idrac.config_mgr._sysconfig.iDRAC.NIC.Failover_NIC = \ TypeHelper.convert_to_enum(module.params['nic_failover'], Failover_NICTypes) # if NIC Selection is not 'Dedicated', then Auto-Negotiation is always ON if curr_nic_selection != Selection_NICTypes.Dedicated and \ module.params['nic_selection'] != 'Dedicated': idrac.config_mgr._sysconfig.iDRAC.NIC.Autoneg_NIC = Autoneg_NICTypes.Enabled else: idrac.config_mgr._sysconfig.iDRAC.NIC.Autoneg_NIC = \ TypeHelper.convert_to_enum(module.params['nic_autoneg'], Autoneg_NICTypes) # NIC Speed and Duplex mode can only be set when Auto-Negotiation is not ON if curr_nic_autoneg != Autoneg_NICTypes.Enabled and \ module.params['nic_autoneg'] != 'Enabled': if curr_nic_selection != 
Selection_NICTypes.Enabled and \ module.params['nic_selection'] != 'Dedicated': idrac.config_mgr._sysconfig.iDRAC.NIC.Speed_NIC = Speed_NICTypes.T_100 else: idrac.config_mgr._sysconfig.iDRAC.NIC.Speed_NIC = \ TypeHelper.convert_to_enum(module.params['nic_speed'], Speed_NICTypes) idrac.config_mgr._sysconfig.iDRAC.NIC.Duplex_NIC = \ TypeHelper.convert_to_enum(module.params['nic_duplex'], Duplex_NICTypes) idrac.config_mgr._sysconfig.iDRAC.NIC.MTU_NIC = module.params['nic_mtu'] # DNS Registration idrac.config_mgr._sysconfig.iDRAC.NIC.DNSRegister_NIC = \ TypeHelper.convert_to_enum(module.params['dns_register'], DNSRegister_NICTypes) if module.params['dns_idrac_name']: idrac.config_mgr._sysconfig.iDRAC.NIC.DNSRacName = module.params['dns_idrac_name'] # Enable Auto-Config if module.params['nic_auto_config'] != 'Disabled': if module.params['ipv4_enable'] != 'Enabled' or \ module.params['ipv4_dhcp_enable'] != 'Enabled': module.fail_json(msg="IPv4 and DHCPv4 must be enabled for Auto-Config") idrac.config_mgr._sysconfig.iDRAC.NIC.AutoConfig_NIC = \ TypeHelper.convert_to_enum(module.params['nic_auto_config'], AutoConfig_NICTypes) # VLAN idrac.config_mgr._sysconfig.iDRAC.NIC.VLanEnable_NIC = \ TypeHelper.convert_to_enum(module.params['vlan_enable'], VLanEnable_NICTypes) idrac.config_mgr._sysconfig.iDRAC.NIC.VLanID_NIC = module.params['vlan_id'] idrac.config_mgr._sysconfig.iDRAC.NIC.VLanPriority_NIC = module.
pat-odoo/TwoRC522_RPi2-3
module/gpio.py
Python
gpl-3.0
391
0.01023
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Python 2.7 class that wraps the RPi.GPIO module to control the GPIO channels of a Raspberry Pi. """ import RPi.GPIO as GPIO __author__ = "" __copyright__ = "" __email__ = "" __status__ = "Prototype" class PinsGPIO(object): gpio = None def __init__(self):
self.gpio = GPIO
SINTEFMedtek/CTK
Applications/ctkSimplePythonShell/Testing/Python/wrappedVTKQInvokableTest.py
Python
apache-2.0
531
0.001883
from __future__ import print_function import qt # Importing vtk initializes vtkPythonMap owned by vtkPythonUtil and prevent # call to vtkPythonUtil::GetObjectFromPointer() from segfaulting. # PythonQt internally uses vtkPythonUtil to properly wrap/unwrap VTK objects
from vtk import * t = _testWrappedVTKQInvokableInstance.getTable() print(t.GetClassName()) t2 = vtkTable() _testWrappedVTKQInvokableInstance.setTable(t2) if _testWrappedVTKQInvokableInstance.getTable()
!= t2: qt.QApplication.exit(1) qt.QApplication.exit(0)
amandolo/ansible-modules-core
system/setup.py
Python
gpl-3.0
5,256
0.005327
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: setup version_added: historical short_description: Gathers facts about remote hosts options: filter: version_added: "1.1" description: - if supplied, only return facts that match this shell-style (fnmatch) wildcard. required: false default: '*' fact_path: version_added: "1.3" description: - path used for local ansible facts (*.fact) - files in this dir will be run (if executable) and their results be added to ansible_local facts if a file is not executable it is read. File/results format can be json or ini-format required: false default: '/etc/ansible/facts.d' description: - This module is automatically called by playbooks to gather useful variables about remote hosts that can be used in
playbooks. It can also be executed directly by C(/usr/bin/ansible) to check what variables are available to a host. Ansible provides many I(facts) about the system, automatically. notes: - More ansible facts will be added with successive releases. If I(facter) or I(ohai) are installed, variables from these programs will also be sna
pshotted into the JSON file for usage in templating. These variables are prefixed with C(facter_) and C(ohai_) so it's easy to tell their source. All variables are bubbled up to the caller. Using the ansible facts and choosing to not install I(facter) and I(ohai) means you can avoid Ruby-dependencies on your remote systems. (See also M(facter) and M(ohai).) - The filter option filters only the first level subkey below ansible_facts. - If the target host is Windows, you will not currently have the ability to use C(fact_path) or C(filter) as this is provided by a simpler implementation of the module. Different facts are returned for Windows hosts. author: - "Ansible Core Team" - "Michael DeHaan" ''' EXAMPLES = """ # Display facts from all hosts and store them indexed by I(hostname) at C(/tmp/facts). ansible all -m setup --tree /tmp/facts # Display only facts regarding memory found by ansible on all hosts and output them. ansible all -m setup -a 'filter=ansible_*_mb' # Display only facts returned by facter. ansible all -m setup -a 'filter=facter_*' # Display only facts about certain interfaces. ansible all -m setup -a 'filter=ansible_eth[0-2]' """ def run_setup(module): setup_options = dict(module_setup=True) facts = ansible_facts(module) for (k, v) in facts.items(): setup_options["ansible_%s" % k.replace('-', '_')] = v # Look for the path to the facter and ohai binary and set # the variable to that path. facter_path = module.get_bin_path('facter') ohai_path = module.get_bin_path('ohai') # if facter is installed, and we can use --json because # ruby-json is ALSO installed, include facter data in the JSON if facter_path is not None: rc, out, err = module.run_command(facter_path + " --puppet --json") facter = True try: facter_ds = json.loads(out) except: facter = False if facter: for (k,v) in facter_ds.items(): setup_options["facter_%s" % k] = v # ditto for ohai if ohai_path is not None: rc, out, err = module.run_command(ohai_path) ohai = True try: ohai_ds = json.loads(out) except: ohai = False if ohai: for (k,v) in ohai_ds.items(): k2 = "ohai_%s" % k.replace('-', '_') setup_options[k2] = v setup_result = { 'ansible_facts': {} } for (k,v) in setup_options.items(): if module.params['filter'] == '*' or fnmatch.fnmatch(k, module.params['filter']): setup_result['ansible_facts'][k] = v # hack to keep --verbose from showing all the setup module results setup_result['verbose_override'] = True return setup_result def main(): global module module = AnsibleModule( argument_spec = dict( filter=dict(default="*", required=False), fact_path=dict(default='/etc/ansible/facts.d', required=False), ), supports_check_mode = True, ) data = run_setup(module) module.exit_json(**data) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.facts import * main()
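An illustrative sketch of the shell-style matching that the `filter` option applies in run_setup(); the fact values below are made up:

import fnmatch

facts = {'ansible_memtotal_mb': 7977,   # hypothetical values
         'ansible_swaptotal_mb': 2047,
         'facter_kernel': 'Linux'}
# Keep only first-level keys matching the wildcard, as run_setup() does.
print(dict((k, v) for k, v in facts.items()
           if fnmatch.fnmatch(k, 'ansible_*_mb')))
# -> {'ansible_memtotal_mb': 7977, 'ansible_swaptotal_mb': 2047}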
cangermueller/deepcpg
tests/deepcpg/data/test_annos.py
Python
mit
2,793
0
from __future__ import division from __future__ import print_function import numpy as np import numpy.testing as npt import pandas as pd from deepcpg.data import annotations as annos def test_join_overlapping(): f = annos.join_overlapping s, e = f([], []) assert len(s) == 0 assert len(e) == 0 s = [1, 3, 6] e = [2, 4, 10] expect = (s, e) result = f(
s, e) assert result == expect x = np.array([[1, 2], [3, 4], [4, 5], [6, 8], [8, 8], [8, 9], [10, 15], [10, 11], [11, 14], [14, 16]] ) expect = [[1, 2], [3, 5], [6, 9], [10, 16]] result = np.array(f(x[:, 0], x[:, 1])).T npt.assert_array_equal(result, expect) def test_in_which(): f = annos.in_which ys = [2, 4, 12, 17] ye = [2, 8, 15, 18] x = [] expect = [
] result = f(x, ys, ye) npt.assert_array_equal(result, expect) x = [-1, 3, 9, 19] expect = [-1, -1, -1, -1] result = f(x, ys, ye) npt.assert_array_equal(result, expect) x = [-1, 2, 2, 3, 4, 8, 15, 16] expect = [-1, 0, 0, -1, 1, 1, 2, -1] result = f(x, ys, ye) npt.assert_array_equal(result, expect) def test_is_in(): ys = [2, 4, 12, 17] ye = [2, 8, 15, 18] x = [-1, 2, 2, 3, 4, 8, 15, 16] expect = [False, True, True, False, True, True, True, False] result = annos.is_in(x, ys, ye) npt.assert_array_equal(result, expect) def test_distance(): start = [3, 10, 17] end = [6, 15, 18] pos = [1, 2, 5, 8, 10, 15, 16, 19] expect = [2, 1, 0, 2, 0, 0, 1, 1] start = np.asarray(start) end = np.asarray(end) pos = np.asarray(pos) actual = annos.distance(pos, start, end) npt.assert_array_equal(actual, expect) pos = [1, 6, 7, 9] expect = [2, 0, 1, 1] start = np.asarray(start) end = np.asarray(end) pos = np.asarray(pos) actual = annos.distance(pos, start, end) npt.assert_array_equal(actual, expect) def test_extend_frame(): d = pd.DataFrame({ 'chromo': '1', 'start': [2, 3, 3, 1, 1], 'end': [3, 3, 8, 2, 1] }) d = d.loc[:, ['chromo', 'start', 'end']] expect = pd.DataFrame({ 'chromo': '1', 'start': [1, 2, 3, 1, 1], 'end': [4, 5, 8, 4, 4] }) expect = expect.loc[:, ['chromo', 'start', 'end']] actual = annos.extend_len_frame(d, 4) npt.assert_array_equal(actual.values, expect.values) def test_group_overlapping(): npt.assert_array_equal(annos.group_overlapping([], []), []) npt.assert_array_equal(annos.group_overlapping([1], [2]), [0]) s = [1, 5, 7, 11, 13, 16, 22] e = [3, 8, 9, 15, 17, 20, 24] g = [0, 1, 1, 2, 2, 2, 3] a = annos.group_overlapping(s, e) npt.assert_array_equal(a, g)
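For reference, a merge over start/end interval lists that is consistent with the expectations above (inputs sorted by start; intervals that overlap or touch at an endpoint merge). This is only a sketch of the behavior under test, not deepcpg's implementation:

def join_overlapping_sketch(s, e):
    # Merge intervals that overlap or share an endpoint.
    out_s, out_e = [], []
    for start, end in zip(s, e):
        if out_e and start <= out_e[-1]:
            out_e[-1] = max(out_e[-1], end)  # extend the previous interval
        else:
            out_s.append(start)
            out_e.append(end)
    return out_s, out_e

assert join_overlapping_sketch([1, 3, 6], [2, 4, 10]) == ([1, 3, 6], [2, 4, 10])
assert join_overlapping_sketch([1, 3, 4], [2, 4, 5]) == ([1, 3], [2, 5])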
jpotterm/django-fluent-contents
fluent_contents/tests/testapp/admin.py
Python
apache-2.0
360
0
from django.contrib import admin from fluent_contents.admin import PlaceholderFieldAdmin from .models import PlaceholderFieldTestPage class PlaceholderFieldTestPageAdmin(PlaceholderFieldAdmin):
""" Admin interface for the PlaceholderFieldTestPage model. """ pass admin.site.register(Plac
eholderFieldTestPage, PlaceholderFieldTestPageAdmin)
xclxxl414/rqalpha
rqalpha/utils/package_helper.py
Python
apache-2.0
953
0
# -*- coding: utf-8 -*- # # Copyright 2017 Ricequant, Inc # # Licensed under
the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.a
pache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from rqalpha.utils.logger import system_log def import_mod(mod_name): try: from importlib import import_module return import_module(mod_name) except Exception as e: system_log.error("*" * 30) system_log.error("Mod Import Error: {}, error: {}", mod_name, e) system_log.error("*" * 30) raise
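A short sketch of the helper in use; the dotted module path is hypothetical:

# Import an optional mod by dotted path; failures are logged and re-raised.
mod = import_mod('rqalpha.mod.some_mod')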
edisonlz/fruit
web_project/base/site-packages/django/contrib/syndication/views.py
Python
apache-2.0
8,515
0.007634
from __future__ import unicode_literals from calendar import timegm from django.conf import settings from django.contrib.sites.models import get_current_site from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist from django.http import HttpResponse, Http404 from django.template import loader, TemplateDoesNotExist, RequestContext from django.utils import feedgenerator, tzinfo from django.utils.encoding import force_text, iri_to_uri, smart_text from django.utils.html import escape from django.utils.http import http_date from django.utils import six from django.utils.timezone import is_naive def add_domain(domain, url, secure=False): protocol = 'https' if secure else 'http' if url.startswith('//'): # Support network-path reference (see #16753) - RSS requires a protocol url = '%s:%s' % (protocol, url) elif not (url.startswith('http://') or url.startswith('https://') or url.startswith('mailto:')): url = iri_to_uri('%s://%s%s' % (protocol, domain, url)) return url class FeedDoesNotExist(ObjectDoesNotExist): pass class Feed(object): feed_type = feedgenerator.DefaultFeed title_template = None description_template = None def __call__(self, request, *args, **kwargs): try: obj = self.get_object(request, *args, **kwargs) except ObjectDoesNotExist: raise Http404('Feed object does not exist.') feedgen = self.get_feed(obj, request) response = HttpResponse(content_type=feedgen.mime_type) if hasattr(self, 'item_pubdate'): # if item_pubdate is defined for the feed, set header so as # ConditionalGetMiddleware is able to send 304 NOT MODIFIED response['Last-Modified'] = http_date( timegm(feedgen.latest_post_date().utctimetuple())) feedgen.write(response, 'utf-8') return response def item_title(self, item): # Titles should be double escaped by default (see #6533) return escape(force_text(item)) def item_description(self, item): return force_text(item) def item_link(self, item): try: return item.get_absolute_url() except AttributeError: raise ImproperlyConfigured('Give your %s class a get_absolute_url() method, or define an item_link() method in your Feed class.' % item.__class__.__name__) def __get_dynamic_attr(self, attname, obj, default=None): try: attr = getattr(self, attname) except AttributeError: return default if callable(attr): # Check co_argcount rather than try/excepting the function and # catching the TypeError, because something inside the function # may raise the TypeError. This technique is more accurate. try: code = six.get_function_code(attr) except AttributeError: code = six.get_function_code(attr.__call__) if code.co_argcount == 2: # one argument is 'self' return attr(obj) else: return attr() return attr def feed_extra_kwargs(self, obj): """ Returns an extra keyword arguments dictionary that is used when initializing the feed generator. """ return {} def item_extra_kwargs(self, item): """ Returns an extra keyword arguments dictionary that is used with the `add_item` call of the feed generator. """ return {} def get_object(self, request, *args, **kwargs): return None
def get_context_data(self, **kwargs): """ Returns a dictionary to use as extra context if either ``self.description_template`` or ``self.item_template`` are used. Default implementation preserves the old behavior of using {'obj': item, 'site'
: current_site} as the context. """ return {'obj': kwargs.get('item'), 'site': kwargs.get('site')} def get_feed(self, obj, request): """ Returns a feedgenerator.DefaultFeed object, fully populated, for this feed. Raises FeedDoesNotExist for invalid parameters. """ current_site = get_current_site(request) link = self.__get_dynamic_attr('link', obj) link = add_domain(current_site.domain, link, request.is_secure()) feed = self.feed_type( title = self.__get_dynamic_attr('title', obj), subtitle = self.__get_dynamic_attr('subtitle', obj), link = link, description = self.__get_dynamic_attr('description', obj), language = settings.LANGUAGE_CODE, feed_url = add_domain( current_site.domain, self.__get_dynamic_attr('feed_url', obj) or request.path, request.is_secure(), ), author_name = self.__get_dynamic_attr('author_name', obj), author_link = self.__get_dynamic_attr('author_link', obj), author_email = self.__get_dynamic_attr('author_email', obj), categories = self.__get_dynamic_attr('categories', obj), feed_copyright = self.__get_dynamic_attr('feed_copyright', obj), feed_guid = self.__get_dynamic_attr('feed_guid', obj), ttl = self.__get_dynamic_attr('ttl', obj), **self.feed_extra_kwargs(obj) ) title_tmp = None if self.title_template is not None: try: title_tmp = loader.get_template(self.title_template) except TemplateDoesNotExist: pass description_tmp = None if self.description_template is not None: try: description_tmp = loader.get_template(self.description_template) except TemplateDoesNotExist: pass for item in self.__get_dynamic_attr('items', obj): context = self.get_context_data(item=item, site=current_site, obj=obj, request=request) if title_tmp is not None: title = title_tmp.render(RequestContext(request, context)) else: title = self.__get_dynamic_attr('item_title', item) if description_tmp is not None: description = description_tmp.render(RequestContext(request, context)) else: description = self.__get_dynamic_attr('item_description', item) link = add_domain( current_site.domain, self.__get_dynamic_attr('item_link', item), request.is_secure(), ) enc = None enc_url = self.__get_dynamic_attr('item_enclosure_url', item) if enc_url: enc = feedgenerator.Enclosure( url = smart_text(enc_url), length = smart_text(self.__get_dynamic_attr('item_enclosure_length', item)), mime_type = smart_text(self.__get_dynamic_attr('item_enclosure_mime_type', item)) ) author_name = self.__get_dynamic_attr('item_author_name', item) if author_name is not None: author_email = self.__get_dynamic_attr('item_author_email', item) author_link = self.__get_dynamic_attr('item_author_link', item) else: author_email = author_link = None pubdate = self.__get_dynamic_attr('item_pubdate', item) if pubdate and is_naive(pubdate): ltz = tzinfo.LocalTimezone(pubdate) pubdate = pubdate.replace(tzinfo=ltz) feed.add_item( title = title, link = link, description = description, unique_id = self.__get_dynamic_attr('item_guid', item, link), unique_id_is_permalink = self.__get_dynamic_attr( 'item_guid_is_permalink', item), enclosure = enc, pubdate = pubdate, author_name = author_n
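A minimal sketch of a concrete feed built on this base class, following Django's documented Feed contract; the Entry model is hypothetical:

from django.contrib.syndication.views import Feed

class LatestEntriesFeed(Feed):
    title = 'Site news'
    link = '/news/'
    description = 'Latest site updates.'

    def items(self):
        # Entry is a hypothetical model with pub_date/title/summary fields
        # and a get_absolute_url() method for item_link().
        return Entry.objects.order_by('-pub_date')[:5]

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.summary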
jamesaud/se1-group4
address/models.py
Python
mit
9,864
0.004562
from django.db import models from django.core.exceptions import ValidationError from django.db.models.fields.related import ForeignObject try: from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor except ImportError: from django.db.models.fields.related import ReverseSingleRelatedObjectDescriptor as ForwardManyToOneDescriptor from django.utils.encoding import python_2_unicode_compatible import logging logger = logging.getLogger(__name__) # Python 3 fixes. import sys if sys.version > '3': long = int basestring = (str, bytes) unicode = str __all__ = ['Country', 'State', 'Locality', 'Address', 'AddressField'] class InconsistentDictError(Exception): pass def _to_python(value): raw = value.get('raw', '') country = value.get('country', '') country_code = value.get('country_code', '') state = value.get('state', '') state_code = value.get('state_code', '') locality = value.get('locality', '') postal_code = value.get('postal_code', '') street_number = value.get('street_number', '') route = value.get('route', '') formatted = value.get('formatted', '') latitude = value.get('latitude', None) longitude = value.get('longitude', None) # If there is no value (empty raw) then return None. if not raw: return None # If we have an inconsistent set of value bail out now. if (country or state or locality) and not (country and state and locality): raise InconsistentDictError # Handle the country. try: country_obj = Country.objects.get(name=country) except Country.DoesNotExist: if country: if len(country_code) > Country._meta.get_field('code').max_length: if country_code != country: raise ValueError('Invalid country code (too long): %s'%country_code) country_code = '' country_obj = Country.objects.create(name=country, code=country_code) else:
country_obj = None # Handle the state. try: state_obj = State.objects.get(name=state, country=country_obj) except State.DoesNotExist: if state: i
f len(state_code) > State._meta.get_field('code').max_length: if state_code != state: raise ValueError('Invalid state code (too long): %s'%state_code) state_code = '' state_obj = State.objects.create(name=state, code=state_code, country=country_obj) else: state_obj = None # Handle the locality. try: locality_obj = Locality.objects.get(name=locality, state=state_obj) except Locality.DoesNotExist: if locality: locality_obj = Locality.objects.create(name=locality, postal_code=postal_code, state=state_obj) else: locality_obj = None # Handle the address. try: if not (street_number or route or locality): address_obj = Address.objects.get(raw=raw) else: address_obj = Address.objects.get( street_number=street_number, route=route, locality=locality_obj ) except Address.DoesNotExist: address_obj = Address( street_number=street_number, route=route, raw=raw, locality=locality_obj, formatted=formatted, latitude=latitude, longitude=longitude, ) # If "formatted" is empty try to construct it from other values. if not address_obj.formatted: address_obj.formatted = unicode(address_obj) # Need to save. address_obj.save() # Done. return address_obj ## ## Convert a dictionary to an address. ## def to_python(value): # Keep `None`s. if value is None: return None # Is it already an address object? if isinstance(value, Address): return value # If we have an integer, assume it is a model primary key. This is mostly for # Django being a cunt. elif isinstance(value, (int, long)): return value # A string is considered a raw value. elif isinstance(value, basestring): obj = Address(raw=value) obj.save() return obj # A dictionary of named address components. elif isinstance(value, dict): # Attempt a conversion. try: return _to_python(value) except InconsistentDictError: return Address.objects.create(raw=value['raw']) # Not in any of the formats I recognise. raise ValidationError('Invalid address value.') ## ## A country. ## @python_2_unicode_compatible class Country(models.Model): name = models.CharField(max_length=40, unique=True, blank=True) code = models.CharField(max_length=2, blank=True) # not unique as there are duplicates (IT) class Meta: verbose_name_plural = 'Countries' ordering = ('name',) def __str__(self): return '%s'%(self.name or self.code) ## ## A state. Google refers to this as `administration_level_1`. ## @python_2_unicode_compatible class State(models.Model): name = models.CharField(max_length=165, blank=True) code = models.CharField(max_length=3, blank=True) country = models.ForeignKey(Country, related_name='states') class Meta: unique_together = ('name', 'country') ordering = ('country', 'name') def __str__(self): txt = self.to_str() country = '%s'%self.country if country and txt: txt += ', ' txt += country return txt def to_str(self): return '%s'%(self.name or self.code) ## ## A locality (suburb). ## @python_2_unicode_compatible class Locality(models.Model): name = models.CharField(max_length=165, blank=True) postal_code = models.CharField(max_length=10, blank=True) state = models.ForeignKey(State, related_name='localities') class Meta: verbose_name_plural = 'Localities' unique_together = ('name', 'state') ordering = ('state', 'name') def __str__(self): txt = '%s'%self.name state = self.state.to_str() if self.state else '' if txt and state: txt += ', ' txt += state if self.postal_code: txt += ' %s'%self.postal_code cntry = '%s'%(self.state.country if self.state and self.state.country else '') if cntry: txt += ', %s'%cntry return txt ## ## An address. 
If for any reason we are unable to find a matching ## decomposed address we will store the raw address string in `raw`. ## @python_2_unicode_compatible class Address(models.Model): street_number = models.CharField(max_length=20, blank=True) route = models.CharField(max_length=100, blank=True) locality = models.ForeignKey(Locality, related_name='addresses', blank=True, null=True) raw = models.CharField(max_length=200) formatted = models.CharField(max_length=200, blank=True) latitude = models.FloatField(blank=True, null=True) longitude = models.FloatField(blank=True, null=True) class Meta: verbose_name_plural = 'Addresses' ordering = ('locality', 'route', 'street_number') # unique_together = ('locality', 'route', 'street_number') def __str__(self): if self.formatted != '': txt = '%s'%self.formatted elif self.locality: txt = '' if self.street_number: txt = '%s'%self.street_number if self.route: if txt: txt += ' %s'%self.route locality = '%s'%self.locality if txt and locality: txt += ', ' txt += locality else: txt = '%s'%self.raw return txt def clean(self): if not self.raw: raise ValidationError('Addresses may not have a blank `raw` field.') def as_dict(self): ad = dict( street_number=self.street_number, route=self.route, raw=self.raw, formatted=self.f
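A hedged sketch of converting a decomposed address dict, mirroring the keys _to_python() reads above; all values are made up and a configured Django project is required to actually run it:

address = to_python({
    'raw': '123 Main St, Springfield, IL, USA',
    'country': 'USA', 'country_code': 'US',
    'state': 'Illinois', 'state_code': 'IL',
    'locality': 'Springfield', 'postal_code': '62701',
    'street_number': '123', 'route': 'Main St',
    'formatted': '123 Main St, Springfield, IL 62701, USA',
})
# Country/State/Locality rows are created on demand and linked together.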
stephane-martin/salt-debian-packaging
salt-2016.3.3/salt/modules/boto_sqs.py
Python
apache-2.0
5,122
0.000976
# -*- coding: utf-8 -*- ''' Connection module for Amazon SQS .. versionadded:: 2014.7.0 :configuration: This module accepts explicit sqs credentials but can also utilize IAM roles assigned to the instance through Instance Profiles. Dynamic credentials are then automatically obtained from AWS API and no further configuration is necessary. More Information available at: .. code-block:: text http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html If IAM roles are not used you need to specify them either in a pillar or in the minion's config file: .. code-block:: yaml sqs.keyid: GKTADJGHEIQSXMKKRBJ08H sqs.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs A region may also be specified in the configuration: .. code-block:: yaml sqs.region: us-east-1 If a region is not specified, the default is us-east-1. It's also possible to specify key, keyid and region via a profile, either as a passed in dict, or as a string to pull from pillars or minion config: .. code-block:: yaml myprofile: keyid: GKTADJGHEIQSXMKKRBJ08H key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs region: us-east-1 :depends: boto ''' # keep lint from choking on _get_conn and _cache_id #pylint: disable=E0602 from __future__ import absolute_import # Import Python libs import logging import json import salt.ext.six as six log = logging.getLogger(__name__) # Import third party libs try: # pylint: disable=unused-import import boto import boto.sqs # pylint: enable=unused-import logging.getLogger('boto').setLevel(logging.CRITICAL) HAS_BOTO = True except ImportError: HAS_BOTO = False from salt.ext.six import string_types def __virtual__(): ''' Only load if boto libraries exist. ''' if not HAS_BOTO: return (False, 'The boto_sqs module could not be loaded: boto libraries not found') __utils__['boto.assign_funcs'](__name__, 'sqs', pack=__salt__) return True def exists(name, region=None, key=None, keyid=None, profile=None): ''' Check to see if a queue exists. CLI example:: salt myminion boto_sqs.exists myqueue region=us-east-1 ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if conn.get_queue(name): return True else: return False def create(name, region=None, key=None, keyid=None, profile=None): ''' Create an SQS queue. CLI example to create a queue:: salt myminion boto_sqs.create myqueue region=us-east-1 ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if not conn.get_queue(name): try: conn.create_queue(name) except boto.exception.SQSError: msg = 'Failed to create queue {0}'.format(name) log.error(msg) return False log.info('Created queue {0}'.format(name)) return True def delete(name, region=None, key=None, keyid=None, profile=None): ''' Delete an SQS queue. CLI example to delete a queue:: salt myminion boto_sqs.delete myqueue region=us-east-1 ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) queue_obj = conn.get_queue(name) if queue_obj: deleted_queue = conn.delete_queue(queue_obj) if not deleted_queue: msg = 'Failed to delete queue {0}'.format(name) log.error(msg) return False return True def get_attributes(name, region=None, key=None, keyid=None, profile=None): ''' Check to see if attributes are set on an SQS queue. 
CLI example:: salt myminion boto_sqs.get_attributes myqueue ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if not conn: return {} queue_obj = conn.get_queue(name) if not queue_obj: log.error('Queue {0} does not exist.'.format(name)) return {} return conn.get_queue_attributes(queue_obj) def set_attributes(name, attributes, region=None, key=None, keyid=None, profile=None): ''' Set attributes on an SQS queue. CLI example to set attributes on a queue:: salt myminion boto_sqs.set_attributes myqueue '{ReceiveMessageWaitTimeSeconds: 20}' region=us-east-1 ''' ret = True conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) queue_obj = conn.get_queue(name) if not queue_obj: log.error('Queue {0} does not exist.'.format(name)) ret = False if isinstance(attributes, string_types): attributes = json.loads(at
tributes) for attr, val in six.iteritems(attributes): attr_set = queue_obj.set_attribute(attr, val) if not attr_set: msg = 'Failed to set attribute {0} = {1} on queue {2}' log.error(msg.format(attr, val, name))
ret = False else: msg = 'Set attribute {0} = {1} on queue {2}' log.info(msg.format(attr, val, name)) return ret
kasbah/slim_looper
src/protocol/nanopb/tests/site_scons/site_init.py
Python
gpl-3.0
3,700
0.015135
import subprocess import sys import re try: # Make terminal colors work on windows import colorama colorama.init() except ImportError: pass def add_nanopb_builders(env): '''Add the necessary builder commands for nanopb tests.''' # Build command that runs a test program and saves the output def run_test(target, source, env): if len(source) > 1: infile = open(str(source[1])) else: infile = None args = [str(source[0])] if env.has_key('ARGS'): args.extend(env['ARGS']) pipe = subprocess.Popen(args, stdin = infile, stdout = open(str(target[0]), 'w'), stderr = sys.stderr) result = pipe.wait() if result == 0: print '\033[32m[ OK ]\033[0m Ran ' + str(source[0]) else: print '\033[31m[FAIL]\033[0m Program ' + str(source[0]) + ' returned ' + str(result) return result run_test_builder = Builder(action = run
_test, suffix = '.output')
env.Append(BUILDERS = {'RunTest': run_test_builder}) # Build command that decodes a message using protoc def decode_actions(source, target, env, for_signature): esc = env['ESCAPE'] dirs = ' '.join(['-I' + esc(env.GetBuildPath(d)) for d in env['PROTOCPATH']]) return '$PROTOC $PROTOCFLAGS %s --decode=%s %s <%s >%s' % ( dirs, env['MESSAGE'], esc(str(source[1])), esc(str(source[0])), esc(str(target[0]))) decode_builder = Builder(generator = decode_actions, suffix = '.decoded') env.Append(BUILDERS = {'Decode': decode_builder}) # Build command that encodes a message using protoc def encode_actions(source, target, env, for_signature): esc = env['ESCAPE'] dirs = ' '.join(['-I' + esc(env.GetBuildPath(d)) for d in env['PROTOCPATH']]) return '$PROTOC $PROTOCFLAGS %s --encode=%s %s <%s >%s' % ( dirs, env['MESSAGE'], esc(str(source[1])), esc(str(source[0])), esc(str(target[0]))) encode_builder = Builder(generator = encode_actions, suffix = '.encoded') env.Append(BUILDERS = {'Encode': encode_builder}) # Build command that asserts that two files be equal def compare_files(target, source, env): data1 = open(str(source[0]), 'rb').read() data2 = open(str(source[1]), 'rb').read() if data1 == data2: print '\033[32m[ OK ]\033[0m Files equal: ' + str(source[0]) + ' and ' + str(source[1]) return 0 else: print '\033[31m[FAIL]\033[0m Files differ: ' + str(source[0]) + ' and ' + str(source[1]) return 1 compare_builder = Builder(action = compare_files, suffix = '.equal') env.Append(BUILDERS = {'Compare': compare_builder}) # Build command that checks that each pattern in source2 is found in source1. def match_files(target, source, env): data = open(str(source[0]), 'rU').read() patterns = open(str(source[1])) for pattern in patterns: if pattern.strip() and not re.search(pattern.strip(), data, re.MULTILINE): print '\033[31m[FAIL]\033[0m Pattern not found in ' + str(source[0]) + ': ' + pattern return 1 else: print '\033[32m[ OK ]\033[0m All patterns found in ' + str(source[0]) return 0 match_builder = Builder(action = match_files, suffix = '.matched') env.Append(BUILDERS = {'Match': match_builder})
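A sketch of wiring these builders into an SConstruct; the program and file names are hypothetical:

env = Environment()
add_nanopb_builders(env)

prog = env.Program('decode_unittests', ['decode_unittests.c'])
output = env.RunTest(prog)                          # run it, capture stdout
env.Compare([output, 'decode_unittests.expected'])  # assert the files are equal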
51reboot/actual_09_homework
09/tanshuai/cmdb_v6/user/views.py
Python
mit
8,533
0.006442
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from flask import render_template, request, redirect, session, flash, url_for
from functools import wraps
from user import app
import services2db
import log2db
import users
import json
import time
import sys
import asset

reload(sys)
sys.setdefaultencoding('gb18030')


# Login-required decorator (applying it to the login page itself would cause a redirect loop)
def login_required(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        if session.get('user') is None:
            return redirect('/')
        rt = func(*args, **kwargs)
        return rt
    return wrapper


# Timing decorator
def time_wrapper(func):
    @wraps(func)
    def wrapper():
        print 'Timer started: %s' % func.__name__
        start = time.time()
        rt = func()
        print 'Timer finished: %s:%s' % (func.__name__, time.time() - start)
        return rt
    return wrapper


# Root URL
@app.route('/')
def index():
    if session:
        return redirect('/users/')
    else:
        return render_template('login.html')


# Login page
@app.route('/login/', methods=['POST', 'GET'])
def login():
    params = request.args if request.method == 'GET' else request.form
    username = params.get('username', '')
    password = params.get('password', '')
    if users.validate_login(username, password):
        print 'Login successful'
        session['user'] = {'username': username}
        return redirect('/users/')
    else:
        return render_template('login.html', username=username, error=u'Wrong username or password')


# Logout page
@app.route('/user/logout/')
def logout():
    session.clear()
    return redirect('/')


# Display user info
@app.route('/users/')
@login_required
def user_list():
    return render_template('users.html', user_list=users.get_users())


# Return the "add user" template to the dialog page
@app.route('/user/adder/', methods=['POST', 'GET'])
@login_required
def user_create():
    return render_template('user_create.html')


# Add a user
@app.route('/user/add/', methods=['POST'])
def user_add():
    params = request.args if request.method == 'GET' else request.form
    username = params.get('username', '')
    password = params.get('password', '')
    age = params.get('age', '')
    # Validate the user info
    _is_ok, _error = users.validate_add_user(username, password, age)
    _status = None
    if _is_ok:
        if users.add_users(username=username, age=age, password=password):
            _status = 'User added successfully!'
        else:
            _status = 'Failed to add user!'
    return json.dumps({'is_ok': _is_ok, 'status': _status, 'error': _error})


# Return the "update user" template to the dialog page
@app.route('/user/update/', methods=['POST', 'GET'])
@login_required
def user_update():
    _id = request.args.get('id', '')
    _name = request.args.get('name', '')
    # _users = []
    # for i in users.get_users():
    #     if i.get('id') == int(_id):
    #         _users.append(i)
    return render_template('user_update.html', uid=_id, username=_name)


# Update a user
@app.route('/user/upd/', methods=['POST', 'GET'])
def user_upd():
    _id = request.form.get('id', '')
    _mpassword = request.form.get('mpassword', '')
    _upassword = request.form.get('upassword', '')
    _age = request.form.get('age', '')
    _is_ok, _error = users.validate_update_user(_id, session['user']['username'], _mpassword, _upassword, _age)
    _status = None
    if _is_ok:
        if users.update_users(_id, _upassword, _age):
            _status = 'User updated successfully!'
        else:
            _status = 'Failed to update user!'
    return json.dumps({'is_ok': _is_ok, 'status': _status, 'error': _error})


# Delete a user
@app.route('/user/delete/')
@login_required
def delete_user():
    uid = request.args.get('uid', '')
    if users.del_users(uid):
        return redirect('/users/')
    else:
        return 'Failed to delete user'


# Display log info
@app.route('/logs/', methods=['POST', 'GET'])
@time_wrapper
@login_required
def logs():
    files = request.files.get('files')
    if files:
        # print files.filename
        files.save('./access.txt')
        log_files = 'access.txt'
        if log2db.log2db(log_files=log_files, fetch=False):
            return redirect('/logs/')
        else:
            return 'Failed to write logs to the database!'
    else:
        topn = request.form.get('topn', 10)
        topn = int(topn) if str(topn).isdigit() else 10
        rt_list = log2db.log2db(topn=topn)  # read the data
        return render_template('logs.html', rt_list=rt_list)


# Display domain management info
@app.route('/services/', methods=['POST', 'GET'])
@login_required
def service_manage():
    params = request.args if request.method == 'GET' else request.form
    _url = params.get('url', 'Null')
    _username = params.get('username', 'Null')
    _password = params.get('password', 'Null')
    _func = params.get('func', 'Null')
    # Add a domain management record
    if _url != 'Null':
        if services2db.add_service(_url, _username, _password, _func):
            return redirect('/services/')
        else:
            return 'Failed to add the record!'
    # Query domain management records
    else:
        service_list = services2db.get_service()
        return render_template('services.html', service_list=service_list)


# Update a domain management record
@app.route('/services/update/', methods=['POST'])
def update_service():
    params = request.args if request.method == 'GET' else request.form
    _id = params.get('id', '')
    _url = params.get('url', '')
    _username = params.get('username', '')
    _password = params.get('password', '')
    _func = params.get('func', '')
    _is_ok = services2db.update_service(_url, _username, _password, _func, _id)
    return json.dumps({'is_ok': _is_ok})


# Delete a domain management record
@app.route('/services/del/')
@login_required
def serviceDel():
    uid = request.args.get('id', '')
    if services2db.servicedel(uid):
        return redirect('/services/')
    else:
        return 'Failed to delete the domain management record!'


# Display asset info
@app.route('/assets/')
@login_required
def asset_list():
    _asset_list = []
    for i in asset.get_list():
        _rt_list = asset.get_by_id(i.get('idc_id'))
        i['idc_id'] = _rt_list[0][1]
        _asset_list.append(i)
    return render_template('assets.html', asset_list=_asset_list)


# Return the "create asset" template to the dialog page
@app.route('/asset/create/', methods=['POST', 'GET'])
@login_required
def asset_create():
    return render_template('asset_create.html', idcs=asset.get_idc())


# Add an asset
@app.route('/asset/add/', methods=['POST', 'GET'])
@login_required
def asset_add():
    lists = ['sn', 'ip', 'hostname', 'idc_id', 'purchase_date', 'warranty', 'vendor', 'model', 'admin', 'business', 'os', 'cpu', 'ram', 'disk']
    asset_dict = {}
    for i in lists:
        asset_dict['_' + i] = request.form.get(i, '')
    # Validate the asset info
    _is_ok, _error = asset.validate_create(asset_dict)
    status = None
    if _is_ok:
        if asset.create(asset_dict):
            status = 'Asset added successfully!'
        else:
            status = 'Failed to add asset!'
    return json.dumps({'is_ok': _is_ok, 'status': status, 'error': _error})


# Delete an asset
@app.route('/asset/delete/')
@login_required
def asset_del():
    uid = request.args.get('id', '')
    if asset.delete(uid):
        return redirect('/assets/')
    else:
        return 'Failed to delete asset!'


# Return the "update asset" template to the dialog page
@app.route('/asset/update/', methods=['POST', 'GET'])
@login_required
def asset_update():
    _id = request.args.get('id', '')
    _asset_list = []
    for i in asset.get_list():
        if i.get('id') == int(_id):
            _asset_list.append(i)
    return render_template('asset_update.html', asset_list=_asset_list, idcs=asset.get_idc())


# Update an asset
@app.route('/asset/upd/', methods=['POST', 'GET'])
@login_required
def asset_upd():
    _id = request.form.get('id', '')
    assets = ['sn', 'ip', 'hostname', 'idc_id', 'purchase_date', 'warranty', 'vendor', 'model', 'admin', 'business', 'os', 'cpu', 'ram', 'disk']
    asset_dict = {}
    for i in assets:
        asset_dict['_' + i] = request.form.get(i, '')
    # Validate the asset info
    _is_ok, _error = asset.validate_update(asset_dict)
    _status = None
    if _is_ok:
        if asset.update(asset_dict, _id):
            _status = 'Asset updated successfully!'
        else:
            _status = 'Failed to update asset!'
    return json.dumps({'is_ok': _is_ok, 'status': _status, 'error': _error})
ioram7/keystone-federado-pgid2013
build/eventlet/eventlet/support/greenlets.py
Python
apache-2.0
1,085
0.003687
import distutils.version try: import greenlet getcurrent = greenlet.greenlet.getcurrent GreenletExit = greenlet.greenlet.GreenletExit preserves_excinfo = (distutils.version.LooseVersion(greenlet.__version__) >= distutils.version.LooseVersion('0.3.2')) greenlet = greenlet.greenlet except ImportError, e: raise try: from py.magic import greenlet g
etcurrent = greenlet.getcurrent GreenletExit = greenlet.GreenletExit preserves_excinfo = False except ImportError: try: from stackless import greenlet getcurrent = greenlet.getcurrent GreenletExit = greenlet.GreenletExit preserves_excinfo = False
except ImportError: try: from support.stacklesss import greenlet, getcurrent, GreenletExit preserves_excinfo = False (greenlet, getcurrent, GreenletExit) # silence pyflakes except ImportError, e: raise ImportError("Unable to find an implementation of greenlet.")
Ishydo/miot
miot/forms.py
Python
mit
791
0.005057
from mapwidgets.widgets import GooglePointFieldWidget from miot.models import PointOfInterest, Page, Profile from django import forms class PointOfInterestForm(forms.ModelForm): '''The form for a point of interest.''' class Meta: model = PointOfInterest fields = ("name", "featured_i
mage", "position", "tags", "active", "category") widgets = { 'position': GooglePointFiel
dWidget, } class PageForm(forms.ModelForm): '''The form for a page.''' class Meta: model = Page fields = ("title", "content", "template") class EstimoteAppForm(forms.ModelForm): '''The form for a profile update (estimote credentials).''' class Meta: model = Profile fields = ("estimote_app_id", "estimote_app_token")
maximilianofaccone/puppy-siberian
root/.conky/gmail.py
Python
gpl-3.0
480
0.03125
import os import string #Enter your username and password below within double quotes # eg. username="username" and password="password" username="username" password="password" com="wget -O - https://"+username+":"+password+"@mail.google.com/
mail/feed/atom --no-check-certificate" temp=os.popen(com) msg=temp.read() index=string.find(msg,"<fullcount>") index2=string.find(msg,"</fullcount>") fc=int(
msg[index+11:index2]) if fc==0: print "0 new" else: print str(fc)+" new"
seerjk/reboot06
06/flask_web.py
Python
mit
1,609
0.003108
from flask import Flask, request, render_template app = Flask(__name__) @app.route('/') def index(): # return 'hello flask' # return '<input type="button" value="click me!!">' # return '<input type="text">' # return '<input type="password">' # return '<input type="date">' # return '<input type="color">' # return '<input type="checkbox">' return render_template('index.html') # @app.route('/reboot') @app.route('/reboot', methods=['GET']) def reboot(): # http://10.1.1.8:9092/reboot?name=abcb n
ame = request.args.get('name') age = request.args.ge
t('age') # print type(request.args) # print request.args # http://10.1.1.8:9092/reboot?name=abcb&age=15 return 'hello reboot, name: %s, age: %s' % (name, age) @app.route('/login') def login(): user = request.args.get('user') pwd = request.args.get('pwd') res = '' lines = [] user_dict = {} try: with open('user.txt') as f: lines = f.readlines() except: return -1 for line in lines: line = line.strip() name = line.split(' ')[0] passwd = line.split(' ')[1] user_dict[name] = passwd if user in user_dict: if str(pwd) == user_dict[user]: res = "yes, Login." else: res = "password is wrong, %s, %s" % (pwd, user_dict[user]) else: res = "user name not exist." # if user == 'admin' and pwd == 'admin': # res = 'ok' # else: # res = 'no' return res if __name__ == '__main__': app.run(host='0.0.0.0', port=9092, debug=True)
dc3-plaso/dfvfs
dfvfs/resolver/lvm_resolver_helper.py
Python
apache-2.0
1,206
0.003317
# -*- coding: utf-8 -*- """The LVM path specification resolver helper implementation.""" # This is necessary to prevent a circular import. import dfvfs.file_io.lvm_file_io import dfvfs.vfs.lvm_file_system from dfvfs.lib import definitions from dfvfs.resolver import resolver from dfvfs.resolver import resolver_helper class LVMResolverHelper(resolver_helpe
r.ResolverHelper): """Class that implements the Logical Volume Manager (LVM
) resolver helper.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_LVM def NewFileObject(self, resolver_context): """Creates a new file-like object. Args: resolver_context: the resolver context (instance of resolver.Context). Returns: The file-like object (instance of file_io.FileIO). """ return dfvfs.file_io.lvm_file_io.LVMFile(resolver_context) def NewFileSystem(self, resolver_context): """Creates a new file system object. Args: resolver_context: the resolver context (instance of resolver.Context). Returns: The file system object (instance of vfs.FileSystem). """ return dfvfs.vfs.lvm_file_system.LVMFileSystem(resolver_context) resolver.Resolver.RegisterHelper(LVMResolverHelper())
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-2.3/Lib/gopherlib.py
Python
mit
5,564
0.010065
"""Gopher protocol client interface.""" __all__ = ["send_selector","send_query"] # Default selector, host and port DEF_SELECTOR = '1/' DEF_HOST = 'gopher.micro.umn.edu' DEF_PORT = 70 # Recognized file types A_TEXT = '0' A_MENU = '1' A_CSO = '2' A_ERROR = '3' A_MACBINHEX = '4' A_PCBINHEX = '5' A_UUENCODED = '6' A_INDEX = '7' A_TELNET = '8' A_BINARY = '9' A_DUPLICATE = '+' A_SOUND = 's' A_EVENT = 'e' A_CALENDAR = 'c' A_HTML = 'h' A_TN3270 = 'T' A_MIME = 'M' A_IMAGE = 'I' A_WHOIS = 'w' A_QUERY = 'q' A_GIF = 'g' A_HTML = 'h' # HTML file A_WWW = 'w' # WWW address A_PLUS_IMAGE = ':' A_PLUS_MOVIE = ';' A_PLUS_SOUND = '<' _names = dir() _type_to_name_map = {} def type_to_name(gtype): """Map all file types to strings; unknown types become TYPE='x'.""" global _type_to_name_map if _type_to_name_map=={}: for name in _names: if name[:2] == 'A_': _type_to_name_map[eval(name)] = name[2:] if gtype in _type_to_name_map: return _type_to_name_map[gtype] return 'TYPE=' + `gtype` # Names for characters and strings CRLF = '\r\n' TAB = '\t' def send_selector(selector, host, port = 0): """Send a selector to a given host and port, return a file with the reply.""" import socket if not port: i = host.find(':') if i >= 0: host, port = host[:i], int(host[i+1:]) if not port: port = DEF_PORT elif type(port) == type(''): port = int(port) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((host, port)) s.sendall(selector + CRLF) s.shutdown(1) return s.makefile('rb') def send_query(selector, query, host, port = 0): """Send a selector and a query string.""" return send_selector(selector + '\t' + query, host, port) def path_to_selector(path): """Takes a path as returned by urlparse and returns the appropriate selector.""" if path=="/": return "/" else: return path[2:] # Cuts initial slash and data type identifier def path_to_datatype_name(path): """Takes a path as returned by urlparse and maps it to a string. See section 3.4 of RFC 1738 for details.""" if path=="/": # No way to tell, although "INDEX" is likely return "TYPE='unknown'" else: return type_to_name(path[1]) # The following functions interpret the data returned by the gopher # server according to the expected type, e.g. textfile or directory def get_directory(f): """Get a directory in the form of a list of entries.""" list = [] while 1: line = f.readline() if not line: print '(Unexpected EOF from server)' break if line[-2:] == CRLF: line = line[:-2] elif line[-1:] in CRLF: line = line[:-1] if line == '.': break if not line: print '(Empty line from server)' continue gtype = line[0] parts = line[1:].split(TAB) if len(parts) < 4: print '(Bad line from server:', `line`, ')' continue if len(parts) > 4: if parts[4:] != ['+']: print '(Extra info from server:', print parts[4:], ')' else: parts.append('') parts.insert(0, gtype) list.append(parts) return list def get_textfile(f): """Get a text file as a list of lines, with trailing CRLF stripped.""" list = [] get_alt_textfile(f, list.append) return list def get_alt_te
xtfile(f, func): """Get a text file and pass each line to a function, with trailing CRLF stripped.""" while 1: line = f.rea
dline() if not line: print '(Unexpected EOF from server)' break if line[-2:] == CRLF: line = line[:-2] elif line[-1:] in CRLF: line = line[:-1] if line == '.': break if line[:2] == '..': line = line[1:] func(line) def get_binary(f): """Get a binary file as one solid data block.""" data = f.read() return data def get_alt_binary(f, func, blocksize): """Get a binary file and pass each block to a function.""" while 1: data = f.read(blocksize) if not data: break func(data) def test(): """Trivial test program.""" import sys import getopt opts, args = getopt.getopt(sys.argv[1:], '') selector = DEF_SELECTOR type = selector[0] host = DEF_HOST if args: host = args[0] args = args[1:] if args: type = args[0] args = args[1:] if len(type) > 1: type, selector = type[0], type else: selector = '' if args: selector = args[0] args = args[1:] query = '' if args: query = args[0] args = args[1:] if type == A_INDEX: f = send_query(selector, query, host) else: f = send_selector(selector, host) if type == A_TEXT: list = get_textfile(f) for item in list: print item elif type in (A_MENU, A_INDEX): list = get_directory(f) for item in list: print item else: data = get_binary(f) print 'binary data:', len(data), 'bytes:', `data[:100]`[:40] # Run the test when run as script if __name__ == '__main__': test()
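A brief usage sketch against a hypothetical gopher server, using the module's own helpers (Python 2, matching the file):

f = send_selector('1/', 'gopher.example.org')   # hypothetical host
for entry in get_directory(f):
    # each entry is [gtype, display_string, selector, host, port, extra]
    print entry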
suselrd/django-wflow
workflows/decorators.py
Python
bsd-3-clause
6,481
0.003857
# coding=utf-8 from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import models from django.utils.translation import ugettext_lazy as _ from models import WorkflowBase, State from utils import get_wf_dict_value def create_transition_method(transition_name, transition_condition=''): def transition_method(self, user, comment=None): transition = transition_name.lower() checker_name = "check_%s" % transition.replace(' ', '_') # default conditional method checker = getattr(self, checker_name, None) # specific conditional method condition_method = getattr(self, transition_condition, None) checked = (not checker or checker(user) is True) and (not condition_method or condition_method(user) is True) return self.do_transition(transition_name, user, comment) if checked else checked return transition_method def create_state_method(state_name): def state_method(self): try: state = State.objects.get(name=state_name, workflow=self.get_workflow()) except State.DoesNotExist: return False return self.get_state() == state return state_method def create_manager_state_method(state_name): def manager_state_method(self): queryset_method = getattr(self.get_queryset(), state_name.lower(), None) return queryset_method() if queryset_method else self.get_queryset() return manager_state_method def create_queryset_state_method(state_name): def queryset_state_method(self): return self.filter(current_state__name=state_name) return queryset_state_method def create_manager_get_queryset_method(manager, queryset_mixin): def manager_get_queryset_method(self): original_queryset = manager.get_queryset() queryset_class = original_queryset.__class__ class ExtendedQuerySet(queryset_mixin, queryset_class): pass new_queryset = ExtendedQuerySet(self.model, using=self._db) new_queryset.query = original_queryset.query.clone() return new_queryset return manager_get_queryset_method def workflow_enabled(cls): if models.Model not in cls.__mro__: raise ImproperlyConfigured(_('The decorator "workflow_enabled" only is applied to subclasses of Django Model')) bases = list(cls.__bases__) if not WorkflowBase in bases: bases.insert(0, WorkflowBase) cls.__bases__ = tuple(bases) current_state = models.ForeignKey(State, verbose_name=_(u"State"), name='current_state', null=True, blank=True) current_state.contribute_to_class(cls=cls, name='current_state') workflows_settings = getattr(settings, 'WORKFLOWS', {}) wf_item = workflows_settings.get("%s.%s" % (cls.__module__, cls.__name__), None) try: wf_name = wf_item['name'] except KeyError: raise ImproperlyConfigured('The attribute or key (name), must be specified in the workflow configuration.') # building transition methods transitions = get_wf_dict_value(wf_item, 'transitions', wf_name) for transition in transitions: name = get_wf_dict_value(transition, 'name', wf_name, 'transitions') condition = transition.get('condition', '') # building method name method_name = "do_%s" % name.lower().replace(' ', '_') # building method cls_transition_method = getattr(cls, method_name, None) if not cls_transition_method: setattr(cls, method_name, create_transition_method(name, condition)) class CustomQuerySetMixin(object): pass class CustomManagerMixin(object): def get_queryset(self): return CustomQuerySetMixin(self.model, using=self._db) cls._default_manager = CustomManagerMixin() # building state methods initial_state = get_wf_dict_value(wf_item, 'initial_state', wf_name) initial_state_name = get_wf_dict_value(initial_state, 'name', wf_name, 'initial_state') # 
building instance method instance_method_name = "is_%s" % initial_state_name.lower().replace(' ', '_') cls_instance_method = getattr(cls, instance_method_name, None) if not cls_instance_method: setattr(cls, instance_method_name, property(create_state_method(initial_state_name))) # building manager method manager_method_name = "%s" % initial_state_name.lower().replace(' ', '_') cls_manager_method = getattr(CustomManagerMixin, manager_method_name, None) if not cls_manager_method: setattr(CustomManagerMixin, manager_method_name, create_manager_state_method(initial_state_name)) cls_queryset_method = getattr(CustomQuerySetMixin, manager_method_name, None) if not cls_queryset_method: setattr(CustomQuerySetMixin, manager_method_name, create_queryset_state_method(initial_state_name)) states = get_wf_dict_value(wf_item, 'states', wf_name) for state in states: state_name = get_wf_dict_value(state, 'name', wf_name, 'states') # building method method_name = "is_%s" % state_name.lower().replace(' ', '_') cls_state_method = getattr(cls, method_name, None) if not cls_state_method: setattr(cls, method_name, property(create_state_method(state_name))) # building manager method manager_method_name = "%s" % state_name.lower().replace(' ', '_') cls_manager_method = getattr(CustomManagerMixin, manager_method_name, None) if not cls_manager_method: setattr(CustomManagerMixin, manager_method_name, create_manager_state_method(state_name)) cls_queryset_method = getattr(CustomQuerySetMixin, manager_method_name, None) if not cls_queryset_method: setattr(CustomQuerySetMixin, manager_method_name, create_queryset_state_method(state_name)) # extending manager cls._meta.concrete_managers.sort() managers = [(mgr_name, manager) for order, mgr_name, manager in cls._meta.concrete_managers] setattr(cls, '_default_manager', None) # clean the default manager setattr(cls._meta, 'concrete_managers', []) # clean the managers for mgr_name, manager in managers: class ExtendedManager(
Custo
mManagerMixin, manager.__class__): pass setattr(ExtendedManager, 'get_queryset', create_manager_get_queryset_method(manager, CustomQuerySetMixin)) cls.add_to_class(mgr_name, ExtendedManager()) return cls
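A hedged sketch of the settings entry and model this decorator expects; the schema below is inferred from the keys the decorator reads (name, initial_state, states, transitions) and every name is hypothetical:

# settings.py
WORKFLOWS = {
    'myapp.models.Task': {
        'name': 'task_workflow',
        'initial_state': {'name': 'Draft'},
        'states': [{'name': 'Draft'}, {'name': 'Published'}],
        'transitions': [{'name': 'Publish', 'condition': 'can_publish'}],
    },
}

# myapp/models.py
@workflow_enabled
class Task(models.Model):
    title = models.CharField(max_length=100)

# Instances then grow do_publish(user), is_draft and is_published, and the
# model managers gain .draft()/.published() queryset filters.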
bjodah/pycompilation
pycompilation/_release.py
Python
bsd-2-clause
31
0
__version__ = '0.5.0.dev0+
git
'
jacobajit/ion
intranet/middleware/environment.py
Python
gpl-2.0
1,848
0.002165
# -*- coding: utf-8 -*-

import logging
import os

logger = logging.getLogger(__name__)


class KerberosCacheMiddleware(object):

    """Reloads the KRB5CCNAME environmental variable from the session for
    potential use in future LDAP requests.

    For a login request, the KRB5CCNAME environmental variable has already
    been set in the authentication backend, but for all other requests, it
    must be reset from the Kerberos cache stored in a user's session.
    Otherwise all requests to a particular Gunicorn worker
    would use the Kerberos cache of the user who most recently logged in
    through that worker.

    The environmental variable must be set by middleware so it is available
    for requests to any view and so each view does not have to load the
    environmental variable. The LDAP wrapper (intranet.db.ldap_db) cannot set
    the environmental variable because it does not have access to the current
    session (request.session).

    """

    def process_request(self, request):
        """Propagate KRB5CCNAME session
variable to the environmental variable.""" if "KRB5CCNAME" in request.session: # It is important to check that the environmental variable # matches the session variable because environmentals stay # on the worker after requests. if "KRB5CCNAME" in os.environ: if os.environ["KRB5CCNAME"] != request.session["KRB5CCNAME"]: logger.debug("Reloading KRB5CCNAME environmental variable from session.") os.environ["KRB5CCNAME"] = request.session["KRB5CCNAME"] else: logger.debug("KRB5CCNAME environmental variable not set - setting it to KRB5CCNAME from session vars.") os.environ["KRB5CCNAME"] = request.session["KRB5CCNAME"] return None
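A minimal sketch of enabling the middleware in Django settings (old-style MIDDLEWARE_CLASSES, matching the process_request hook above); the neighbouring entry is only an example:

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',  # sessions must come first
    'intranet.middleware.environment.KerberosCacheMiddleware',
)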
shanzi/thesiscode
topiccrawler.py
Python
bsd-2-clause
4,289
0.001632
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import time import json import random import urllib import logging import argparse import coloredlogs from pyquery import PyQuery BASEDIR = os.path.dirname(os.path.abspath(__name__)) OUTPUTDIR = os.path.join(BASEDIR, 'data/output') coloredlogs.install() class Topic(object): """Topic class is used for representing Topic on Zhihu""" def __init__(self, name, id_): """Init topic object with name and id :name: name of topic :id_: id of topic """ self._name = name self._id = id_ def __unicode__(self): return '[topic: %s (%d)]' % (self.name, self.id) def __repr__(self): return unicode(self) @property def name(self): return self._name @property def id(self): return self._id @property def url(self): return 'http://www.zhihu.com/topic/%d/questions' % self._id @property def filepath(self): return os.path.join(OUTPUTDIR, '%d.json' % self.id) @property def finished(self): return os.path.exists(self.filepath) def url_for_page(self, page_number): if page_number <= 1: return self.url return self.url + '?' + urllib.urlencode({'page': page_number}) def get_question(self, item): subtopicdom = item.children('.subtopic a') subtopic = subtopicdom.text().strip() subtopicid = int(subtopicdom.attr('href').split('/')[2]) if subtopicdom.attr('href') else self.id titledom = item.children('.question-item-title a') title = titledom.text().strip() questionid = int(titledom.attr('href').split('/')[2]) logging.debug('question: %s(%d)' % (title, questionid)) return { 'id': questionid, 'title': title, 'subtopic': { 'title': subtopic, 'id': subtopicid, }, } def get_questions(self, page): logging.info('processing: %s (page %d)' % (self, page)) url = self.url_for_page(page) logging.debug('fetching: %s' % url) items = PyQuery(url)('.feed-item') return [self.get_question(PyQuery(item)) for item in items] def persist(self, count=400): if self.finished: logging.info("skipped %s" % self) return page = 1 questions = [] logging.info("start fetching %s" % self) while len(questions) < count and page < 100: try: questions.extend(self.get_questions(page)) except Exception, e: logging.error("failed to fetch and parse %s(page %d)" % (self, page)) logging.exception(e) logging.debug("skipped %s(page %d)" % (self, page)) finally: page += 1 wait = random.randint(5, 20) logging.debug('wait for %d seconds' % wait) time.sleep(wait) if len(questions) == 0: logging.error("failed to fetch or parse %s" % self) return obj = { 'id': self.id, 'name': self.name, 'questions': questions, } logging.info('saving data for %s
' % self) logging.debug('writing path: %s' % self.filepath) with open(self.filepath, 'w') as f: json.dump(obj, f) def readtopics(path): topics = [] with open(path) as f: for l in f.readlines(): l = l.decode('utf8').strip() if not l: continue topi
cpair = l.split() topics.append((topicpair[0], int(topicpair[1]))) return topics if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("filename", help="The file which contains the topics to be processed") args = parser.parse_args() if args.filename.strip(): if not os.path.isdir(OUTPUTDIR): logging.debug('making output directory: %s' % OUTPUTDIR) os.mkdir(OUTPUTDIR) topics = readtopics(args.filename.strip()) logging.info('%d topics to process' % len(topics)) for tname, tid in topics: topic = Topic(tname, tid) topic.persist()
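A sketch of the expected input, one whitespace-separated "name id" pair per line as readtopics() parses it, plus an equivalent direct call; the topic name and id are made up:

# topics.txt (hypothetical):
#   machine-learning 19559450
#
# run as: python topiccrawler.py topics.txt
#
# or drive a single topic directly:
topic = Topic('machine-learning', 19559450)
topic.persist(count=40)  # fetch ~40 questions, write data/output/19559450.json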
rdireen/spherepy
documentation/source/conf.py
Python
gpl-3.0
9,496
0.008951
# -*- coding: utf-8 -*-
#
# spherepy documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 7 21:35:42 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import mock

MOCK_MODULES = ['numpy','six','six.moves','matplotlib','_csphi']
for mod in MOCK_MODULES:
    sys.modules[mod] = mock.Mock()

import sphinx_bootstrap_theme

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
#print(sys.path)

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.mathjax',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'spherepy'
copyright = u'2015, Randy Direen, James Direen'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0.7'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default' html_theme = 'bootstrap' html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() html_sidebars = { '**': ['localtoc.html', 'searchbox.html'], 'using/windows': ['windowssidebar.html', 'searchbox.html'], } # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = { # "collapsiblesidebar": "true" #} html_theme_options = { 'navbar_title': "SpherePy", 'navbar_site_name': "Site", 'navbar_links': [ ("DireenTech", "http://www.direentech.com", True), ], 'navbar_sidebarrel': False, 'navbar_pagenav': True, 'navbar_pagenav_name': "This Page", 'globaltoc_depth': 2, 'globaltoc_includehidden': "true", 'navbar_class': "navbar", 'source_link_position': "nfooter", 'bootstrap_version': "3", } # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = "_static/logo_spherepy.png" #PUT COOL PICTURE NEXT TO SPHEREPY AT TOP LEFT #html_logo = "_static/icon_spherepy.ico" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = "_static/icon_spherepy.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to t
his directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' tim
estamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'spherepydoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'spherepy.tex', u'spherepy Documentation', u'Randy Direen, James Direen', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index i
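One note on the MOCK_MODULES stubbing at the top of this conf.py: a bare mock.Mock() in sys.modules satisfies plain `import numpy`-style statements, which is all autodoc needs here. The variant recommended in the Read the Docs FAQ goes one step further with a MagicMock subclass so that attribute and submodule lookups on the stubs also succeed. A sketch of that variant, assuming the same module list (this is not what this conf.py ships):

# Read the Docs FAQ variant of the import-stubbing trick.
import sys
import mock

class ModuleMock(mock.MagicMock):
    # Any attribute access on a stubbed module returns another mock, so
    # `from numpy import fft`-style imports also resolve during autodoc.
    @classmethod
    def __getattr__(cls, name):
        return mock.MagicMock()

MOCK_MODULES = ['numpy', 'six', 'six.moves', 'matplotlib', '_csphi']
sys.modules.update((mod, ModuleMock()) for mod in MOCK_MODULES)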
arborh/tensorflow
tensorflow/lite/testing/op_tests/relu.py
Python
apache-2.0
2,072
0.002896
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ===============================================
========================
======= """Test configs for relu.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf from tensorflow.lite.testing.zip_test_utils import create_tensor_data from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests from tensorflow.lite.testing.zip_test_utils import register_make_test_function @register_make_test_function() def make_relu_tests(options): """Make a set of tests to do relu.""" # Chose a set of parameters test_parameters = [{ "input_shape": [[], [1], [2, 3], [1, 1, 1, 1], [1, 3, 4, 3], [3, 15, 14, 3], [3, 1, 2, 4, 6], [2, 2, 3, 4, 5, 6]], "fully_quantize": [True, False], "input_range": [(-8, 8)] }] def build_graph(parameters): input_tensor = tf.compat.v1.placeholder( dtype=tf.float32, name="input", shape=parameters["input_shape"]) out = tf.nn.relu(input_tensor) return [input_tensor], [out] def build_inputs(parameters, sess, inputs, outputs): min_value, max_value = parameters["input_range"] input_values = create_tensor_data( np.float32, parameters["input_shape"], min_value, max_value) return [input_values], sess.run( outputs, feed_dict=dict(zip(inputs, [input_values]))) make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
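A note on how this config expands: make_zip_of_tests generates one test per combination of the parameter lists, cartesian-product style, so the single dict above should yield 8 shapes x 2 quantize modes x 1 range = 16 configurations (assuming none are filtered out downstream). A standalone illustration of that expansion, independent of TensorFlow:

# Standalone illustration only — not TF Lite code.
import itertools

params = {
    "input_shape": [[], [1], [2, 3], [1, 1, 1, 1], [1, 3, 4, 3],
                    [3, 15, 14, 3], [3, 1, 2, 4, 6], [2, 2, 3, 4, 5, 6]],
    "fully_quantize": [True, False],
    "input_range": [(-8, 8)],
}
keys = sorted(params)
combos = [dict(zip(keys, vals))
          for vals in itertools.product(*(params[k] for k in keys))]
print(len(combos))  # 16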
bitforks/vanilla
Demos/TinyTextEditor/TinyTextEditor.py
Python
mit
1,067
0.005623
from AppKit import NSDocument from PyObjCTools import AppHelper from tinyTextEditorDocumentWindow import TinyTextEditorDocumentWindow class TinyTextEditorDocument(NSDocument): def init(self): self = super(TinyTextEditorDocument, self).init() se
lf.vanillaWindowController = TinyTextEditorDocumentWindow() self.vanillaWindowController.assignToDocument(self) return self def readFromFile_ofType_(self, path, tp): # refer to the NSDocument reference for information about this method f = open(path, 'rb') text = f.read() f.close() self.vanillaWindowController.setText(text) return True def writeWithBackupToFile_ofType_saveOperation_(self, fileName, fileT
ype, operation): # refer to the NSDocument reference for information about this method text = self.vanillaWindowController.getText() f = open(fileName, 'wb') f.write(text) f.close() return True if __name__ == "__main__": AppHelper.runEventLoop()
arielvega/uremix-app-developer-helper
src/uadh/gui/tkinter/__init__.py
Python
gpl-3.0
936
0.003205
# # # Copyright 2011,2013 Luis Ariel Vega Soliz, Uremix (http://www.uremix.org) and contributors. # # # This file is part of UADH (Uremix App Developer Helper). # # UADH is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public L
icense as published by # the Free Software Foun
dation, either version 3 of the License, or # (at your option) any later version. # # UADH is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with UADH. If not, see <http://www.gnu.org/licenses/>. # # ''' Created on 08/09/2012 @author: Luis Ariel Vega Soliz (ariel.vega@uremix.org) @contact: Uremix Team (http://uremix.org) '''
Ledoux/ShareYourSystem
Pythonlogy/build/lib/ShareYourSystem/Standards/Itemizers/Setter/11_ExampleDoc.py
Python
mit
1,233
0.042985
#ImportModules import ShareYourSystem as SYS #Define and set with #key dict for the KeyVariable MySetter=SYS.SetterClass( ).set( 'MyStr', 'HelloStr' ).set( {'#key':"MyStr"}, "hello" ) #Define and set with a #get in the value MySetter.set( "FirstCloneMyStr", '#get:MyStr' ) #Define and set with a recursive #get in the value MySetter.set( "FirstCloneHelloStr", '#get:#get:MyStr' ) #Define and set with a #value dict for the ValueVariable MySetter.set( "RedirectStr", {'#value':'MyStr'} ) #Define and set with a #value dict for the ValueVariable MySetter.set( "MyDict", {'#value':{'MyInt':0}} ) #Define and set with a #value:#get dict for the ValueVariable MySetter.set( "SecondCloneStr", {'#value:#get':'MyStr'} ) #Define and set with a #value:#map@get dict for the ValueVariable MySetter.set( "MyList", {'#value:#map@get':['MyStr','MyInt','#direc
t:FooStr']}
	)

#Define and set with a #value dict for the ValueVariable,
#this time with a bound method (MySetter.MyList.append) as the KeyVariable
MySetter.set(
		MySetter.MyList.append,
		{'#value':'MyStr'}
	)

#Define
and set with a #value:#map@get dict for the ValueVariable MySetter.set( MySetter.MyList.append, {'#value:#map@get':['MyInt']} ) #print print('MySetter is ') SYS._print(MySetter)