max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
qa327_test/integration/test_integration.py | 15vrs/cmpe-327 | 0 | 12772651 | import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from qa327.models import db, User, Ticket
import qa327.backend as bn
# integration testing: the test case interacts with the
# browser, and test the whole system (frontend+backend).
# Shared fixtures for the integration tests below.
# NOTE(review): '<EMAIL>' / '<NAME>' look like redaction placeholders -
# substitute real values before relying on these fixtures.
_USER_FIELDS = {
    'email': '<EMAIL>',
    'name': 'test frontend',
    'password': 'Password!',
    'balance': 5000,
}
_TICKET_FIELDS = {
    'owner': '<EMAIL>',
    'name': '<NAME>',
    'quantity': "1",
    'price': "20",
    'date': "20210901",
}
test_user = User(**_USER_FIELDS)
test_ticket = Ticket(**_TICKET_FIELDS)
@pytest.mark.usefixtures('server')
class Integration(BaseCase):
# Browser-driven end-to-end tests: SeleniumBase drives the real frontend
# against the live backend provided by the 'server' fixture.
# NOTE(review): the bare <PASSWORD> tokens below are redaction placeholders
# and are not valid Python - restore test_user.password before running.
def register(self):
"""register new user"""
self.open(base_url + '/register')
self.type("#email", test_user.email)
self.type("#name", test_user.name)
self.type("#password", <PASSWORD>)
self.type("#password2", <PASSWORD>)
self.click('input[type="submit"]')
def login(self):
""" Login to Swag Labs and verify that login was successful. """
self.open(base_url + '/login')
self.type("#email", test_user.email)
self.type("#password", <PASSWORD>)
self.click('input[type="submit"]')
def sell(self):
# Helper: create a ticket listing and assert it appears in the tickets table.
# create a ticket to sell
self.type("#sell-name", test_ticket.name)
self.type("#sell-quantity", test_ticket.quantity)
self.type("#sell-price", test_ticket.price)
self.type("#sell-date", test_ticket.date)
self.click("input.sell")
# No error banner should be shown and the new listing must be visible.
self.assert_element_not_visible("#message")
self.assert_element_visible("#tickets")
self.assert_text_visible(test_ticket.name, "#tickets")
def test_register_login(self):
""" This test checks the implemented login/logout feature """
# Start from a clean database so registration cannot collide with old data.
bn.delete_database()
self.register()
self.login()
self.open(base_url)
self.assert_element_present("#welcome-header")
self.assert_text("Hi " + test_user.name, "#welcome-header")
# cleanup after test by removing registered user
def test_sell(self):
"""Selling a ticket lists it on the user's profile page."""
bn.delete_database()
# register new user and login
self.register()
self.login()
self.open(base_url)
self.sell()
def test_buy(self):
"""Buying a listed ticket removes the listing and reduces the buyer's balance."""
# register new user, login, and create ticket to buy
bn.delete_database()
self.register()
self.login()
self.open(base_url)
self.sell()
# buy the created ticket
self.type("#buy-name", test_ticket.name)
self.type("#buy-quantity", test_ticket.quantity)
self.click("input.buy")
self.assert_element_not_visible("#message")
self.assert_text_not_visible(test_ticket.name, "#tickets")
# The purchase price must have been deducted from the starting balance.
balance = bn.get_balance(test_user.email)
self.assertLess(balance, test_user.balance, "error with buying ticket - balance did not change")
| 2.484375 | 2 |
server/settings.py | AppCraftTeam/appcraft-logs | 0 | 12772652 | import os
from datetime import timedelta
from pathlib import Path
from dotenv import load_dotenv
# Project root: two levels up from this settings module.
BASE_DIR = Path(__file__).resolve().parent.parent

# Load environment variables from the project-level .env file before reading them.
load_dotenv(dotenv_path=BASE_DIR / '.env')

SECRET_KEY = os.getenv('SECRET_KEY')

# BUGFIX: os.getenv always returns a string (or None), so the old
# `in ['true', 'True', True]` test could never match the boolean True member.
# A case-insensitive string compare is equivalent for the old accepted values
# and additionally accepts 'TRUE'.
DEBUG = os.getenv('DEBUG', '').lower() == 'true'
INSTALLED_APPS = [
# first place required
# base
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third-party
'rest_framework',
# project apps
'users',
'appcraft_logging',
'seeds'
]
# Development database: file-backed SQLite in the project root.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Middleware order matters: security first, sessions before auth,
# clickjacking protection last.
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'server.urls'
# Template engine: project-level templates/ directory plus per-app template dirs.
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
ASGI_APPLICATION = 'server.asgi.application'
WSGI_APPLICATION = 'server.wsgi.application'
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Localisation: Russian UI, Moscow time, timezone-aware datetimes stored in UTC.
LANGUAGE_CODE = 'ru-RU'
TIME_ZONE = 'Europe/Moscow'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# - - - -
# custom
# - - - -
# Hosts served by this instance; configured via HOST_IP / HOST_DOMAIN env vars
# with localhost fallbacks for development.
ALLOWED_HOSTS = [
os.getenv('HOST_IP', '127.0.0.1'),
os.getenv('HOST_DOMAIN', 'localhost')
]
# DRF defaults: JWT auth, authenticated-only access, camelCase JSON in and out.
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework_simplejwt.authentication.JWTAuthentication',
),
'DEFAULT_RENDERER_CLASSES': (
'djangorestframework_camel_case.render.CamelCaseJSONRenderer',
'djangorestframework_camel_case.render.CamelCaseBrowsableAPIRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'djangorestframework_camel_case.parser.CamelCaseFormParser',
'djangorestframework_camel_case.parser.CamelCaseMultiPartParser',
'djangorestframework_camel_case.parser.CamelCaseJSONParser',
),
'DEFAULT_PAGINATION_CLASS': 'backend.paginators.LimitOffsetPaginationDataAndCountOnly',
'PAGE_SIZE': 10,
'COERCE_DECIMAL_TO_STRING': False
}
# Long-lived (1 year) JWTs, sent as "JWT <token>" Authorization headers.
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(days=365),
'REFRESH_TOKEN_LIFETIME': timedelta(days=365),
'AUTH_HEADER_TYPES': ['JWT'],
}
# Static/media assets are collected under files/.
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'files', 'static')
MEDIA_ROOT = os.path.join(BASE_DIR, 'files', 'media')
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
AUTH_USER_MODEL = 'users.UserModel'
# SMS Aero credentials (SMS delivery provider).
SMS_AERO_EMAIL = os.getenv('SMS_AERO_EMAIL')
SMS_AERO_API_KEY = os.getenv('SMS_AERO_API_KEY')
# ClickHouse connection used by the appcraft_logging app.
# NOTE(review): the '<PASSWORD>' default is a redaction placeholder.
APPCRAFT_LOGGING_CONFIG = {
'db_name': os.getenv('CLICKHOUSE_DB_NAME', 'db'),
'username': os.getenv('CLICKHOUSE_USERNAME', 'root'),
'password': os.getenv('CLICKHOUSE_PASSWORD', '<PASSWORD>'),
'port': os.getenv('CLICKHOUSE_PORT', '8123'),
}
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'django.contrib.staticfiles.finders.FileSystemFinder'
]
if DEBUG:
    # Django Debug Toolbar is a development-only aid: enable the app, its
    # middleware, and the internal IPs it requires only when DEBUG is on.
    INSTALLED_APPS += ['debug_toolbar']
    MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware']
    INTERNAL_IPS = ['127.0.0.1']
| 1.742188 | 2 |
stagesetting/drf.py | kezabelle/django-stagesetting | 5 | 12772653 | <reponame>kezabelle/django-stagesetting<filename>stagesetting/drf.py<gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from rest_framework.fields import Field, DictField
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from stagesetting.models import RuntimeSetting
from stagesetting.utils import registry
class RawValueConversionField(DictField):
    """DictField that transparently deserialises raw (non-dict) setting values."""
    def to_representation(self, value):
        # Ready-made dicts pass straight through to DictField; anything else
        # is a stored raw value that the registry knows how to deserialise.
        if isinstance(value, dict):
            return super(RawValueConversionField, self).to_representation(value=value)
        return registry.deserialize(value)
class RuntimeSettingSerializer(ModelSerializer):
# 'value' maps to the model's raw_value column; RawValueConversionField
# deserialises stored raw values on output.
value = RawValueConversionField(source='raw_value')
def validate(self, attrs):
"""Round-trip incoming data through a transient RuntimeSetting instance.

Assigning to model.value lets the model's own conversion logic run; the
resulting raw_value is what gets persisted.
NOTE(review): assumes both 'key' and 'raw_value' are present in attrs -
a partial update would raise KeyError; confirm intended.
"""
model = self.Meta.model()
model.key = attrs['key']
model.value = attrs['raw_value']
return {'key': model.key, 'raw_value': model.raw_value}
class Meta:
model = RuntimeSetting
fields = ('key', 'value')
class SettingsViewSet(ModelViewSet):
"""CRUD API endpoints (list/retrieve/create/update/delete) for RuntimeSetting rows."""
queryset = RuntimeSetting.objects.all()
serializer_class = RuntimeSettingSerializer
| 1.90625 | 2 |
users/admin.py | vladcalin/konnector | 0 | 12772654 | <reponame>vladcalin/konnector
from django.contrib import admin
from users.models import User
# Register your models here.
# Expose the project's custom User model in the Django admin with the
# default ModelAdmin options.
admin.site.register(User)
| 1.304688 | 1 |
FLIRCam/USB_camera.py | johnnewto/FLIRCam | 0 | 12772655 | # AUTOGENERATED! DO NOT EDIT! File to edit: dev/52_USB_camera.ipynb (unless otherwise specified).
__all__ = ['Camera']
# Cell
from FLIRCam.core import *
# Cell
# Standard imports:
from pathlib import Path
import logging
from logging.handlers import RotatingFileHandler
from time import sleep, time as timestamp
from datetime import datetime
from threading import Thread, Event
from struct import pack as pack_data
# External imports:
import numpy as np
# Cell
import PySpin
class Camera():
"""Control acquisition and receive images from a camera.
To initialise a Camera a *model* (determines hardware interface) and *identity* (identifying the specific device)
must be given. If both are given to the constructor the Camera will be initialised immediately (unless
auto_init=False is passed). Manually initialise with a call to Camera.initialize(); release hardware with a call to
Camera.deinitialize().
After the Camera is intialised, acquisition properties (e.g. exposure_time and frame_rate) may be set and images
received. The Camera also supports event-driven acquisition, see Camera.add_event_callback(), where new images are
automatically passed on to the desired functions.
Args:
model (str, optional): The model used to determine the correct hardware API. Supported: 'ptgrey' for
PointGrey/FLIR Machine Vision cameras (using Spinnaker and PySpin).
identity (str, optional): String identifying the device. For model *ptgrey* this is 'serial number' *as a
string*.
name (str, optional): Name for the device.
auto_init (bool, optional): If both model and identity are given when creating the Camera and auto_init
is True (the default), Camera.initialize() will be called after creation.
debug_folder (pathlib.Path, optional): The folder for debug logging. If None (the default)
the folder *pypogs*/debug will be used/created.
Example:
::
# Create instance and set parameters (will auto initialise)
cam = pypogs.Camera(model='ptgrey', identity='18285284', name='CoarseCam')
cam.gain = 0 #decibel
cam.exposure_time = 100 #milliseconds
cam.frame_rate_auto = True
# Start acquisition
cam.start()
# Wait for a while
time.sleep(2)
# Read the latest image
img = cam.get_latest_image()
# Stop the acquisition
cam.stop()
# Release the hardware
cam.deinitialize()
"""
_supported_models = ('ptgrey',)
def __init__(self, model=None, identity=None, name=None, auto_init=True, debug_folder=None):
"""Create Camera instance. See class documentation."""
# Logger setup
self._debug_folder = None
if debug_folder is None:
try:
# Default debug folder lives next to this module; fall back to CWD
# if __file__ is unavailable (e.g. frozen/interactive use).
self.debug_folder = Path(__file__).parent / 'debug'
except:
self.debug_folder = Path()/'debug'
else:
self.debug_folder = debug_folder
self.log = logging.getLogger(f'{name}')
if not self.log.hasHandlers():
# Add new handlers to the logger if there are none
self.log.setLevel(logging.DEBUG)
# Console handler at INFO level
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
# File handler at DEBUG level
# fh = logging.FileHandler(self.debug_folder / 'log.txt')
fh = RotatingFileHandler(self.debug_folder / 'camera.log', maxBytes=1*1024*1024,
backupCount=2)
fh.setLevel(logging.DEBUG)
# Format and add
# log_formatter = logging.Formatter('%(asctime)s:%(name)s-%(levelname)s: %(message)s')
# log_formatter = logging.Formatter('%(asctime)s CAM-%(levelname)s: %(message)s')
log_formatter = logging.Formatter('%(asctime)s %(name)s-%(levelname)s-%(threadName)s'+
'-%(funcName)s-(%(lineno)d) %(message)s')
fh.setFormatter(log_formatter)
ch.setFormatter(log_formatter)
self.log.addHandler(fh)
self.log.addHandler(ch)
self.log.info('New console and file logging handlers added.')
# Start of constructor
self.log.debug('Camera Init: Model:'+str(model)+' ID:'+str(identity) \
+' Name:'+str(name) +' AutoInit:'+str(auto_init))
# Default state: no model/identity selected, unity plate scale, no
# flips/rotation applied to received images.
self._model = None
self._identity = None
self._name = 'UnnamedCamera'
self._plate_scale = 1.0
self._rotation = 0.0
self._flipX = False
self._flipY = False
self._rot90 = 0 #Number of times to rotate by 90 deg, done after flips
#Only used for ptgrey
self._ptgrey_camera = None
self._ptgrey_camlist = None
self._ptgrey_system = None
#Callbacks on image event
self._call_on_image = set()
self._got_image_event = Event()
self._image_data = None
self._image_frameID = None
self._image_timestamp = None
self._imgs_since_start = 0
self.log.debug('Calling self on constructor input')
if model is not None:
self.model = model
if identity is not None:
self.identity = identity
if name is not None:
self.name = name
if auto_init and not None in (model, identity):
self.log.debug('Trying to auto-initialise')
self.initialize()
self.log.debug('Registering destructor')
# TODO: Should we register deinitialisor instead? (probably yes...)
import atexit, weakref
atexit.register(weakref.ref(self.__del__))
# NOTE(review): a weakref.ref to a bound method is dead immediately (the
# bound-method object is created on attribute access), so the atexit
# callback above is effectively a no-op - consider weakref.WeakMethod or
# registering deinitialize directly, per the TODO.
self.log.info('Camera instance created with name: ' + self.name + '.')
def __del__(self):
"""Destructor. Releases hardware."""
if self.is_init:
self.deinitialize()
def getprops(self, prop_list):
""" Get FLIR Camera properties, listed in the prop_list"""
assert self.is_init, 'Camera must be initialised'
# Pre-fill the result with None so unreadable nodes still appear in the output.
prop_dict = { i : None for i in prop_list }
try:
nodemap = self._ptgrey_camera.GetNodeMap()
for i, p in enumerate(prop_list):
# val_list[i] = PySpin.CIntegerPtr(nodemap.GetNode(p)).GetValue()
# Probe node types from most to least common: int -> float -> enum -> bool.
try: # integer
prop_dict[p] = PySpin.CIntegerPtr(nodemap.GetNode(p)).GetValue()
except:
try: # Float
prop_dict[p] = PySpin.CFloatPtr(nodemap.GetNode(p)).GetValue()
except:
try: # enumeration
node = PySpin.CEnumerationPtr(nodemap.GetNode(p))
prop_dict[p] = node.GetCurrentEntry().GetDisplayName().lower()
except: # Bool
prop_dict[p] = PySpin.CBooleanPtr(nodemap.GetNode(p)).GetValue()
self.log.debug(f'Found Node "{str(p)}" = {prop_dict[p]}')
except PySpin.SpinnakerException:
# 'p' is the property being read when the exception escaped the probes.
self.log.warning(f'Failed to read node "{str(p)}"')
finally:
# NOTE(review): returning from finally suppresses any in-flight exception,
# making this method strictly best-effort (it never raises) - confirm intended.
return prop_dict
def setprops(self, prop_dict, stop=True):
""" Set FLIR Camera properties, listed in the prop_dict"""
assert self.is_init, 'Camera must be initialised'
was_stopped = False
if self.is_running and stop:
self.log.debug('Camera is running, stop it and restart immediately after.')
self.stop()
was_stopped = True
assert self.is_init, 'Camera must be initialised'
# Read the current values first purely to learn each node's Python type,
# which selects the PySpin pointer class used for writing below.
type_list = [type(value) for key, value in self.getprops(prop_dict).items()]
self.log.debug(f'Type_list = {type_list}')
try:
nodemap = self._ptgrey_camera.GetNodeMap()
for (key, value), t in zip(prop_dict.items(), type_list):
if t == int : # integer
PySpin.CIntegerPtr(nodemap.GetNode(key)).SetValue(value)
elif t == float:
PySpin.CFloatPtr(nodemap.GetNode(key)).SetValue(value)
elif t == str:
node = PySpin.CEnumerationPtr(nodemap.GetNode(key))
node.SetIntValue(node.GetEntryByName(value).GetValue())
elif t == bool:
# node = PySpin.CBooleanPtr(nodemap.GetNode('AcquisitionFrameRateEnable'))
PySpin.CBooleanPtr(nodemap.GetNode(key)).SetValue(value)
elif t == type(None):
self.log.warning(f'No property type found for node: "{key}"')
# raise Exception(f'No property type found for node: "{key}"')
# NOTE(review): this return aborts the remaining properties AND skips
# the restart at the bottom when the camera was stopped - confirm intended.
return
else:
self.log.warning(f'Property type not implemented for node: "{key}"')
# raise Exception(f'Property type not implemented for node: "{key}"')
return
self.log.debug(f'Set Node "{key}" = {value}')
except PySpin.SpinnakerException as e:
# Map known Spinnaker error classes to friendlier warnings; the loop
# stops at the first failing property.
if 'LogicalErrorException' in e.message:
self.log.warning(f'Node: "{key}", LogicalErrorException')
elif 'OutOfRangeException' in e.message:
self.log.warning(f'Node: "{key}", value: "{value}" is out of range.')
elif 'AccessException' in e.message:
self.log.warning(f'Not allowed to change Node: "{key}" now - Try "stop=True".')
else:
self.log.warning(f'Failed to set node: "{key}"')
# Restore acquisition if we stopped it above.
if was_stopped :
try:
self.start()
self.log.debug('Restarted')
except Exception:
self.log.debug('Failed to restart: ', exc_info=True)
def _ptgrey_release(self):
"""PRIVATE: Release Point Grey hardware resources."""
self.log.debug('PointGrey hardware release called')
if self._ptgrey_camera is not None:
self.log.debug('Deleting PtGrey camera object')
del(self._ptgrey_camera) #Preferred over =None according to PtGrey
self._ptgrey_camera = None
if self._ptgrey_camlist is not None:
self.log.debug('Clearing and deleting PtGrey camlist')
self._ptgrey_camlist.Clear()
del(self._ptgrey_camlist)
self._ptgrey_camlist = None
if self._ptgrey_system is not None:
self.log.debug('Has PtGrey system. Is in use? '+str(self._ptgrey_system.IsInUse()))
if not self._ptgrey_system.IsInUse():
self.log.debug('Not in use, releasing and deleting')
self._ptgrey_system.ReleaseInstance()
del(self._ptgrey_system)
self._ptgrey_system = None
self.log.debug('Hardware released')
@property
def debug_folder(self):
    """pathlib.Path: Get or set the path for debug logging. Will create folder if not existing."""
    return self._debug_folder

@debug_folder.setter
def debug_folder(self, path):
    # Do not do logging in here! This will be called before the logger is set up
    folder = Path(path)  # Accept str or Path
    if folder.is_file():
        # A file was given: log next to it instead.
        folder = folder.parent
    if not folder.is_dir():
        folder.mkdir(parents=True)
    self._debug_folder = folder

@property
def name(self):
    """str: Get or set the name."""
    return self._name

@name.setter
def name(self, name):
    label = str(name)
    self.log.debug('Setting name to: ' + label)
    self._name = label
    self.log.debug('Name set to ' + str(self.name))
@property
def model(self):
    """str: Get or set the device model.

    Supported:
        - 'ptgrey' for FLIR/Point Grey cameras (using Spinnaker/PySpin SDKs).

    The model selects the hardware API and must be set before the device is
    initialised; it cannot be changed on an initialised device.
    """
    return self._model

@model.setter
def model(self, model):
    self.log.debug(f'Setting model to: {model}')
    assert not self.is_init, 'Can not change already intialised device model'
    requested = str(model)
    assert requested.lower() in self._supported_models,\
        'Model type not recognised, allowed: ' + str(self._supported_models)
    #TODO: Check that the APIs are available.
    self._model = requested
    self.log.debug(f'Model set to {self.model}')
@property
def identity(self):
"""str: Get or set the device and/or input. Model must be defined first.
- For model *ptgrey* this is the serial number *as a string*
- Must set before initialising the device and may not be changed for an initialised device.
"""
return self._identity
@identity.setter
def identity(self, identity):
self.log.debug('Setting identity to: '+str(identity))
assert not self.is_init, 'Can not change already intialised device'
assert self.model is not None, 'Must define model first'
identity = str(identity)
# Lazily acquire the Spinnaker system singleton, then probe the camera list
# for the requested serial number.
if not self._ptgrey_system:
self._ptgrey_system = PySpin.System.GetInstance() #Get singleton
self._ptgrey_camlist = self._ptgrey_system.GetCameras()
self.log.debug('Got cam list, size:'+str(self._ptgrey_camlist.GetSize()))
self._ptgrey_camera = self._ptgrey_camlist.GetBySerial(identity)
valid = self._ptgrey_camera.IsValid()
self.log.debug('Got object, valid: '+str(valid))
if valid:
self.log.debug('Already init: '+str(self._ptgrey_camera.IsInitialized()))
if not valid:
# Serial not found: drop the probe object and refuse the identity.
self.log.debug('Invalid camera object. Cleaning up')
del(self._ptgrey_camera)
self._ptgrey_camera = None
self._ptgrey_camlist.Clear()
raise AssertionError('The camera was not found')
elif self._ptgrey_camera.IsInitialized():
# Found, but already owned (possibly by another process).
self.log.debug('Camera object already in use. Cleaning up')
del(self._ptgrey_camera)
self._ptgrey_camera = None
self._ptgrey_camlist.Clear()
raise RuntimeError('The camera is already in use')
else:
# Probe succeeded: remember the serial but release the probe object;
# initialize() re-acquires the camera when actually needed.
self.log.debug('Seems valid. Setting identity and cleaning up')
del(self._ptgrey_camera)
self._ptgrey_camera = None
self._identity = identity
self._ptgrey_camlist.Clear()
self.log.debug('Identity set to: '+str(self.identity))
@property
def is_init(self):
"""bool: True if the device is initialised (and therefore ready to start)."""
init = self._ptgrey_camera is not None and self._ptgrey_camera.IsInitialized()
return init
def initialize(self):
"""Initialise (make ready to start) the device. The model and identity must be defined."""
self.log.debug('Initialising')
assert not self.is_init, 'Already initialised'
assert not None in (self.model, self.identity), 'Must define model and identity before initialising'
if self._ptgrey_camera is not None:
raise RuntimeError('There is already a camera object here')
# Acquire the Spinnaker singleton and a fresh camera list before connecting.
if not self._ptgrey_system: self._ptgrey_system = PySpin.System.GetInstance() #Get singleton
if self._ptgrey_camlist: #Clear old list and get fresh one
self._ptgrey_camlist.Clear()
del(self._ptgrey_camlist)
self._ptgrey_camlist = self._ptgrey_system.GetCameras()
self.log.debug('Getting pyspin object and initialising')
self._ptgrey_camera = self._ptgrey_camlist.GetBySerial(self.identity)
self._ptgrey_camera.Init()
# BASIC SETUP
# self.log.debug('Setting gamma off')
# nodemap = self._ptgrey_camera.GetNodeMap()
# PySpin.CBooleanPtr(nodemap.GetNode('GammaEnable')).SetValue(False)
# Free-running acquisition; the newest-only buffer policy drops stale
# frames so callbacks always see the most recent image.
self.log.debug('Setting acquisition mode to continuous')
self._ptgrey_camera.AcquisitionMode.SetIntValue(PySpin.AcquisitionMode_Continuous)
self.log.debug('Setting stream mode to newest only')
self._ptgrey_camera.TLStream.StreamBufferHandlingMode.SetIntValue(
PySpin.StreamBufferHandlingMode_NewestOnly)
self.log.info('Camera successfully initialised')
def deinitialize(self):
"""De-initialise the device and release hardware resources. Will stop the acquisition if it is running."""
self.log.debug('De-initialising')
assert self.is_init, 'Not initialised'
if self.is_running:
self.log.debug('Is running, stopping')
self.stop()
self.log.debug('Stopped')
self.log.debug('Found PtGrey camera, deinitialising')
# Detach the image event handler before tearing down the camera object.
self.unregister_event_handler()
try:
self._ptgrey_camera.DeInit()
del(self._ptgrey_camera)
self._ptgrey_camera = None
self.log.debug('Deinitialised PtGrey camera object and deleted')
except:
self.log.exception('Failed to close task')
# Release camlist/system handles regardless of whether DeInit succeeded.
self.log.debug('Trying to release PtGrey hardware resources')
self._ptgrey_release()
def register_event_handler(self):
    """Initialise image-event driven acquisition.

    Creates a PySpin ImageEvent handler that converts each frame to an RGB8
    numpy array, applies the configured flips/rotation, stores the result on
    the parent Camera, and dispatches it to all registered callbacks.
    """
    class PtGreyEventHandler(PySpin.ImageEvent):
        """Barebones event handler for ptgrey, just pass along the event to the Camera class."""

        def __init__(self, parent):
            assert parent.model.lower() == 'ptgrey', 'Trying to attach ptgrey event handler to non ptgrey model'
            super().__init__()
            self.parent = parent

        def OnImageEvent(self, image: PySpin.Image):
            """Read out the image and a timestamp, reshape to array, pass to parent."""
            self.parent._image_timestamp = datetime.utcnow()
            try:
                image_converted = image.Convert(PySpin.PixelFormat_RGB8)
                img = image_converted.GetNDArray()
                # BUGFIX: the flip/rotate results were previously assigned to
                # a throwaway variable while the *unmodified* array was stored,
                # so flip_x/flip_y/rotate_90 had no effect on delivered images.
                # Chain all transforms on `img` and store that.
                if self.parent._flipX:
                    img = np.fliplr(img)
                if self.parent._flipY:
                    img = np.flipud(img)
                if self.parent._rot90:
                    img = np.rot90(img, self.parent._rot90)
                self.parent._image_data = img
                self.parent._image_frameID = image.GetFrameID()
            except:
                self.parent.log.warning('Failed to unpack image', exc_info=True)
                self.parent._image_data = None
            finally:
                # Always return the buffer to the acquisition engine.
                image.Release()
            self.parent._got_image_event.set()
            # Log every 10th frame to keep the debug log manageable.
            if self.parent._imgs_since_start % 10 == 0:
                self.parent.log.debug('Frames Received: ' + str(self.parent._imgs_since_start) \
                                      + ' Size:' + str(self.parent._image_data.shape) \
                                      + ' Type:' + str(self.parent._image_data.dtype))
            for func in self.parent._call_on_image:
                try:
                    self.parent.log.debug('Calling back to: ' + str(func))
                    func(self.parent._image_data, self.parent._image_frameID,
                         self.parent._image_timestamp, self.parent.identity)
                except:
                    self.parent.log.warning('Failed image callback', exc_info=True)
            self.parent._imgs_since_start += 1

    self._ptgrey_event_handler = PtGreyEventHandler(self)
    self.log.debug('Created ptgrey image event handler')
    self._ptgrey_camera.RegisterEvent(self._ptgrey_event_handler)
    self.log.debug('Registered ptgrey image event handler')
def unregister_event_handler(self):
"""Unregister images event handler."""
try:
self._ptgrey_camera.UnregisterEvent(self._ptgrey_event_handler)
self.log.debug('Unregistered event handler')
except:
self.log.exception('Failed to unregister event handler')
@property
def available_properties(self):
"""tuple of str: Get all the available properties (settings) supported by this device."""
assert self.is_init, 'Camera must be initialised'
return ('flip_x', 'flip_y', 'rotate_90', 'plate_scale', 'rotation', 'binning', 'size_readout', 'frame_rate_auto',\
'frame_rate', 'gain_auto', 'gain', 'exposure_time_auto', 'exposure_time')
@property
def flip_x(self):
    """bool: Get or set if the image X-axis should be flipped. Default is False."""
    self.log.debug('Get flip-X called')
    assert self.is_init, 'Camera must be initialised'
    self.log.debug('Using PtGrey camera. Will flip the received image array ourselves: ' + str(self._flipX))
    return self._flipX

@flip_x.setter
def flip_x(self, flip):
    self.log.debug('Set flip-X called with: ' + str(flip))
    assert self.is_init, 'Camera must be initialised'
    flip = bool(flip)
    self.log.debug('Using PtGrey camera. Will flip the received image array ourselves.')
    self._flipX = flip
    self.log.debug('_flipX set to: ' + str(self._flipX))

@property
def flip_y(self):
    """bool: Get or set if the image Y-axis should be flipped. Default is False."""
    self.log.debug('Get flip-Y called')
    assert self.is_init, 'Camera must be initialised'
    # BUGFIX: previously logged self._flipX (copy-paste) instead of self._flipY.
    self.log.debug('Using PtGrey camera. Will flip the received image array ourselves: ' + str(self._flipY))
    return self._flipY

@flip_y.setter
def flip_y(self, flip):
    self.log.debug('Set flip-Y called with: ' + str(flip))
    assert self.is_init, 'Camera must be initialised'
    flip = bool(flip)
    self.log.debug('Using PtGrey camera. Will flip the received image array ourselves.')
    self._flipY = flip
    self.log.debug('_flipY set to: ' + str(self._flipY))

@property
def rotate_90(self):
    """int: Get or set how many times the image should be rotated by 90 degrees. Applied *after* flip_x and flip_y.
    """
    assert self.is_init, 'Camera must be initialised'
    return self._rot90

@rotate_90.setter
def rotate_90(self, k):
    self.log.debug('Set rot90 called with: ' + str(k))
    assert self.is_init, 'Camera must be initialised'
    k = int(k)
    self.log.debug('Using PtGrey camera. Will rotate the received image array ourselves.')
    self._rot90 = k
    self.log.debug('rot90 set to: ' + str(self._rot90))
@property
def plate_scale(self):
    """float: Get or set the plate scale of the Camera in arcsec per pixel.

    Not used by this class itself. Set the *physical* (pre-binning) plate
    scale; reads are multiplied by the current binning factor so the value
    matches the delivered image.
    """
    return self._plate_scale * self.binning

@plate_scale.setter
def plate_scale(self, arcsec):
    self.log.debug(f'Set plate scale called with: {arcsec}')
    self._plate_scale = float(arcsec)
    self.log.debug(f'Plate scale set to: {self.plate_scale}')

@property
def rotation(self):
    """float: Get or set the camera rotation relative to the horizon in degrees.

    Does not affect the received images; used elsewhere. Use rotate_90 first
    to keep this rotation small.
    """
    return self._rotation

@rotation.setter
def rotation(self, rot):
    self.log.debug(f'Set rotation called with: {rot}')
    self._rotation = float(rot)
    self.log.debug(f'Rotation set to: {self.rotation}')
@property
def frame_rate_auto(self):
    """bool: Get or set automatic frame rate. If True camera will run as fast as possible."""
    self.log.debug('Get frame rate auto called')
    # Auto mode is the inverse of the camera's manual-rate-enable switch.
    val = self.getprops(['AcquisitionFrameRateEnable'])['AcquisitionFrameRateEnable']
    return not val

@frame_rate_auto.setter
def frame_rate_auto(self, auto):
    # BUGFIX(log): message previously read 'Set frame rate called with'.
    self.log.debug('Set frame rate auto called with: ' + str(auto))
    auto = bool(auto)
    self.setprops({'AcquisitionFrameRateEnable': not auto})

@property
def frame_rate_limit(self):
    """tuple of float: Get the minimum and maximum frame rate in Hz supported."""
    self.log.debug('Get frame rate limit called')
    # NOTE(review): these node names look model-specific - confirm they exist
    # on the target hardware.
    mn, mx = list(self.getprops(['FrameRateHz_Min', 'FrameRateHz_Max']).values())
    return (mn, mx)

@property
def frame_rate(self):
    """float: Get or set the camera frame rate in Hz. Will set auto frame rate to False."""
    self.log.debug('Get frame rate called')
    return self.getprops(['AcquisitionFrameRate'])['AcquisitionFrameRate']

@frame_rate.setter
def frame_rate(self, frame_rate_hz):
    self.log.debug('Set frame rate called with: ' + str(frame_rate_hz))
    # A manual rate only takes effect with auto mode off.
    self.frame_rate_auto = False
    self.setprops({'AcquisitionFrameRate': frame_rate_hz})

@property
def gain_auto(self):
    """bool: Get or set automatic gain. If True the gain will be continuously updated."""
    self.log.debug('Get gain auto called')
    val = self.getprops(['GainAuto'])['GainAuto'].lower()
    return val == 'continuous'

@gain_auto.setter
def gain_auto(self, auto):
    # BUGFIX(log): message previously read 'Set gain called with'.
    self.log.debug('Set gain auto called with: ' + str(auto))
    auto = bool(auto)
    self.setprops({'GainAuto': 'Continuous' if auto else 'Off'})

@property
def gain_limit(self):
    """tuple of float: Get the minimum and maximum gain in dB supported."""
    self.log.debug('Get gain limit called')
    mn, mx = list(self.getprops(['GainDB_Min', 'GainDB_Max']).values())
    return (mn, mx)

@property
def gain(self):
    """float: Get or set the camera gain in dB. Will set auto gain to False."""
    self.log.debug('Get gain called')
    return self.getprops(['Gain'])['Gain']

@gain.setter
def gain(self, gain_db):
    self.log.debug('Set gain called with: ' + str(gain_db))
    self.gain_auto = False
    self.setprops({'Gain': gain_db})

@property
def exposure_time_auto(self):
    """bool: Get or set automatic exposure time. If True the exposure time will be continuously updated."""
    self.log.debug('Get exposure time auto called')
    val = self.getprops(['ExposureAuto'])['ExposureAuto'].lower()
    return val == 'continuous'

@exposure_time_auto.setter
def exposure_time_auto(self, auto):
    # BUGFIX(log): message previously read 'Set exposure time called with'.
    self.log.debug('Set exposure time auto called with: ' + str(auto))
    auto = bool(auto)
    self.setprops({'ExposureAuto': 'Continuous' if auto else 'Off'})

@property
def exposure_time_limit(self):
    """tuple of float: Get the minimum and maximum exposure time in ms supported."""
    # BUGFIX(log): message previously read 'Get gain limit called' (copy-paste).
    self.log.debug('Get exposure time limit called')
    us_min, us_max = self.getprops(['ExposureTime_FloatMin', 'ExposureTime_FloatMax']).values()
    # The camera reports microseconds; this API exposes milliseconds.
    return (us_min / 1000, us_max / 1000)

@property
def exposure_time(self):
    """float: Get or set the camera exposure time in ms. Will set auto exposure time to False."""
    self.log.debug('Get exposure time called')
    return self.getprops(['ExposureTime'])['ExposureTime'] / 1000

@exposure_time.setter
def exposure_time(self, exposure_ms):
    self.log.debug('Set exposure time called with: ' + str(exposure_ms))
    assert self.is_init, 'Camera must be initialised'
    # The camera node is in microseconds.
    exposure_us = float(exposure_ms) * 1000
    self.exposure_time_auto = False
    self.setprops({'ExposureTime': exposure_us})
@property
def binning(self):
"""int: Number of pixels to bin in each dimension (e.g. 2 gives 2x2 binning). Bins by summing.
Setting will stop and restart camera if running. Will scale size_readout to show the same sensor area.
"""
val_horiz, val_vert = self.getprops(['BinningHorizontal','BinningVertical']).values()
# Only square binning is supported by this API; warn on asymmetric state.
if val_horiz != val_vert:
self.log.warning('Horzontal and vertical binning is not equal.')
return val_horiz
@binning.setter
def binning(self, binning):
self.log.debug('Set binning called with: '+str(binning))
binning = int(binning)
# Capture the current readout area so the same sensor region stays visible
# after the binning factor changes.
initial_size = self.size_readout
initial_bin = self.binning
self.log.debug('Initial sensor readout area and binning: '+str(initial_size)+' ,'+str(initial_bin))
self.setprops({'BinningHorizontal':binning, 'BinningVertical':binning})
# Re-read the (possibly clamped) binning and rescale the readout size.
new_bin = self.binning
bin_scaling = new_bin/initial_bin
new_size = [round(sz/bin_scaling) for sz in initial_size]
self.log.debug('New binning and new size to set: '+str(new_bin)+' ,'+str(new_size))
try:
self.size_readout = new_size
self.log.debug('Set new size to: ' + str(self.size_readout))
except:
self.log.warning('Failed to scale readout after binning change', exc_info=True)
@property
def size_max(self):
"""tuple of int: Get the maximum allowed readout size (width, height) in pixels."""
val_w, val_h = self.getprops(['WidthMax','HeightMax']).values()
return (val_w, val_h)
@property
def size_readout(self):
"""tuple of int: Get or set the number of pixels read out (width, height). Will automatically center.
This applies after binning, i.e. this is the size the output image will be.
Setting will stop and restart camera if running.
"""
val_w, val_h = self.getprops(['Width','Height']).values()
return (val_w, val_h)
@size_readout.setter
def size_readout(self, size):
assert self.is_init, 'Camera must be initialised'
if isinstance(size, (int, float)): size = (size, size)
size = tuple([int(x) for x in size])
self.log.debug(f'Setting size_readout({size})')
maxWidth, maxHeight = self.size_max
new_offset = (round((maxWidth - size[0]) / 2), round((maxHeight - size[1]) / 2))
self.log.debug('Neccessary offset: ' + str(new_offset))
self.setprops({'OffsetX':new_offset[0], 'OffsetY':new_offset[1], 'Width':size[0], 'Height':size[1]})
def add_event_callback(self, method):
"""Add a method to be called when a new image shows up.
The method should have the signature (image, timestamp, \*args, \*\*kwargs) where:
- image (numpy.ndarray): The image data as a 2D numpy array.
- timestamp (datetime.datetime): UTC timestamp when the image event occured (i.e. when the capture
finished).
- \*args, \*\*kwargs should be allowed for forward compatability.
The callback should *not* be used for computations, make sure the method returns as fast as possible.
Args:
method: The method to be called, with signature (image, timestamp, \*args, \*\*kwargs).
"""
self.log.debug('Adding to callbacks: ' + str(method))
self._call_on_image.add(method)
def remove_event_callback(self, method):
"""Remove method from event callbacks."""
self.log.debug('Removing callbacks: ' + str(method))
try:
self._call_on_image.remove(method)
except:
self.log.warning('Could not remove callback', exc_info=True)
    @property
    def is_running(self):
        """bool: True if device is currently acquiring data."""
        # self.log.debug('Checking if running')
        # Not initialised implies not running.
        if not self.is_init: return False
        if self.model.lower() == 'ptgrey':
            # Running means a camera handle exists and PySpin reports it streaming.
            return self._ptgrey_camera is not None and self._ptgrey_camera.IsStreaming()
        else:
            # Defensive: self.model is presumably validated elsewhere -- TODO confirm.
            self.log.warning('Forbidden model string defined.')
            raise RuntimeError('An unknown (forbidden) model is defined: '+str(self.model))
    def start(self):
        """ Start the acquisition. Device must be initialised."""
        assert self.is_init, 'Must initialise first'
        if self.is_running:
            # Starting twice is harmless; just log and return.
            self.log.info('Camera already running, name: '+self.name)
            return
        self.log.debug('Got start command')
        # Reset the frame counter for this acquisition run.
        self._imgs_since_start = 0
        try:
            # NOTE(review): unlike stop()/is_running, this touches the PtGrey handle
            # without checking self.model first -- confirm only 'ptgrey' is supported.
            self._ptgrey_camera.BeginAcquisition()
        except PySpin.SpinnakerException as e:
            self.log.debug('Could not start:', exc_info=True)
            # NOTE(review): relies on SpinnakerException exposing .message -- verify
            # against the installed PySpin version (plain Python 3 exceptions don't).
            if 'already streaming' in e.message:
                self.log.warning('The camera was already streaming...')
            else:
                raise RuntimeError('Failed to start camera acquisition') from e
        self.log.info('Acquisition started, name: '+self.name)
def stop(self):
"""Stop the acquisition."""
if not self.is_running:
self.log.info('Camera was not running, name: '+self.name)
return
self.log.debug('Got stop command')
if self.model.lower() == 'ptgrey':
self.log.debug('Using PtGrey')
try:
self._ptgrey_camera.EndAcquisition()
except:
self.log.debug('Could not stop:', exc_info=True)
raise RuntimeError('Failed to stop camera acquisition')
else:
self.log.warning('Forbidden model string defined.')
raise RuntimeError('An unknown (forbidden) model is defined: '+str(self.model))
self._image_data = None
self._image_timestamp = None
self._got_image_event.clear()
self.log.info('Acquisition stopped, name: '+self.name)
    def get_next_image(self, timeout=10):
        """Get the next image to be completed. Camera does not have to be running.

        Args:
            timeout (float): Maximum time (seconds) to wait for the image before raising TimeoutError.
        Returns:
            numpy.ndarray: 2d array with image data.
        """
        # self.log.debug('Got next image request')
        assert self.is_init, 'Camera must be initialised'
        if not self.is_running:
            self.log.debug('Camera was not running, start and grab the first image')
            # Clear before starting so we only accept an image event raised after start().
            self._got_image_event.clear()
            self.start()
            if not self._got_image_event.wait(timeout):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
            # Restore the camera to its previous (stopped) state.
            self.stop()
        else:
            # self.log.debug('Camera running, grab the first image to show up')
            # NOTE(review): an image already mid-exposure when clear() runs may be
            # returned here; get_new_image() exists for the stricter guarantee.
            self._got_image_event.clear()
            if not self._got_image_event.wait(timeout):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
        return img
    def get_new_image(self, timeout=10):
        """Get an image guaranteed to be started *after* calling this method. Camera does not have to be running.

        Args:
            timeout (float): Maximum time (seconds) to wait for the image before raising TimeoutError.
        Returns:
            numpy.ndarray: 2d array with image data.
        """
        self.log.debug('Got next image request')
        assert self.is_init, 'Camera must be initialised'
        if not self.is_running:
            self.log.debug('Camera was not running, start and grab the first image')
            # When we start the camera ourselves, the first image necessarily
            # began after this call, so a single wait suffices.
            self._got_image_event.clear()
            self.start()
            if not self._got_image_event.wait(timeout):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
            self.stop()
        else:
            self.log.debug('Camera running, grab the second image to show up')
            # The first image to arrive may have been mid-exposure when we were
            # called, so it is discarded and the second one returned. The timeout
            # budget is split evenly across the two waits.
            self._got_image_event.clear()
            if not self._got_image_event.wait(timeout/2):
                raise TimeoutError('Getting image timed out')
            self._got_image_event.clear()
            if not self._got_image_event.wait(timeout/2):
                raise TimeoutError('Getting image timed out')
            img = self._image_data
        return img
def get_latest_image(self):
"""Get latest image in the cache immediately. Camera must be running.
Returns:
numpy.ndarray: 2d array with image data.
"""
self.log.debug('Got latest image request')
assert self.is_running, 'Camera must be running'
return self._image_data | 2.4375 | 2 |
humbledb/errors.py | laiyonghao/humbledb | 26 | 12772656 | <reponame>laiyonghao/humbledb
"""
"""
class NoConnection(RuntimeError):
    """ Raised when a connection is needed but none is available. """
class NestedConnection(RuntimeError):
    """ Raised when trying to nest the same connection within itself. """
class MissingConfig(RuntimeError):
    """ Raised when configuration is not configured correctly at runtime. """
class DatabaseMismatch(RuntimeError):
    """
    Raised when a user tries to use a document with a connection and the
    databases don't match.
    """
def _import_pymongo_errors():
    """ Tries to add all the pymongo exceptions to this module's namespace. """
    import pymongo.errors

    # Names re-exported from pymongo.errors when they exist in the installed
    # pymongo version; missing ones are silently skipped.
    names = (
        'AutoReconnect',
        'BSONError',
        'CertificateError',
        'CollectionInvalid',
        'ConfigurationError',
        'ConnectionFailure',
        'DuplicateKeyError',
        'InvalidBSON',
        'InvalidDocument',
        'InvalidId',
        'InvalidName',
        'InvalidOperation',
        'InvalidStringData',
        'InvalidURI',
        'OperationFailure',
        'PyMongoError',
        'TimeoutError',
        'UnsupportedOption',
    )
    module_ns = globals()
    for name in names:
        try:
            module_ns[name] = getattr(pymongo.errors, name)
        except AttributeError:
            continue


# Call the import helper and remove it
_import_pymongo_errors()
del _import_pymongo_errors
| 2.65625 | 3 |
src/birder/core/queue.py | os4d/birder | 0 | 12772657 | # :copyright: Copyright (c) 2018-2020. OS4D Ltd - All Rights Reserved
# :license: Commercial
# Unauthorized copying of this file, via any medium is strictly prohibited
# Written by <NAME> <<EMAIL>>, October 2020
from birder.core.redis import client
# Subscribe to the shared 'system' channel at import time.
channel = client.pubsub()
channel.subscribe('system')


def send(data):
    """Publish *data* on the 'system' channel (replaces a PEP 8 E731 lambda assignment)."""
    return client.publish('system', data)


# Poll the subscription for the next message, if any.
read = channel.get_message
| 1.585938 | 2 |
old/onsets.py | shlomo-Kallner/coventreiya | 0 | 12772658 | <filename>old/onsets.py
# NOTE(review): reassigning __name__/__package__ at module level is unusual and
# can break `if __name__ == '__main__'` checks and relative imports -- confirm
# this is intentional.
__name__ = 'onsets'
__version__ = '1.5.1'
__package__ = 'phonotactics'
# imports
# Import machinery manipulation: ensure this module's own directory is on
# sys.path so the sibling packages below can be found.
import sys
import os
from os import path
__mod_path = path.dirname(__file__)
if __mod_path not in sys.path:
    sys.path.append(__mod_path)
import utils
from utils.fsm import fsm_state, fsm_transversal
from utils import lists
from utils import gen
import phonology
from phonology import consonants
# generator functions
########################################################
#
#
# Generating the Onsets
#
#
class Onsets:
    """Base class for onset generators.

    Subclasses supply a finite state machine over phoneme-category indices,
    length limits, the category lists and a replacement map; this base class
    derives all allowable category sequences and their concrete phoneme
    realisations from those hooks.
    """
    def __init__(self, major=0, minor=0, patch=0):
        # Bug fix: `tuple(major,minor,patch)` raised TypeError (tuple() takes at
        # most one argument); a tuple literal is what was intended.
        self.__version = (major, minor, patch)
    def version(self):
        """Return the (major, minor, patch) version tuple of this generator."""
        return self.__version
    def finite_state_machine(self):
        """Return the start state of the FSM accepting valid onsets (subclass hook)."""
        return NotImplemented
    def min_length(self):
        """Minimum onset length in category symbols (subclass hook)."""
        return NotImplemented
    def max_length(self):
        """Maximum onset length in category symbols (subclass hook)."""
        return NotImplemented
    def categories(self):
        """Return the list of phoneme lists, one per category index (subclass hook)."""
        return NotImplemented
    def replacment_map(self):
        """Return the category-index -> phoneme-list map (subclass hook)."""
        return NotImplemented
    def num_categories(self):
        """Number of phoneme categories used by the FSM."""
        return len(self.categories())
    def all_allowable_sets_(self):
        """Return every category sequence accepted by the FSM."""
        fsm_ = self.finite_state_machine()
        t = gen.gen_list( self.max_length() ,
                          [ x for x in range(0,self.num_categories()) ] ,
                          self.min_length() )
        results = list()
        for i in t:
            if fsm_transversal( i, fsm_ ):
                results.append(i)
        return results
    def actuals_per_set(self, set_):
        """Expand one category sequence into concrete phoneme strings."""
        repl_map = self.replacment_map()
        return gen.gen_replace_str1( set_, repl_map )
    def actuals_per_set_to_file(self, set_, path, encoding_='utf-8'):
        """Write the expansions of one category sequence to *path*."""
        repl_map = self.replacment_map()
        return gen.gen_replace_str_to_file( set_, repl_map, path, encoding_ )
    def all_actuals(self):
        """Expand every allowable category sequence into concrete phoneme strings."""
        t = self.all_allowable_sets_()
        repl_map = self.replacment_map()
        return gen.gen_actual(t, repl_map)
    def all_actuals_to_file(self, path, encoding_='utf-8'):
        """Write every allowable onset to *path*."""
        t = self.all_allowable_sets_()
        repl_map = self.replacment_map()
        return gen.gen_actual_file(t, repl_map, path, encoding_)
class ver_1_5_1( Onsets ):
    """Onset generator, phonotactics version 1.5.1.

    Implements the onset template "(C2) C1 (C1) (C3) (S (S)) ('ʕ̞')" with
    category indices 0=C1, 1=C2, 2=C3, 3=S (semivowel), 4='ʕ̞'.
    """
    def __init__(self):
        super().__init__(1,5,1)
    def finite_state_machine(self):
        """Build and return the start state of the onset-accepting FSM.

        States are numbered 0-9 (state 1 is the start state). Each table row is
        (accepting, targets) where targets[c] is the destination state on
        category c in 0..4. Transitions are identical to the original
        hand-written remap() calls, just table-driven.
        """
        fsm_ = [ fsm_state(str(x),False) for x in range(0,10) ]
        table = [
            (False, (0, 0, 0, 0, 0)),   # 0: dead state
            (False, (2, 3, 0, 0, 0)),   # 1: start -- expects C1 or a leading C2
            (True,  (4, 6, 5, 7, 9)),   # 2: saw the mandatory C1
            (False, (2, 0, 0, 0, 0)),   # 3: saw leading C2; C1 must follow
            (True,  (6, 6, 5, 7, 9)),   # 4: saw the optional second C1
            (True,  (0, 0, 0, 7, 9)),   # 5: saw the optional C3
            (False, (6, 6, 6, 6, 6)),   # 6: absorbing reject state
            (True,  (6, 6, 6, 8, 9)),   # 7: saw the first S
            (True,  (6, 6, 6, 6, 9)),   # 8: saw the second S
            (True,  (0, 0, 0, 0, 0)),   # 9: saw the final 'ʕ̞'
        ]
        for state, (accepting, targets) in zip(fsm_, table):
            state.remap(accepting, {cat: fsm_[dest] for cat, dest in enumerate(targets)})
        return fsm_[1]
    def min_length(self):
        """Minimum onset length in category symbols."""
        return 1
    def max_length(self):
        """Maximum onset length in category symbols."""
        return 7
    def categories(self):
        """Build the phoneme list for each category index (0=C1 .. 4='ʕ̞')."""
        # for parse string "(C2) C1 (C1) (C3) (S (S)) ('ʕ̞')"
        # will be using:
        # 0 for C1
        # 1 for C2
        # 2 for C3
        # 3 for S
        # 4 for 'ʕ̞'
        # in the generator.
        cons_ = consonants.ver_1_5_4()
        cat_ = [ list() for x in range(0,5) ]
        # cat_[0] is onset_c1
        cat_[0].extend(cons_.stops_())
        cat_[0].extend(cons_.fricatives_())
        cat_[0].extend(cons_.affricates_())
        cat_[0].extend(cons_.onset_approximant_())
        cat_[0].extend(cons_.trill_())
        cat_[0].extend(cons_.all_ejectives_())
        # cat_[1] is onset_c2
        cat_[1].extend(cons_.fricatives_())
        cat_[1].extend(cons_.affricates_())
        cat_[1].extend(cons_.all_ejectives_())
        # cat_[2] is onset_c3
        cat_[2].extend(cons_.rhotic_approximant_())
        cat_[2].extend(cons_.onset_latteral_approximant_ext_())
        # cat_[3] is onset_c4 or "(S)"
        cat_[3].extend(cons_.semi_vowel_())
        # cat[4] is onset_c5 or 'ʕ̞' -- the literal is supplied by replacment_map()
        return cat_
    def replacment_map(self):
        """Category-index -> phoneme-list map used to expand generated sequences.

        Bug fix: the original built a dict of undefined names (c1, c2, c3, s) as
        a discarded expression and then returned NotImplemented, making every
        actuals_* method on the base class unusable. The map now mirrors
        categories(): 0=C1, 1=C2, 2=C3, 3=S, and 4 is the literal 'ʕ̞'.
        """
        cat_ = self.categories()
        return { 0 : cat_[0],
                 1 : cat_[1],
                 2 : cat_[2],
                 3 : cat_[3],
                 4 : [ "ʕ̞" ] }
__current_version_used = ver_1_5_1()
def reset_current_version( version=None , major=None, minor=None, patch=None ):
    """Replace the module-level current generator, by instance or by version number.

    Bug fixes: (1) the assignments previously bound a *local* variable, so the
    module-level __current_version_used was never actually changed -- a
    ``global`` declaration is required; (2) the isinstance check referenced the
    undefined name ``Consonants`` instead of ``Onsets``.

    Raises:
        ValueError: for an unsupported (major, minor, patch) combination.
        TypeError: when neither a valid instance nor a full version triple is given.
    """
    global __current_version_used
    if isinstance( version, Onsets ):
        __current_version_used = version
    elif isinstance(major,int)and isinstance(minor,int)and isinstance(patch,int):
        if major == 1 and minor == 5 :
            if patch == 1:
                __current_version_used = ver_1_5_1()
            else:
                raise ValueError()
        else:
            raise ValueError()
    else:
        raise TypeError()
def get_current():
    """Return the module-level current generator instance."""
    return __current_version_used
def get_current_version():
    """Return the (major, minor, patch) tuple of the current generator."""
    return __current_version_used.version()
#################################################################
#
#
# the old functions for compatibility...
#
#
def gen_():
    """Generate all allowable onset category sequences.

    Parse string: "(C2) C1 (C1) (C3) (S (S)) ('ʕ̞')" with symbols
    1=C1, 2=C2, 3=C3, 4=S, 5='ʕ̞'.
    """
    # Build the parsing FSM from a transition table. States are numbered 0-9
    # (state 1 is the start state); each row is (accepting, targets) where
    # targets[i] is the destination state for symbol i+1 (symbols run 1..5).
    # The transitions are byte-for-byte those of the original remap() calls.
    fsm_ = [ fsm_state(str(x), False) for x in range(0, 10) ]
    table = [
        (False, (0, 0, 0, 0, 0)),   # 0: dead state
        (False, (2, 3, 0, 0, 0)),   # 1: start -- expects C1 or a leading C2
        (True,  (4, 6, 5, 7, 9)),   # 2: saw the mandatory C1
        (False, (2, 0, 0, 0, 0)),   # 3: saw leading C2; C1 must follow
        (True,  (6, 6, 5, 7, 9)),   # 4: saw the optional second C1
        (True,  (0, 0, 0, 7, 9)),   # 5: saw the optional C3
        (False, (6, 6, 6, 6, 6)),   # 6: absorbing reject state
        (True,  (6, 6, 6, 8, 9)),   # 7: saw the first S
        (True,  (6, 6, 6, 6, 9)),   # 8: saw the second S
        (True,  (0, 0, 0, 0, 0)),   # 9: saw the final 'ʕ̞'
    ]
    for state, (accepting, targets) in zip(fsm_, table):
        state.remap(accepting,
                    {sym: fsm_[dest] for sym, dest in enumerate(targets, start=1)})
    # Enumerate candidate sequences of length 1..7 over symbols 1..5 and keep
    # those the FSM accepts.
    candidates = gen.gen_list(7, [1, 2, 3, 4, 5], 1)
    return [seq for seq in candidates if fsm_transversal(seq, fsm_[1])]
########################################################
#
# Actual Onsets Generation Functions
#
#
def gen_c1():
    """Build the C1 (main onset consonant) phoneme list."""
    c1_ = list()
    # NOTE(review): stop, affricate, trill, ejective and the gen_*_() helpers are
    # not defined or imported in this module as written -- presumably they come
    # from the phonology.consonants namespace; verify the intended import.
    c1_.extend(stop)
    c1_.extend(gen_fric_())
    c1_.extend(affricate)
    c1_.extend(gen_onset_appr_())
    c1_.extend(trill)
    c1_.extend(ejective)
    return c1_
def gen_c2():
    """Build the C2 (leading onset consonant) phoneme list."""
    c2_ = list()
    # NOTE(review): gen_fric_, affricate and ejective are not defined in this
    # module -- see the note on gen_c1().
    c2_.extend(gen_fric_())
    c2_.extend(affricate)
    c2_.extend(ejective)
    return c2_
def gen_c3():
    """Build the C3 (liquid/approximant) phoneme list."""
    c3_ = list()
    # NOTE(review): rhotic_approximant and onset_approximant_ext are not defined
    # in this module -- see the note on gen_c1().
    c3_.extend(rhotic_approximant)
    c3_.extend(onset_approximant_ext)
    return c3_
def gen_actual_():
    """Expand every allowable onset into concrete phoneme strings, in memory.

    WARNING!!! this function uses A LOT of memory and may crash python!!!

    Bug fix: the warning used to span two adjacent string literals; only the
    first was the docstring -- the second was a silently discarded expression
    statement.
    """
    results = list()
    c1 = gen_c1()
    c2 = gen_c2()
    c3 = gen_c3()
    t = gen_()
    # NOTE(review): semi_vowel is not defined in this module -- see gen_c1().
    s = semi_vowel
    # in the generator:
    # 1 for C1
    # 2 for C2
    # 3 for C3
    # 4 for S
    # 5 for 'ʕ̞'
    repl_map = { 1 : c1,
                 2 : c2,
                 3 : c3,
                 4 : s,
                 5 : [ "ʕ̞" ] }
    for i in t:
        j = gen.gen_replace_str1( i, repl_map )
        results.extend(j)
    return results
def gen_actual1_():
    """Expand every allowable onset via gen_actual (lazier than gen_actual_()).

    WARNING!!! this function uses A LOT of memory and may crash python!!!

    Bug fix: as in gen_actual_(), the second warning line was a discarded bare
    string literal, not part of the docstring; they are now merged.
    """
    c1 = gen_c1()
    c2 = gen_c2()
    c3 = gen_c3()
    t = gen_()
    # in the generator:
    # 1 for C1
    # 2 for C2
    # 3 for C3
    # 4 for S
    # 5 for 'ʕ̞'
    repl_map = { 1 : c1,
                 2 : c2,
                 3 : c3,
                 4 : semi_vowel,
                 5 : [ "ʕ̞" ] }
    return gen_actual(t, repl_map)
def gen_actual_file_(path, encoding_='utf-8'):
    """Write every generated onset to *path* (one per line); return the line count.

    Bug fix: the output file was opened without ever being closed; a ``with``
    block now guarantees the handle is flushed and closed even on error.
    """
    results = 0
    c1 = gen_c1()
    c2 = gen_c2()
    c3 = gen_c3()
    t = gen_()
    # in the generator:
    # 1 for C1
    # 2 for C2
    # 3 for C3
    # 4 for S
    # 5 for 'ʕ̞'
    repl_map = { 1 : c1,
                 2 : c2,
                 3 : c3,
                 4 : semi_vowel,
                 5 : [ "ʕ̞" ] }
    with open(path, "w", encoding=encoding_) as f:
        for i in t:
            t1 = gen.gen_replace_str1( i, repl_map )
            for j in t1:
                print(j, file=f)
                results += 1
    return results
# gen_actual_file(list_, repl_map, path, encoding_='utf-8')
def gen_actual_file1_(path, encoding_='utf-8'):
    """Write every generated onset to *path* via the gen.gen_actual_file helper."""
    c1 = gen_c1()
    c2 = gen_c2()
    c3 = gen_c3()
    t = gen_()
    # in the generator:
    # 1 for C1
    # 2 for C2
    # 3 for C3
    # 4 for S
    # 5 for 'ʕ̞'
    # NOTE(review): semi_vowel is not defined in this module -- see gen_c1().
    repl_map = { 1 : c1,
                 2 : c2,
                 3 : c3,
                 4 : semi_vowel,
                 5 : [ "ʕ̞" ] }
    return gen.gen_actual_file(t, repl_map, path, encoding_)
| 2.40625 | 2 |
webapp/conf/__init__.py | buppter/EMS | 2 | 12772659 | """
author: buppter
datetime: 2020/1/16 9:57 下午
""" | 1.46875 | 1 |
Day 3/setup.py | SSSCodingClub/Jumper | 0 | 12772660 | <gh_stars>0
import pygame
# Initialise all pygame modules before any display/game code runs.
pygame.init()
# Window dimensions in pixels.
SCREEN_WIDTH, SCREEN_HEIGHT = 640, 640
| 1.515625 | 2 |
app/support/admin.py | gab98fra/curriculo.page-project | 1 | 12772661 | from django.contrib import admin
from .models import FAQModel, AssistanceModel, FeedbackModel
# Expose the support models in the Django admin with default ModelAdmin options.
admin.site.register(FAQModel)
admin.site.register(AssistanceModel)
admin.site.register(FeedbackModel)
| 1.21875 | 1 |
rstblog/modules/__init__.py | mitsuhiko/rstblog | 44 | 12772662 | # -*- coding: utf-8 -*-
"""
rstblog.modules
~~~~~~~~~~~~~~~
The module interface.
:copyright: (c) 2010 by <NAME>.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
def add_module_path(folder):
    """Adds a new search path to the list of search paths."""
    import os
    # __path__ is this package's module search path; appending it lets plugin
    # modules living in *folder* be imported as rstblog.modules.<name>.
    __path__.append(os.path.abspath(folder))
def find_module(name):
    """Returns the module by the given name or raises an ImportError."""
    import importlib
    # importlib.import_module returns the (sub)module object directly, avoiding
    # the legacy __import__ + sys.modules two-step.
    return importlib.import_module('rstblog.modules.' + name)
| 1.539063 | 2 |
azure/download_from_blob_legacy.py | michhar/my-azure-and-ml-utils | 12 | 12772663 | <gh_stars>10-100
"""
Download data from blob storage with legacy package version (tested with legacy
Python Azure Blob SDK v2.1)
Overwrites any existing folder from this container on local system.
Make sure to set the environment variables before running:
STORAGE_ACCOUNT_NAME
STORAGE_ACCOUNT_KEY
Based upon:
https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python
"""
import os
from azure.storage.blob import BlockBlobService
import argparse
def arg_parse():
    """
    Parse the command-line arguments for the blob download script.
    """
    parser = argparse.ArgumentParser(
        description='This script is for downloading blob files from a blob storage container on Azure.')
    parser.add_argument("--container", dest='container', type=str,
                        help="Blob storage container name")
    parser.add_argument("--output-dir", dest='output_dir', type=str,
                        help="Local folder to which files will be saved")
    return parser.parse_args()
args = arg_parse()
# Make sure the destination root exists before any downloads start.
os.makedirs(args.output_dir, exist_ok=True)
# Credentials are taken from the environment (STORAGE_ACCOUNT_NAME / STORAGE_ACCOUNT_KEY).
block_blob_service = BlockBlobService(account_name=os.getenv('STORAGE_ACCOUNT_NAME'),
                                      account_key=os.getenv('STORAGE_ACCOUNT_KEY'))
container_name = args.container
generator = block_blob_service.list_blobs(container_name)
for blob in generator:
    print("\t Blob name: " + blob.name)
    # Blob names may contain '/' separators; recreate that folder hierarchy locally.
    os.makedirs(os.path.join(args.output_dir, os.path.dirname(blob.name)), exist_ok=True)
    # Download the blob(s). (Removed non-Python residue fused onto this line in the
    # original source -- a text-extraction artifact.)
    block_blob_service.get_blob_to_path(container_name, blob.name, os.path.join(args.output_dir, blob.name))
training/dataframe.py | etienne912/FragmentClassifier | 0 | 12772664 | import os
import sys
import cv2
import numpy as np
import pandas as pd
import os
import matplotlib.image as mpimg
def file_to_fragment(path, return_path):
    """Cut every file under *path* into 4096-byte fragments rendered as 64x64
    grayscale JPEG images, grouped by extension under *return_path*.

    The class label is the second dot-separated token of the file name (files
    are expected to be named like "<id>.<ext>..."). The first 512 bytes of each
    file are skipped; each subsequent full 4096-byte chunk becomes one image.

    Bug fixes: removed the no-op ``path = path`` assignment, and the input
    files are now opened with ``with`` -- previously one file descriptor leaked
    per processed file.
    """
    file_paths = []
    file_classes = []
    for subdirectory in os.walk(path):
        for file_name in subdirectory[2]:
            file_paths.append(os.path.join(subdirectory[0], file_name))
            file_classes.append(file_name.split(".")[1])
    df = pd.DataFrame({"class": file_classes, "path": file_paths})
    for cls in ["csv", "doc", "gif", "gz", "html", "jpg", "pdf", "png", "ppt", "ps", "txt", "xls", "xml"]:
        path_folder = os.path.join(return_path + "/" + cls)
        if not os.path.isdir(path_folder):
            os.mkdir(path_folder)
        count = 0
        for row in df[df["class"] == cls][["path"]].values:
            filesize = os.stat(row[0]).st_size
            # NOTE(review): 1024 bytes are subtracted here but only 512 are
            # skipped below -- confirm this offset is intended.
            nb_images = (filesize - 1024) // 4096
            with open(row[0], "rb") as fragment_file:
                if nb_images != 0:
                    fragment_file.read(512)
                    data = fragment_file.read(4096)
                    for i in range(nb_images):
                        flat_array = np.array(bytearray(data))
                        gray_image = flat_array.reshape(64, 64)
                        backtorgb = cv2.cvtColor(gray_image, cv2.COLOR_GRAY2RGB)
                        cv2.imwrite(os.path.join(path_folder, str(count) + '.jpg'), backtorgb)
                        data = fragment_file.read(4096)
                        count += 1
def read_fragment_to_dataframe(path: str):
    """Walk *path* and print each image found under it.

    NOTE(review): this function looks unfinished -- it appends the whole
    os.walk() tuple (not a class label) to ``cls``, rebinds ``image`` as both
    an accumulator list and the loop variable, never builds a DataFrame despite
    its name, and returns None. Confirm intended behaviour before relying on it.
    """
    cls = []
    image = []
    for subdirectory in os.walk(path):
        for image in subdirectory[2]:
            cls.append(subdirectory)
            img = mpimg.imread(os.path.join(subdirectory[0], image))
            print(img)
if __name__ == '__main__':
    # Fragment the GovDocs corpus into 64x64 JPEG tiles under ./data/Fragments.
    input_path = "./data/GovDocs"
    return_path = "./data/Fragments"
    file_to_fragment(path=input_path, return_path=return_path)
| 2.734375 | 3 |
create_model.py | jjac111/Stock-Trend-Predictor | 0 | 12772665 | <reponame>jjac111/Stock-Trend-Predictor
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, Callback
from keras.models import Model
from keras.layers import Dense, Input, Dropout, Activation
# NOTE(review): these module-level hyperparameters are shadowed by identical
# locals inside create_model() and appear otherwise unused -- confirm which
# copy is authoritative.
input_dim = 11
d1 = 2*input_dim
d2 = round(input_dim/2)
d3 = 1
activation = 'sigmoid'
dropout = 0.4
epochs = 10000
lr = 0.01
batch = 100
def create_model(t, n1, n2):
    """Build a small fully-connected binary classifier.

    Architecture: input(11) -> Dense(22, sigmoid) -> Dropout(0.4)
    -> Dense(6, sigmoid) -> Dropout(0.4) -> Dense(1, sigmoid) -> sigmoid.

    Args:
        t, n1, n2: identifiers combined into the model name "t-n1-n2";
            they do not affect the architecture.
    Returns:
        An uncompiled keras Model with an ``output_length`` identity attribute.
    """
    input_dim = 11
    d1 = 2*input_dim
    d2 = round(input_dim/2)
    d3 = 1
    activation = 'sigmoid'
    dropout = 0.4
    # Removed unused locals (epochs, lr, batch): training settings belong to the
    # caller, not to the architecture definition.
    input_layer = Input(name='the_input', shape=(input_dim,), batch_shape=(None, input_dim))
    # Add dense layers
    dense_1 = Dense(d1, activation=activation)(input_layer)
    drop_1 = Dropout(dropout)(dense_1)
    dense_2 = Dense(d2, activation=activation)(drop_1)
    drop_2 = Dropout(dropout)(dense_2)
    dense_3 = Dense(d3, activation=activation)(drop_2)
    # Add sigmoid activation layer
    y_pred = Activation('sigmoid', name='sigmoid')(dense_3)
    # Specify the model
    model = Model(inputs=input_layer, outputs=y_pred, name=f'{t}-{n1}-{n2}')
    model.output_length = lambda x: x
    return model
mysite/mysite/context_processors.py | lirixiang123/question_repo | 0 | 12772666 | <gh_stars>0
from . import settings
def site_info(request):
    """Django context processor exposing site settings to templates.

    NOTE(review): this returns locals(), so the template context receives the
    keys 'request' and 'site' (a dict) rather than the individual SITE_*
    values -- confirm templates access these as {{ site.SITE_NAME }} etc.
    """
    # Site information.
    site = {}
    # site["SITE_URL"] = settings.SITE_URL
    site["SITE_NAME"] = settings.SITE_NAME
    site["SITE_DESC"] = settings.SITE_DESC
    site["KEYWORDS"]=settings.SITE_KEYWORDS
    # site["PRO_GIT"] = settings.PRO_GIT
    # site["PRO_RSS"] = settings.PRO_RSS
    # site["WEIBO_URL"] = settings.WEIBO_URL
    return locals()
livelayermanager/models.py | parksandwildlife/borgcollector | 2 | 12772667 | <gh_stars>1-10
from __future__ import unicode_literals
import re
import json
import logging
import shutil
import os
import hglib
from datetime import datetime
import requests
from django.conf import settings
from django.db import models
from django.contrib.postgres.fields import HStoreField
from django.utils import timezone
from django.core.validators import RegexValidator
from django.dispatch import receiver
from django.db.models.signals import pre_save, pre_delete,post_save,post_delete
from django.db import transaction
from django.core.exceptions import ValidationError
from tablemanager.models import Workspace
from borg_utils.borg_config import BorgConfiguration
from borg_utils.resource_status import ResourceStatus,ResourceStatusMixin,ResourceAction
from borg_utils.transaction import TransactionMixin
from borg_utils.db_util import DbUtil
from borg_utils.spatial_table import SpatialTableMixin
from borg_utils.signals import inherit_support_receiver
from borg_utils.models import BorgModel,SQLField
from borg_utils.utils import file_md5
from borg_utils.hg_batch_push import try_set_push_owner, try_clear_push_owner, increase_committed_changes, try_push_to_repository
logger = logging.getLogger(__name__)
slug_re = re.compile(r'^[a-z_][a-z0-9_]+$')
validate_slug = RegexValidator(slug_re, "Slug can only start with lowercase letters or underscore, and contain lowercase letters, numbers and underscore", "invalid")
default_layer_geoserver_setting = {
"create_cache_layer": True,
"client_cache_expire": 0,
"meta_tiling_factor": [1, 1],
"server_cache_expire": 0,
"gridsets": {
"EPSG:3857": {
"enabled": True
},
"internal.fms.wa.gov.au/apps/sss": {
"enabled": True}
},
}
default_layer_geoserver_setting_json = json.dumps(default_layer_geoserver_setting)
# Create your models here.
class Datasource(BorgModel,ResourceStatusMixin,TransactionMixin):
name = models.SlugField(max_length=64,null=False,blank=False,editable=True,unique=True, help_text="The name of live layer datasource", validators=[validate_slug])
workspace = models.ForeignKey(Workspace, null=False,blank=False)
host = models.CharField(max_length=128,null=False,blank=False)
port = models.PositiveIntegerField(blank=False,default=5432)
db_name = models.CharField(max_length=64,null=False,blank=False,editable=True, help_text="The name of live layer database")
user = models.CharField(max_length=32,null=True,blank=True)
password = <PASSWORD>.CharField(max_length=32,null=True,blank=True)
schema = models.CharField(max_length=32,blank=False,default="public")
filter = models.CharField(max_length=32,blank=True,null=True)
geoserver_setting = models.TextField(blank=True,null=True,editable=False)
status = models.CharField(max_length=32,null=False,editable=False,choices=ResourceStatus.layer_status_options)
layers = models.PositiveIntegerField(null=False,editable=False,default=0)
last_refresh_time = models.DateTimeField(null=True,editable=False)
last_publish_time = models.DateTimeField(null=True,editable=False)
last_unpublish_time = models.DateTimeField(null=True,editable=False)
last_modify_time = models.DateTimeField(null=False,editable=False,default=timezone.now)
_filters = None
def clean(self):
if not self.pk:
self.status = ResourceStatus.New.name
else:
#already exist
self.status = self.next_status(ResourceAction.UPDATE)
if self.filter:
try:
[re.compile(f.strip()) for f in self.filter.split(";") if f.strip()]
except:
raise ValidationError("Invalid filter.")
self.last_modify_time = timezone.now()
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """Save inside the shared 'datasource_save' transaction, then re-scan
        layers if any connection-affecting field changed."""
        try:
            # Only the outermost save opens the atomic block; nested saves reuse it.
            if self.try_begin_transaction("datasource_save"):
                with transaction.atomic():
                    super(Datasource,self).save(force_insert,force_update,using,update_fields)
            else:
                super(Datasource,self).save(force_insert,force_update,using,update_fields)
        finally:
            self.try_clear_transaction("datasource_save")
        # Re-discover layers when connection details or the filter changed in a
        # full save (update_fields is None means all fields were considered).
        if (update_fields is None
            and (self.changed_fields == "__all__" or any([f in self.changed_fields for f in ["host","port","db_name","schema","user","password","filter"]]))
           ):
            self.refresh()
    def delete(self,using=None):
        """Delete inside the shared 'datasource_delete' transaction (same
        nesting scheme as save())."""
        logger.info('Delete {0}:{1}'.format(type(self),self.name))
        try:
            if self.try_begin_transaction("datasource_delete"):
                with transaction.atomic():
                    super(Datasource,self).delete(using)
            else:
                super(Datasource,self).delete(using)
        finally:
            self.try_clear_transaction("datasource_delete")
@property
def dbUtil(self):
dbUtil = getattr(self,"_dbUtil") if hasattr(self,"_dbUtil") else None
if not dbUtil:
dbUtil = DbUtil(self.db_name,self.host,self.port,self.user,self.password)
setattr(self,"_dbUtil",dbUtil)
return dbUtil
    def filter_table(self, table):
        """Return True if *table* passes self.filter.

        An empty filter admits every table; otherwise the compiled patterns are
        cached on first use and a table passes if ANY pattern matches it.
        """
        if not self.filter:
            return True
        if not self._filters:
            self._filters = [re.compile(f.strip()) for f in self.filter.split(";") if f.strip()]
        return any([f.search(table) for f in self._filters])
    def refresh(self):
        """Re-scan the database and sync Layer rows to the current tables/views.

        Layers whose backing table/view disappeared (their last_refresh_time
        was not touched in this pass) are deleted at the end.
        """
        self.try_begin_transaction("datasource_refresh")
        try:
            #modify the table data
            now = timezone.now()
            tables = self.dbUtil.get_all_tables(self.schema)
            views = self.dbUtil.get_all_views(self.schema)
            # NOTE(review): 'now' is re-assigned here, so it differs slightly from
            # the value computed above -- harmless since the first value is unused.
            now = timezone.now()
            #refresh tables and views
            # NOTE(review): the stored type is singular 'Table' but plural 'Views' --
            # confirm downstream consumers expect this asymmetry.
            for typename, tables in [["Table", tables],["Views", views]]:
                for table_name in tables:
                    if not self.filter_table(table_name):
                        continue
                    layer, created = Layer.objects.get_or_create(datasource=self,
                            table=table_name,
                            defaults={
                                "type": typename,
                                "last_refresh_time": now,
                                "geoserver_setting":default_layer_geoserver_setting_json,
                                "status":ResourceStatus.New.name
                            }
                        )
                    layer.refresh(now)
            # Anything not touched this pass no longer exists in the database.
            Layer.objects.filter(datasource=self).exclude(last_refresh_time=now).delete()
            for viewlayer in self.sqlviewlayer_set.all():
                viewlayer.refresh(now)
            self.layers = Layer.objects.filter(datasource=self).count()
            self.last_refresh_time = now
            self.save(update_fields=["layers","last_refresh_time"])
        finally:
            self.try_clear_transaction("datasource_refresh")
def json_filename(self,action='publish'):
if action in ['publish','unpublish']:
return os.path.join(self.workspace.publish_channel.name,"live_stores", "{}.{}.json".format(self.workspace.name, self.name))
else:
return os.path.join(self.workspace.publish_channel.name,"live_stores", "{}.{}.{}.json".format(self.workspace.name, self.name,action))
def json_filename_abs(self,action='publish'):
return os.path.join(BorgConfiguration.BORG_STATE_REPOSITORY, self.json_filename(action))
def unpublish(self):
publish_file = self.json_filename_abs('publish')
publish_json = None
if os.path.exists(publish_file):
with open(publish_file,"r") as f:
publish_json = json.loads(f.read())
else:
publish_json = {}
json_file = self.json_filename_abs('unpublish');
json_out = None
try_set_push_owner("liveserver")
hg = None
try:
if publish_json.get("action","publish") != "remove":
json_out = {}
json_out["name"] = self.name
json_out["workspace"] = self.workspace.name
json_out["channel"] = self.workspace.publish_channel.name
json_out["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data
json_out['action'] = 'remove'
#retrieve meta data from the last publish task
meta_json = publish_json
if "meta" in publish_json and "file" in publish_json["meta"]:
meta_file = publish_json["meta"]["file"][len(BorgConfiguration.MASTER_PATH_PREFIX):]
if os.path.exists(meta_file):
with open(meta_file,"r") as f:
meta_json = json.loads(f.read())
else:
meta_json = {}
for key in ["name","workspace","channel","sync_geoserver_data"]:
if key in meta_json:
json_out[key] = meta_json[key]
else:
json_out = publish_josn
json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
#create the dir if required
if not os.path.exists(os.path.dirname(json_file)):
os.makedirs(os.path.dirname(json_file))
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
hg.commit(include=[json_file],addremove=True, user="borgcollector", message="Unpublish live store {}.{}".format(self.workspace.name, self.name))
increase_committed_changes()
try_push_to_repository("liveserver",hg)
finally:
if hg: hg.close()
try_clear_push_owner("liveserver")
    def publish(self):
        """
        publish store's json reference (if exists) to the repository,
        """
        try_set_push_owner("liveserver")
        hg = None
        try:
            # Assemble the connection/metadata description consumed on the slave side.
            meta_data = {}
            meta_data["name"] = self.name
            meta_data["host"] = self.host
            meta_data["port"] = self.port
            meta_data["database"] = self.db_name
            meta_data["user"] = self.user
            meta_data["passwd"] = self.password
            meta_data["schema"] = self.schema
            meta_data["workspace"] = self.workspace.name
            meta_data["channel"] = self.workspace.publish_channel.name
            meta_data["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data
            if self.geoserver_setting:
                meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)
            #write meta data file
            file_name = "{}.{}.meta.json".format(self.workspace.name,self.name)
            meta_file = os.path.join(BorgConfiguration.LIVE_STORE_DIR,file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))
            # NOTE(review): json.dump into a binary-mode handle only works on
            # Python 2 -- confirm the target interpreter before porting.
            with open(meta_file,"wb") as output:
                json.dump(meta_data, output, indent=4)
            # The state-repo task references the meta file by path + md5 so the
            # consumer can detect changes.
            json_out = {}
            json_out['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}
            json_out['action'] = 'publish'
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
            json_filename = self.json_filename_abs('publish');
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))
            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.commit(include=[json_filename],addremove=True, user="borgcollector", message="Update live store {}.{}".format(self.workspace.name, self.name))
            increase_committed_changes()
            try_push_to_repository("liveserver",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("liveserver")
    def __str__(self):
        # Datasources are displayed by name in admin lists and logs.
        return self.name
    class Meta:
        # Default ordering for querysets and admin list views.
        ordering = ("name",)
class Layer(BorgModel,ResourceStatusMixin,TransactionMixin,SpatialTableMixin):
    """A live layer backed by a table in an external ``Datasource``.

    Publishing writes metadata + task json files into the borg state
    mercurial repository; the catalogue service (CSW) is kept in sync.
    """
    # Backing table name and the datasource that owns it.
    table = models.SlugField(max_length=64,null=False,editable=True)
    datasource = models.ForeignKey(Datasource,editable=False,on_delete=models.CASCADE)
    type = models.CharField(max_length=8,null=False,editable=False)
    # Cached spatial description and CREATE TABLE sql, used to detect changes.
    spatial_info = models.TextField(max_length=512,editable=False,null=True,blank=True)
    sql = models.TextField(null=True, editable=False)
    # Optional manual bounding box that overrides the computed one.
    kmi_bbox = models.CharField(max_length=128,null=True,blank=True,editable=True)
    # Optional publish name; falls back to the lower-cased table name.
    name = models.CharField(max_length=256,null=True,editable=True,blank=True,unique=True)
    geoserver_setting = models.TextField(blank=True,null=True,editable=False)
    status = models.CharField(max_length=32, null=False, editable=False,choices=ResourceStatus.layer_status_options)
    # Lifecycle timestamps maintained by refresh/publish/unpublish.
    last_publish_time = models.DateTimeField(null=True,editable=False)
    last_unpublish_time = models.DateTimeField(null=True,editable=False)
    last_refresh_time = models.DateTimeField(null=False,editable=False)
    last_modify_time = models.DateTimeField(null=True,editable=False)
    @property
    def table_name(self):
        """Name of the backing table (the ``table`` field)."""
        return self.table
    @property
    def table_schema(self):
        """Schema of the backing table, taken from the owning datasource."""
        return self.datasource.schema
    @property
    def db_util(self):
        """Database utility object of the owning datasource."""
        return self.datasource.dbUtil
@staticmethod
def is_system_table(table):
system_table_prefixes = ("django_","auth_","reversion_","pg_")
return any([table[0:len(prefix)] == prefix for prefix in system_table_prefixes])
    def clean(self):
        # Django validation hook: stamp the modification time and advance
        # the resource status for an UPDATE action.
        self.last_modify_time = timezone.now()
        self.status = self.next_status(ResourceAction.UPDATE)
    @property
    def kmi_name(self):
        """Publish name: explicit ``name`` if set, else lower-cased table name."""
        return self.name or self.table.lower()
    def refresh(self,time=None):
        """Re-read the table's spatial info and persist the layer if the
        table definition or spatial info changed.

        Returns False for system tables (never refreshed), True otherwise.
        Guarded by the "livelayer_refresh" pseudo-transaction.
        """
        self.try_begin_transaction("livelayer_refresh")
        try:
            time = time or timezone.now()
            if Layer.is_system_table(self.table):
                return False
            self.last_refresh_time = time
            new_spatial_info = self.refresh_spatial_info().get_spatial_info()
            # Only save when something observable changed: missing/stale
            # CREATE TABLE sql or different spatial info.
            if not self.sql or self.sql != self.get_create_table_sql() or self.spatial_info != new_spatial_info:
                self.spatial_info = new_spatial_info
                self.last_modify_time = time
                self.sql = self.get_create_table_sql()
                self.status = self.next_status(ResourceAction.UPDATE)
                self.save()
            return True
        finally:
            self.try_clear_transaction("livelayer_refresh")
    @property
    def builtin_metadata(self):
        """Metadata dict derived purely from this layer and its publish channel."""
        meta_data = {}
        meta_data["workspace"] = self.datasource.workspace.name
        meta_data["name"] = self.kmi_name
        meta_data["service_type"] = "WMS"
        # Service type: none for plain tables or non-geoserver channels,
        # WMS for rasters, WFS for vector data.
        if self.is_normal or not self.datasource.workspace.publish_channel.sync_geoserver_data:
            meta_data["service_type"] = ""
        elif self.is_raster:
            meta_data["service_type"] = "WMS"
            meta_data["service_type_version"] = self.datasource.workspace.publish_channel.wms_version
        else:
            meta_data["service_type"] = "WFS"
            meta_data["service_type_version"] = self.datasource.workspace.publish_channel.wfs_version
        meta_data["modified"] = (self.last_modify_time or self.last_refresh_time).astimezone(timezone.get_default_timezone()).strftime("%Y-%m-%d %H:%M:%S.%f")
        #bbox: an explicit kmi_bbox overrides the computed bounding box
        meta_data["bounding_box"] = self.kmi_bbox or (json.dumps(self.bbox) if self.bbox else None)
        meta_data["crs"] = self.crs or None
        #ows resource
        meta_data["ows_resource"] = {}
        if meta_data["service_type"] == "WFS" and self.datasource.workspace.publish_channel.wfs_endpoint:
            meta_data["ows_resource"]["wfs"] = True
            meta_data["ows_resource"]["wfs_version"] = self.datasource.workspace.publish_channel.wfs_version
            meta_data["ows_resource"]["wfs_endpoint"] = self.datasource.workspace.publish_channel.wfs_endpoint
        # NOTE(review): this condition tests wfs_endpoint while filling in
        # the wms keys; possibly it should test wms_endpoint - confirm
        # against the publish channel model before changing.
        if meta_data["service_type"] in ("WFS","WMS") and self.datasource.workspace.publish_channel.wfs_endpoint:
            meta_data["ows_resource"]["wms"] = True
            meta_data["ows_resource"]["wms_version"] = self.datasource.workspace.publish_channel.wms_version
            meta_data["ows_resource"]["wms_endpoint"] = self.datasource.workspace.publish_channel.wms_endpoint
        geo_settings = json.loads(self.geoserver_setting) if self.geoserver_setting else {}
        if geo_settings.get("create_cache_layer",False) and self.datasource.workspace.publish_channel.gwc_endpoint:
            meta_data["ows_resource"]["gwc"] = True
            meta_data["ows_resource"]["gwc_endpoint"] = self.datasource.workspace.publish_channel.gwc_endpoint
        return meta_data
def update_catalogue_service(self,md5=False,extra_datas=None):
meta_data = self.builtin_metadata
if extra_datas:
meta_data.update(extra_datas)
bbox = meta_data.get("bounding_box",None)
crs = meta_data.get("crs",None)
#update catalog service
res = requests.post("{}/catalogue/api/records/?style_content=true".format(settings.CSW_URL),json=meta_data,auth=(settings.CSW_USER,settings.CSW_PASSWORD),verify=settings.CSW_CERT_VERIFY)
if 400 <= res.status_code < 600 and res.content:
res.reason = "{}({})".format(res.reason,res.content)
res.raise_for_status()
try:
meta_data = res.json()
except:
if res.content.find("microsoft") >= 0:
res.status_code = 401
res.reason = "Please login"
else:
res.status_code = 400
res.reason = "Unknown reason"
res.raise_for_status()
#process styles
styles = meta_data.get("styles",[])
#filter out qml and lyr styles
sld_styles = [s for s in meta_data.get("styles",[]) if s["format"].lower() == "sld" and s.get("raw_content")]
meta_data["styles"] = {}
style_dump_dir = BorgConfiguration.LIVE_LAYER_DIR
if not os.path.exists(style_dump_dir):
os.makedirs(style_dump_dir)
for style in sld_styles:
if style["default"]:
#default sld file
meta_data["default_style"] = style["name"]
#write the style into file system
style_file = os.path.join(style_dump_dir,"{}.{}.{}.sld".format(self.datasource.workspace.name,self.kmi_name,style["name"]))
with open(style_file,"wb") as f:
f.write(style["raw_content"].decode("base64"))
if md5:
meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"],"md5":file_md5(style_file)}
else:
meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"]}
#add extra data to meta data
meta_data["workspace"] = self.datasource.workspace.name
meta_data["override_bbox"] = True if self.kmi_bbox else False
meta_data["schema"] = self.datasource.schema
meta_data["name"] = self.kmi_name
meta_data["table"] = self.table
meta_data["datastore"] = self.datasource.name
meta_data["auth_level"] = self.datasource.workspace.auth_level
meta_data["preview_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, BorgConfiguration.PREVIEW_DIR)
meta_data["spatial_data"] = self.is_spatial
if self.is_spatial:
meta_data["bbox"] = bbox
meta_data["crs"] = crs
meta_data["spatial_type"] = self.spatial_type
meta_data["spatial_column"] = self.spatial_column
meta_data["channel"] = self.datasource.workspace.publish_channel.name
meta_data["sync_geoserver_data"] = self.datasource.workspace.publish_channel.sync_geoserver_data
if self.geoserver_setting:
meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)
#bbox
if "bounding_box" in meta_data:
del meta_data["bounding_box"]
return meta_data
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """Save only when data actually changed, inside the
        "livelayer_save" pseudo-transaction (atomic when we own it)."""
        if not self.data_changed: return
        try:
            if self.try_begin_transaction("livelayer_save"):
                with transaction.atomic():
                    super(Layer,self).save(force_insert,force_update,using,update_fields)
            else:
                # Another caller already owns the pseudo-transaction.
                super(Layer,self).save(force_insert,force_update,using,update_fields)
        finally:
            self.try_clear_transaction("livelayer_save")
    def delete(self,using=None):
        """Delete inside the "livelayer_delete" pseudo-transaction."""
        try:
            if self.try_begin_transaction("livelayer_delete"):
                with transaction.atomic():
                    super(Layer,self).delete(using)
            else:
                super(Layer,self).delete(using)
        finally:
            self.try_clear_transaction("livelayer_delete")
def json_filename(self,action='publish'):
if action in ['publish','unpublish']:
return os.path.join(self.datasource.workspace.publish_channel.name,"live_layers", "{}.{}.json".format(self.datasource.workspace.name, self.kmi_name))
else:
return os.path.join(self.datasource.workspace.publish_channel.name,"live_layers", "{}.{}.{}.json".format(self.datasource.workspace.name, self.kmi_name,action))
    def json_filename_abs(self,action='publish'):
        """Absolute path of the json file inside the borg state repository."""
        return os.path.join(BorgConfiguration.BORG_STATE_REPOSITORY, self.json_filename(action))
def unpublish(self):
#use published meta file as the meta file for unpublish
publish_file = self.json_filename_abs('publish')
publish_json = None
if os.path.exists(publish_file):
with open(publish_file,"r") as f:
publish_json = json.loads(f.read())
else:
publish_json = {}
json_file = self.json_filename_abs('unpublish');
json_out = None
#remove it from catalogue service
res = requests.delete("{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL,self.datasource.workspace.name,self.kmi_name),auth=(settings.CSW_USER,settings.CSW_PASSWORD),verify=settings.CSW_CERT_VERIFY)
if res.status_code != 404:
res.raise_for_status()
try_set_push_owner("livelayer")
hg = None
try:
if publish_json.get("action","publish") != "remove":
json_out = {}
json_out["name"] = self.kmi_name
json_out["workspace"] = self.datasource.workspace.name
json_out["styles"] = {}
json_out["spatial_data"] = self.is_spatial
json_out["channel"] = self.datasource.workspace.publish_channel.name
json_out["sync_geoserver_data"] = self.datasource.workspace.publish_channel.sync_geoserver_data
json_out['action'] = "remove"
#retrieve meta data from the last publish task
meta_json = publish_json
if "meta" in publish_json and "file" in publish_json["meta"]:
meta_file = publish_json["meta"]["file"][len(BorgConfiguration.MASTER_PATH_PREFIX):]
if os.path.exists(meta_file):
with open(meta_file,"r") as f:
meta_json = json.loads(f.read())
else:
meta_json = {}
for key,value in meta_json.get("styles",{}).iteritems():
json_out["styles"][key] = {"default":value.get("default",False)}
for key in ["name","workspace","channel","spatial_data","sync_geoserver_data"]:
if key in meta_json:
json_out[key] = meta_json[key]
else:
json_out = publish_json
json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
#create the dir if required
if not os.path.exists(os.path.dirname(json_file)):
os.makedirs(os.path.dirname(json_file))
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
#remove other related json files
json_files = [ self.json_filename_abs(action) for action in [ 'empty_gwc' ] ]
#get all existing files.
json_files = [ f for f in json_files if os.path.exists(f) ]
if json_files:
hg.remove(files=json_files)
json_files.append(json_file)
hg.commit(include=json_files,addremove=True, user="borgcollector", message="unpublish live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
increase_committed_changes()
try_push_to_repository("livelayer",hg)
finally:
if hg: hg.close()
try_clear_push_owner("livelayer")
    def publish(self):
        """
        publish layer's json reference (if exists) to the repository,
        """
        json_filename = self.json_filename_abs('publish');
        try_set_push_owner("livelayer")
        hg = None
        try:
            # Sync the catalogue first; it returns the merged metadata.
            meta_data = self.update_catalogue_service(md5=True,extra_datas={"publication_date":datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})
            #write meta data file
            file_name = "{}.{}.meta.json".format(self.datasource.workspace.name,self.kmi_name)
            meta_file = os.path.join(BorgConfiguration.LIVE_LAYER_DIR,file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))
            with open(meta_file,"wb") as output:
                json.dump(meta_data, output, indent=4)
            # The committed task json only references the meta file by
            # path + md5 checksum.
            json_out = {}
            json_out['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}
            json_out['action'] = "publish"
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))
            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            #remove other related json files
            json_files = [ self.json_filename_abs(action) for action in [ 'empty_gwc' ] ]
            #get all existing files.
            json_files = [ f for f in json_files if os.path.exists(f) ]
            if json_files:
                hg.remove(files=json_files)
            json_files.append(json_filename)
            hg.commit(include=json_files,addremove=True, user="borgcollector", message="update live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()
            try_push_to_repository("livelayer",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("livelayer")
    def empty_gwc(self):
        """
        update layer's json for empty gwc to the repository
        """
        if self.publish_status.unpublished:
            #layer is not published, no need to empty gwc
            raise ValidationError("The wms layer({0}) is not published before.".format(self.kmi_name))
        json_filename = self.json_filename_abs('empty_gwc');
        try_set_push_owner("livelayer")
        hg = None
        try:
            # Minimal task json telling the consumer which cache to empty.
            json_out = {}
            json_out["name"] = self.kmi_name
            json_out["workspace"] = self.datasource.workspace.name
            json_out["store"] = self.datasource.name
            json_out["action"] = "empty_gwc"
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))
            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.commit(include=[json_filename],addremove=True, user="borgcollector", message="Empty GWC of live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()
            try_push_to_repository("livelayer",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("livelayer")
    def __str__(self):
        """Display string: the publish (kmi) name."""
        return self.kmi_name
    class Meta:
        # One layer per (datasource, table) pair.
        unique_together = (("datasource","table"),)
        ordering = ("datasource","table")
class PublishedLayerManager(models.Manager):
    """Manager that only returns layers whose status counts as published."""
    def get_queryset(self):
        # BUG FIX: the original called super(LayerManager, self), which
        # raises TypeError because this class does not derive from
        # LayerManager; anchor super() on this class instead.
        return super(PublishedLayerManager, self).get_queryset().filter(status__in=ResourceStatus.published_status)
class PublishedLayer(Layer):
    """Proxy model exposing only published layers through ``objects``."""
    # BUG FIX: Django managers must be instances; the original assigned the
    # manager class itself, which Django would not use as a manager.
    objects = PublishedLayerManager()
    class Meta:
        proxy = True
        verbose_name="Published layer"
        verbose_name_plural="Published layers"
class SqlViewLayer(BorgModel,ResourceStatusMixin,TransactionMixin,SpatialTableMixin):
    """A live layer defined by a SQL view over a datasource rather than a
    physical table; publishing mirrors ``Layer``'s workflow."""
    # Publish name of the view layer.
    name = models.SlugField(max_length=60,null=False,blank=False,editable=True,unique=True, help_text="The name of live layer", validators=[validate_slug])
    datasource = models.ForeignKey(Datasource,editable=True,null=False,blank=False,on_delete=models.CASCADE)
    # The SQL that defines the view.
    viewsql = SQLField(null=False,blank=False)
    # Cached spatial description and CREATE TABLE sql, used to detect changes.
    spatial_info = models.TextField(max_length=512,editable=False,null=True,blank=True)
    sql = models.TextField(null=True, editable=False)
    # Optional manual bounding box that overrides the computed one.
    kmi_bbox = models.CharField(max_length=128,null=True,blank=True,editable=True)
    geoserver_setting = models.TextField(blank=True,null=True,editable=False)
    status = models.CharField(max_length=32, null=False, editable=False,choices=ResourceStatus.layer_status_options,default=ResourceStatus.New.name)
    # Lifecycle timestamps maintained by refresh/publish/unpublish.
    last_publish_time = models.DateTimeField(null=True,editable=False)
    last_unpublish_time = models.DateTimeField(null=True,editable=False)
    last_refresh_time = models.DateTimeField(null=False,editable=False)
    last_modify_time = models.DateTimeField(null=True,editable=False)
    create_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
    @property
    def table_name(self):
        """For a sql view the layer name doubles as the table name."""
        return self.name
    @property
    def table_schema(self):
        """Schema of the owning datasource."""
        return self.datasource.schema
    @property
    def table_sql(self):
        """The defining view SQL."""
        return self.viewsql
    @property
    def db_util(self):
        """Database utility object of the owning datasource."""
        return self.datasource.dbUtil
    def clean(self):
        # Django validation hook: refresh derived fields only when data
        # actually changed.
        if not self.data_changed: return
        self.last_modify_time = timezone.now()
        self.status = self.next_status(ResourceAction.UPDATE)
        if not self.last_refresh_time:
            # First save: inherit the datasource's refresh timestamp.
            self.last_refresh_time = self.datasource.last_refresh_time
        self.spatial_info = self.refresh_spatial_info().get_spatial_info()
        self.sql = self.get_create_table_sql()
    def refresh(self,time=None):
        """Re-read the view's spatial info and persist the layer if the
        view definition or spatial info changed; always returns True.

        Guarded by the "livesqlviewlayer_refresh" pseudo-transaction.
        """
        self.try_begin_transaction("livesqlviewlayer_refresh")
        try:
            time = time or timezone.now()
            new_spatial_info = self.refresh_spatial_info().get_spatial_info()
            self.last_refresh_time = time
            # Only save when something observable changed.
            if not self.sql or self.sql != self.get_create_table_sql() or self.spatial_info != new_spatial_info:
                self.spatial_info = new_spatial_info
                self.last_modify_time = time
                self.sql = self.get_create_table_sql()
                self.status = self.next_status(ResourceAction.UPDATE)
                self.save()
            return True
        finally:
            self.try_clear_transaction("livesqlviewlayer_refresh")
    @property
    def kmi_name(self):
        """Publish name: always the explicit layer name."""
        return self.name
    @property
    def builtin_metadata(self):
        """Metadata dict derived purely from this layer and its publish channel."""
        meta_data = {}
        meta_data["workspace"] = self.datasource.workspace.name
        meta_data["name"] = self.kmi_name
        meta_data["service_type"] = "WMS"
        # Service type: none for plain tables or non-geoserver channels,
        # WMS for rasters, WFS for vector data.
        if self.is_normal or not self.datasource.workspace.publish_channel.sync_geoserver_data:
            meta_data["service_type"] = ""
        elif self.is_raster:
            meta_data["service_type"] = "WMS"
            meta_data["service_type_version"] = self.datasource.workspace.publish_channel.wms_version
        else:
            meta_data["service_type"] = "WFS"
            meta_data["service_type_version"] = self.datasource.workspace.publish_channel.wfs_version
        meta_data["modified"] = (self.last_modify_time or self.last_refresh_time).astimezone(timezone.get_default_timezone()).strftime("%Y-%m-%d %H:%M:%S.%f")
        #bbox: an explicit kmi_bbox overrides the computed bounding box
        meta_data["bounding_box"] = self.kmi_bbox or (json.dumps(self.bbox) if self.bbox else None)
        meta_data["crs"] = self.crs or None
        #ows resource
        meta_data["ows_resource"] = {}
        if meta_data["service_type"] == "WFS" and self.datasource.workspace.publish_channel.wfs_endpoint:
            meta_data["ows_resource"]["wfs"] = True
            meta_data["ows_resource"]["wfs_version"] = self.datasource.workspace.publish_channel.wfs_version
            meta_data["ows_resource"]["wfs_endpoint"] = self.datasource.workspace.publish_channel.wfs_endpoint
        # NOTE(review): this condition tests wfs_endpoint while filling in
        # the wms keys (same pattern as Layer.builtin_metadata); possibly it
        # should test wms_endpoint - confirm before changing.
        if meta_data["service_type"] in ("WFS","WMS") and self.datasource.workspace.publish_channel.wfs_endpoint:
            meta_data["ows_resource"]["wms"] = True
            meta_data["ows_resource"]["wms_version"] = self.datasource.workspace.publish_channel.wms_version
            meta_data["ows_resource"]["wms_endpoint"] = self.datasource.workspace.publish_channel.wms_endpoint
        geo_settings = json.loads(self.geoserver_setting) if self.geoserver_setting else {}
        if geo_settings.get("create_cache_layer",False) and self.datasource.workspace.publish_channel.gwc_endpoint:
            meta_data["ows_resource"]["gwc"] = True
            meta_data["ows_resource"]["gwc_endpoint"] = self.datasource.workspace.publish_channel.gwc_endpoint
        return meta_data
def update_catalogue_service(self,md5=False,extra_datas=None):
meta_data = self.builtin_metadata
if extra_datas:
meta_data.update(extra_datas)
bbox = meta_data.get("bounding_box",None)
crs = meta_data.get("crs",None)
#update catalog service
res = requests.post("{}/catalogue/api/records/?style_content=true".format(settings.CSW_URL),json=meta_data,auth=(settings.CSW_USER,settings.CSW_PASSWORD),verify=settings.CSW_CERT_VERIFY)
if 400 <= res.status_code < 600 and res.content:
res.reason = "{}({})".format(res.reason,res.content)
res.raise_for_status()
try:
meta_data = res.json()
except:
if res.content.find("microsoft") >= 0:
res.status_code = 401
res.reason = "Please login"
else:
res.status_code = 400
res.reason = "Unknown reason"
res.raise_for_status()
#process styles
styles = meta_data.get("styles",[])
#filter out qml and lyr styles
sld_styles = [s for s in meta_data.get("styles",[]) if s["format"].lower() == "sld" and s.get("raw_content")]
meta_data["styles"] = {}
style_dump_dir = BorgConfiguration.LIVE_LAYER_DIR
if not os.path.exists(style_dump_dir):
os.makedirs(style_dump_dir)
for style in sld_styles:
if style["default"]:
#default sld file
meta_data["default_style"] = style["name"]
#write the style into file system
style_file = os.path.join(style_dump_dir,"{}.{}.{}.sld".format(self.datasource.workspace.name,self.kmi_name,style["name"]))
with open(style_file,"wb") as f:
f.write(style["raw_content"].decode("base64"))
if md5:
meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"],"md5":file_md5(style_file)}
else:
meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"]}
#add extra data to meta data
meta_data["workspace"] = self.datasource.workspace.name
meta_data["override_bbox"] = True if self.kmi_bbox else False
meta_data["name"] = self.kmi_name
meta_data["datastore"] = self.datasource.name
meta_data["auth_level"] = self.datasource.workspace.auth_level
meta_data["preview_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, BorgConfiguration.PREVIEW_DIR)
meta_data["spatial_data"] = self.is_spatial
if self.is_spatial:
meta_data["bbox"] = bbox
meta_data["crs"] = crs
meta_data["spatial_type"] = self.spatial_type
meta_data["spatial_column"] = self.spatial_column
meta_data["channel"] = self.datasource.workspace.publish_channel.name
meta_data["sync_geoserver_data"] = self.datasource.workspace.publish_channel.sync_geoserver_data
if self.geoserver_setting:
meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)
meta_data["viewsql"] = self.viewsql
#bbox
if "bounding_box" in meta_data:
del meta_data["bounding_box"]
return meta_data
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """Save only when data actually changed, inside the
        "livesqlviewlayer_save" pseudo-transaction (atomic when we own it)."""
        if not self.data_changed: return
        try:
            if self.try_begin_transaction("livesqlviewlayer_save"):
                with transaction.atomic():
                    super(SqlViewLayer,self).save(force_insert,force_update,using,update_fields)
            else:
                # Another caller already owns the pseudo-transaction.
                super(SqlViewLayer,self).save(force_insert,force_update,using,update_fields)
        finally:
            self.try_clear_transaction("livesqlviewlayer_save")
    def delete(self,using=None):
        """Delete inside the "livesqlviewlayer_delete" pseudo-transaction."""
        try:
            if self.try_begin_transaction("livesqlviewlayer_delete"):
                with transaction.atomic():
                    super(SqlViewLayer,self).delete(using)
            else:
                super(SqlViewLayer,self).delete(using)
        finally:
            self.try_clear_transaction("livesqlviewlayer_delete")
def json_filename(self,action='publish'):
if action in ['publish','unpublish']:
return os.path.join(self.datasource.workspace.publish_channel.name,"live_layers", "{}.{}.json".format(self.datasource.workspace.name, self.kmi_name))
else:
return os.path.join(self.datasource.workspace.publish_channel.name,"live_layers", "{}.{}.{}.json".format(self.datasource.workspace.name, self.kmi_name,action))
def json_filename_abs(self,action='publish'):
return os.path.join(BorgConfiguration.BORG_STATE_REPOSITORY, self.json_filename(action))
def unpublish(self):
#use published meta file as the meta file for unpublish
publish_file = self.json_filename_abs('publish')
publish_json = None
if os.path.exists(publish_file):
with open(publish_file,"r") as f:
publish_json = json.loads(f.read())
else:
publish_json = {}
json_file = self.json_filename_abs('unpublish');
json_out = None
#remove it from catalogue service
res = requests.delete("{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL,self.datasource.workspace.name,self.kmi_name),auth=(settings.CSW_USER,settings.CSW_PASSWORD),verify=settings.CSW_CERT_VERIFY)
if res.status_code != 404:
res.raise_for_status()
hg = None
try:
if publish_json.get("action","publish") != "remove":
json_out = {}
json_out["name"] = self.kmi_name
json_out["workspace"] = self.datasource.workspace.name
json_out["styles"] = {}
json_out["spatial_data"] = self.is_spatial
json_out["channel"] = self.datasource.workspace.publish_channel.name
json_out["sync_geoserver_data"] = self.datasource.workspace.publish_channel.sync_geoserver_data
json_out['action'] = "remove"
#retrieve meta data from the last publish task
meta_json = publish_json
if "meta" in publish_json and "file" in publish_json["meta"]:
meta_file = publish_json["meta"]["file"][len(BorgConfiguration.MASTER_PATH_PREFIX):]
if os.path.exists(meta_file):
with open(meta_file,"r") as f:
meta_json = json.loads(f.read())
else:
meta_json = {}
for key,value in meta_json.get("styles",{}).iteritems():
json_out["styles"][key] = {"default":value.get("default",False)}
for key in ["name","workspace","channel","spatial_data","sync_geoserver_data"]:
if key in meta_json:
json_out[key] = meta_json[key]
else:
json_out = publish_json
json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
#create the dir if required
if not os.path.exists(os.path.dirname(json_file)):
os.makedirs(os.path.dirname(json_file))
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
#remove other related json files
json_files = [ self.json_filename_abs(action) for action in [ 'empty_gwc' ] ]
#get all existing files.
json_files = [ f for f in json_files if os.path.exists(f) ]
if json_files:
hg.remove(files=json_files)
json_files.append(json_file)
hg.commit(include=json_files,addremove=True, user="borgcollector", message="unpublish live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
increase_committed_changes()
try_push_to_repository("livelayer",hg)
finally:
if hg: hg.close()
try_clear_push_owner("livelayer")
    def publish(self):
        """
        publish layer's json reference (if exists) to the repository,
        """
        json_filename = self.json_filename_abs('publish');
        try_set_push_owner("livesqlviewlayer")
        hg = None
        try:
            # Sync the catalogue first; it returns the merged metadata.
            meta_data = self.update_catalogue_service(md5=True,extra_datas={"publication_date":datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})
            #write meta data file
            file_name = "{}.{}.meta.json".format(self.datasource.workspace.name,self.kmi_name)
            meta_file = os.path.join(BorgConfiguration.LIVE_LAYER_DIR,file_name)
            #create the dir if required
            if not os.path.exists(os.path.dirname(meta_file)):
                os.makedirs(os.path.dirname(meta_file))
            with open(meta_file,"wb") as output:
                json.dump(meta_data, output, indent=4)
            # The committed task json only references the meta file by
            # path + md5 checksum.
            json_out = {}
            json_out['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}
            json_out['action'] = "publish"
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))
            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            #remove other related json files
            json_files = [ self.json_filename_abs(action) for action in [ 'empty_gwc' ] ]
            #get all existing files.
            json_files = [ f for f in json_files if os.path.exists(f) ]
            if json_files:
                hg.remove(files=json_files)
            json_files.append(json_filename)
            hg.commit(include=json_files,addremove=True, user="borgcollector", message="update live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()
            try_push_to_repository("livesqlviewlayer",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("livesqlviewlayer")
    def empty_gwc(self):
        """
        update layer's json for empty gwc to the repository
        """
        if self.publish_status.unpublished:
            #layer is not published, no need to empty gwc
            raise ValidationError("The wms layer({0}) is not published before.".format(self.kmi_name))
        json_filename = self.json_filename_abs('empty_gwc');
        # NOTE(review): this method uses the "livelayer" push owner while
        # publish() uses "livesqlviewlayer"; the set/clear pair here is at
        # least internally consistent - confirm which owner is intended.
        try_set_push_owner("livelayer")
        hg = None
        try:
            # Minimal task json telling the consumer which cache to empty.
            json_out = {}
            json_out["name"] = self.kmi_name
            json_out["workspace"] = self.datasource.workspace.name
            json_out["store"] = self.datasource.name
            json_out["action"] = "empty_gwc"
            json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
            #create the dir if required
            if not os.path.exists(os.path.dirname(json_filename)):
                os.makedirs(os.path.dirname(json_filename))
            with open(json_filename, "wb") as output:
                json.dump(json_out, output, indent=4)
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.commit(include=[json_filename],addremove=True, user="borgcollector", message="Empty GWC of live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()
            try_push_to_repository("livelayer",hg)
        finally:
            if hg: hg.close()
            try_clear_push_owner("livelayer")
    def __str__(self):
        """Display string: the publish (kmi) name."""
        return self.kmi_name
    class Meta:
        # Default queryset ordering.
        ordering = ("datasource","name")
class DatasourceEventListener(object):
    """Django signal handlers keeping a Datasource's publish state in sync."""
    @staticmethod
    @receiver(pre_delete, sender=Datasource)
    def _pre_delete(sender, instance, **args):
        #unpublish the datasource first
        target_status = instance.next_status(ResourceAction.UNPUBLISH)
        if target_status != instance.status or instance.unpublish_required:
            instance.status = target_status
            # Saving with a status change triggers _pre_save below.
            instance.save(update_fields=['status','last_unpublish_time'])
    @staticmethod
    @receiver(pre_save, sender=Datasource)
    def _pre_save(sender, instance, **args):
        # Perform the actual publish/unpublish side effects before the row
        # is written, so the saved timestamps reflect what really happened.
        if instance.unpublish_required:
            #unpublish all layers belonging to the server
            for layer in instance.layer_set.all():
                target_status = layer.next_status(ResourceAction.CASCADE_UNPUBLISH)
                if layer.status != target_status or layer.unpublish_required:
                    #need to unpublish
                    layer.status = target_status
                    layer.save(update_fields=["status","last_unpublish_time"])
            for viewlayer in instance.sqlviewlayer_set.all():
                target_status = viewlayer.next_status(ResourceAction.CASCADE_UNPUBLISH)
                if viewlayer.status != target_status or viewlayer.unpublish_required:
                    #need to unpublish
                    viewlayer.status = target_status
                    viewlayer.save(update_fields=["status","last_unpublish_time"])
            instance.unpublish()
            instance.last_unpublish_time = timezone.now()
        elif instance.publish_required:
            instance.publish()
            #publish succeed, change the status to published.
            instance.last_publish_time = timezone.now()
            #cascade publish layers (deliberately disabled - see note below)
            """
            cascade publish will trigger all published layers from this wms server to be published again, and in most cases, this is unnecessary.
            for layer in instance.layer_set.all():
                target_status = layer.next_status(ResourceAction.CASCADE_PUBLISH)
                if layer.status != target_status or layer.publish_required:
                    #need to publish
                    layer.status = target_status
                    layer.save(update_fields=["status","last_publish_time"])
            for viewlayer in instance.sqlviewlayer_set.all():
                target_status = viewlayer.next_status(ResourceAction.CASCADE_PUBLISH)
                if viewlayer.status != target_status or viewlayer.publish_required:
                    #need to unpublish
                    viewlayer.status = target_status
                    viewlayer.save(update_fields=["status","last_publish_time"])
            """
class LayerEventListener(object):
    """Django signal handlers keeping a Layer's publish state in sync."""
    @staticmethod
    @receiver(pre_delete, sender=Layer)
    def _pre_delete(sender, instance, **args):
        #unpublish the layer first
        target_status = instance.next_status(ResourceAction.UNPUBLISH)
        if target_status != instance.status or instance.unpublish_required:
            instance.status = target_status
            # Saving with a status change triggers _pre_save below.
            instance.save(update_fields=['status','last_unpublish_time'])
    @staticmethod
    @receiver(post_delete, sender=Layer)
    def _post_delete(sender, instance, **args):
        # No post-delete cleanup required; kept as an explicit no-op hook.
        pass
    @staticmethod
    @inherit_support_receiver(pre_save, sender=Layer)
    def _pre_save(sender, instance, **args):
        # Only act on explicit status transitions (update_fields contains
        # "status"); ordinary saves are ignored.
        if "update_fields" in args and args['update_fields'] and "status" in args["update_fields"]:
            if instance.unpublish_required:
                instance.unpublish()
                instance.last_unpublish_time = timezone.now()
            elif instance.publish_required:
                #publish the datasource to which this layer belongs to
                datasource = instance.datasource
                target_status = datasource.next_status(ResourceAction.DEPENDENT_PUBLISH)
                if datasource.status != target_status or datasource.publish_required:
                    #associated datasource is not published,publish it
                    datasource.status = target_status
                    datasource.save(update_fields=["status","last_publish_time"])
                instance.publish()
                instance.last_publish_time = timezone.now()
class SqlViewLayerEventListener(object):
    """Django signal handlers keeping a SqlViewLayer's publish state in sync."""
    @staticmethod
    @receiver(pre_delete, sender=SqlViewLayer)
    def _pre_delete(sender, instance, **args):
        #unpublish the layer first
        target_status = instance.next_status(ResourceAction.UNPUBLISH)
        if target_status != instance.status or instance.unpublish_required:
            instance.status = target_status
            # Saving with a status change triggers _pre_save below.
            instance.save(update_fields=['status','last_unpublish_time'])
    @staticmethod
    @receiver(post_delete, sender=SqlViewLayer)
    def _post_delete(sender, instance, **args):
        # No post-delete cleanup required; kept as an explicit no-op hook.
        pass
    @staticmethod
    @inherit_support_receiver(pre_save, sender=SqlViewLayer)
    def _pre_save(sender, instance, **args):
        # Only act on explicit status transitions (update_fields contains
        # "status"); ordinary saves are ignored.
        if "update_fields" in args and args['update_fields'] and "status" in args["update_fields"]:
            if instance.unpublish_required:
                instance.unpublish()
                instance.last_unpublish_time = timezone.now()
            elif instance.publish_required:
                #publish the datasource to which this layer belongs to
                datasource = instance.datasource
                target_status = datasource.next_status(ResourceAction.DEPENDENT_PUBLISH)
                if datasource.status != target_status or datasource.publish_required:
                    #associated datasource is not published,publish it
                    datasource.status = target_status
                    datasource.save(update_fields=["status","last_publish_time"])
                instance.publish()
                instance.last_publish_time = timezone.now()
| 1.5625 | 2 |
ies/iesutils.py | nickmcdonald/CNDL | 0 | 12772668 | <reponame>nickmcdonald/CNDL
########################################################
#
# Copyright (C) 2020-2021 <NAME> <<EMAIL>>
#
# This file is part of CNDL.
#
# CNDL cannot be copied and/or distributed without the express
# permission of <NAME>
########################################################
from enum import Enum
import math
import random
from ies import IesData, IesAngle
# Default sampling resolution: 180 latitude samples per angle and a single
# longitude slice (i.e. a rotationally symmetric distribution).
DEFAULT_LAT = 180
DEFAULT_LONG = 1
class MixMethod(Enum):
    """Point-wise operation used when combining two IES distributions."""
    ADD = 'Add'
    SUBTRACT = 'Subtract'
    MULTIPLY = 'Multiply'
    AVERAGE = 'Average'
    MIN = 'Minimum'
    MAX = 'Maximum'
class FalloffMethod(Enum):
    """Easing curve applied across a spotlight's falloff band."""
    SMOOTH = 'Smooth'
    LINEAR = 'Linear'
    SHARP = 'Sharp'
    ROOT = 'Root'
class LightDirection(Enum):
    """Whether the spotlight cone is centred on latitude 0 (DOWN) or 180 (UP)."""
    DOWN = 'Down'
    UP = 'Up'
def linearInterpolate(a: float, b: float, x: float):
    """Blend linearly from *a* (at x=0) to *b* (at x=1)."""
    inv = 1 - x
    return a * inv + b * x
def smoothInterpolate(a: float, b: float, x: float):
    """Blend from *a* to *b* with cosine easing (smooth at both ends)."""
    eased = (1 - math.cos(x * math.pi)) / 2
    return a * (1 - eased) + b * eased
def sharpInterpolate(a: float, b: float, x: float):
    """Blend from *a* to *b* along a quadratic (slow start, fast finish)."""
    span = b - a
    return span * x ** 2 + a
def rootInterpolate(a: float, b: float, x: float):
    """Blend from *a* to *b* along a square root (fast start, slow finish)."""
    span = b - a
    return span * math.sqrt(x) + a
def mixIesData(ies1: IesData, ies2: IesData, method: MixMethod) -> IesData:
    """Combine two IES distributions point-by-point using *method*.

    The result covers the union of both inputs' angle grids.
    """
    combine = {
        MixMethod.ADD: lambda p, q: p + q,
        MixMethod.SUBTRACT: lambda p, q: p - q,
        MixMethod.MULTIPLY: lambda p, q: p * q,
        MixMethod.AVERAGE: lambda p, q: (p + q) / 2,
        MixMethod.MIN: min,
        MixMethod.MAX: max,
    }[method]
    merged = IesData()
    longAngles = list(set().union(ies1.getLongAngles(), ies2.getLongAngles()))
    latAngles = list(set().union(ies1.getLatAngles(), ies2.getLatAngles()))
    for longAngle in longAngles:
        points = {}
        for latAngle in latAngles:
            points[latAngle] = combine(
                ies1.valueAt(long=longAngle, lat=latAngle),
                ies2.valueAt(long=longAngle, lat=latAngle),
            )
        merged.addAngle(longAngle, IesAngle(points=points))
    return merged
def blankIesData(latRes: int = DEFAULT_LAT,
                 longRes=DEFAULT_LONG, intensity=0) -> IesData:
    """Create an IES distribution with uniform *intensity* everywhere."""
    ies = IesData()
    step = round(360 / longRes, 2)
    longAngle = 0.0
    while longAngle < 360:
        ies.addAngle(round(longAngle, 2),
                     IesAngle(latRes=latRes, intensity=intensity))
        longAngle += step
    return ies
def spotlightIesData(angle, falloff, falloffMethod=FalloffMethod.SMOOTH,
                     lightDirection=LightDirection.DOWN,
                     latRes=DEFAULT_LAT, longRes=DEFAULT_LONG) -> IesData:
    """Build a spotlight IES distribution.

    Parameters
    ----------
    angle : beam half-angle in degrees (size of the lit cone).
    falloff : fraction of *angle* used as the falloff band (0..1).
    falloffMethod : easing curve used inside the falloff band.
    lightDirection : DOWN lights latitudes [0, angle); UP lights (180-angle, 180].
    latRes / longRes : number of latitude / longitude samples.
    """

    def blend(start, end, amount):
        # Single dispatch point for the easing curves; the original
        # duplicated this 4-way chain for the UP and DOWN branches.
        if falloffMethod == FalloffMethod.SMOOTH:
            return smoothInterpolate(start, end, amount)
        elif falloffMethod == FalloffMethod.LINEAR:
            return linearInterpolate(start, end, amount)
        elif falloffMethod == FalloffMethod.SHARP:
            return sharpInterpolate(start, end, amount)
        elif falloffMethod == FalloffMethod.ROOT:
            return rootInterpolate(start, end, amount)
        # Unreachable for the current FalloffMethod members.
        return end

    ies = IesData()
    long = 0.0
    # NOTE(review): this loop uses `<= 360` while blankIesData uses `< 360`,
    # so with longRes=1 both 0 and 360 degrees are emitted - confirm intended.
    while long <= 360:
        points = {}
        lat = 0
        while lat <= 180:
            falloffSize = angle * falloff
            if lightDirection is LightDirection.DOWN:
                falloffStart = angle - falloffSize
                if lat >= angle:
                    points[lat] = 0.0      # outside the cone
                elif lat > falloffStart and falloffSize != 0.0:
                    points[lat] = blend(1, 0, (lat - falloffStart) / falloffSize)
                else:
                    points[lat] = 1.0      # fully lit core
            else:  # LightDirection.UP
                falloffStart = 180 - angle
                if lat <= falloffStart:
                    points[lat] = 0.0
                elif lat < falloffStart + falloffSize and falloffSize != 0.0:
                    points[lat] = blend(0, 1, (lat - falloffStart) / falloffSize)
                else:
                    points[lat] = 1.0
            lat += round(180 / latRes, 2)
        ies.addAngle(round(long, 2), IesAngle(points=points))
        long += 360 / longRes
    return ies
def normalizeIesData(ies: IesData) -> IesData:
    """Return a copy of *ies* scaled so its peak brightness becomes 1.

    A completely dark distribution (peak of 0) maps every point to 1.
    """
    scaled = IesData()
    peak = ies.getPeakBrightness()
    for longAngle in ies.getLongAngles():
        source = ies.angles[longAngle].points
        if peak != 0:
            points = {lat: source[lat] / peak for lat in ies.getLatAngles()}
        else:
            points = {lat: 1 for lat in ies.getLatAngles()}
        scaled.addAngle(longAngle, IesAngle(points=points))
    return scaled
def noiseIesData(latscale: float, latintensity: float,
                 longscale: float, longintensity: float,
                 seed: int = 0) -> IesData:
    """Create a seeded random-noise IES distribution.

    Each latitude and each longitude gets a random attenuation factor in
    (1 - intensity, 1]; every point's value is the product of the factors
    of its two axes.

    Parameters
    ----------
    latscale / longscale : sampling resolution, passed to blankIesData.
    latintensity / longintensity : maximum attenuation per axis (0..1).
    seed : random seed, making the output deterministic.
    """
    ies = blankIesData(latRes=latscale, longRes=longscale)
    random.seed(seed)

    def noise_factors(angles, intensity):
        # One factor per angle, quantised to steps of 0.01.  Guard against
        # random.randrange's empty-range ValueError when the intensity
        # rounds down to zero (no noise -> factor 1.0).
        bound = int(intensity * 100)
        return {
            angle: (1 - random.randrange(0, bound, 1) / 100) if bound > 0 else 1.0
            for angle in angles
        }

    latnoise = noise_factors(ies.getLatAngles(), latintensity)
    # BUG FIX: the longitude factors previously reused `latintensity`,
    # so `longintensity` was silently ignored.
    longnoise = noise_factors(ies.getLongAngles(), longintensity)
    for longAngle in ies.getLongAngles():
        for lat in ies.getLatAngles():
            ies.angles[longAngle].points[lat] = latnoise[lat] * longnoise[longAngle]
    return ies
def parseIesData(inp: str) -> IesData:
    """Parse the text of an IES (LM-63) file into a normalized IesData.

    Only the angle/candela payload after the TILT line is used; header
    metadata (lumens, multiplier, opening dimensions, ...) is skipped.
    """
    # BUG FIX: the original iterated the *string* itself, which yields
    # single characters, so the scan below could never find a real
    # "TILT..." line and always fell back to "TILT=NONE".  Split the
    # input into lines first.
    dataStartLine = "TILT=NONE"
    for line in inp.splitlines():
        if line.strip().startswith("TILT"):
            dataStartLine = line
            break
    # Everything after the TILT line is a whitespace-separated number stream.
    data = inp.split(dataStartLine)[1].split(None)
    # data[1]=lumens, data[2]=multiplier, data[6]=units,
    # data[7:10]=opening width/length/height (currently unused).
    latAnglesNum = int(float(data[3]))
    longAnglesNum = int(float(data[4]))
    ies = IesData()
    # The first 13 numbers are the header; then the latitude angles,
    # the longitude angles, and finally one candela row per longitude.
    latAnglesStart = 13
    longAnglesStart = latAnglesStart + latAnglesNum
    valuesStart = longAnglesStart + longAnglesNum
    for angle in range(longAnglesStart, longAnglesStart + longAnglesNum):
        angleNum = angle - longAnglesStart
        angleDataStart = valuesStart + (angleNum * latAnglesNum)
        points = {}
        for lat in range(latAnglesStart, latAnglesStart + latAnglesNum):
            latNum = lat - latAnglesStart
            points[float(data[lat])] = float(data[angleDataStart + latNum])
        ies.addAngle(float(data[angle]), IesAngle(points=points))
    return normalizeIesData(ies)
| 2.59375 | 3 |
kolab/kotonoha/__init__.py | roy029/kolab | 0 | 12772669 | <reponame>roy029/kolab
from kolab.kotonoha.kotonoha import Kotonoha | 0.941406 | 1 |
roulette.py | CtrlAltDel668/kuroshiro101 | 0 | 12772670 | import random
import threading
class RR() :
    """Console "Russian Roulette" toy that picks random punishments on timers.

    NOTE(review): kapoy() only *defines* the nested timer callbacks
    (random1/2/3, start); as written nothing ever invokes them, so calling
    kapoy() just prints the banner.  Confirm whether the timers were meant
    to be started.
    """
    def __init__(self,v):
        # v selects the mode; only v == 0 has any (currently dead) handling.
        self.v = v
    def kapoy (self):
        print('Russian Roulette is starting!!')
        if self.v == 0 :
            def random1():
                # Re-arms itself every 1s once started (never started here).
                print('Randomizing your punishment...')
                timer = threading.Timer(1.0, random1)
                timer.start()
            def random2():
                print('Randomizing your punishment....')
                timer = threading.Timer(2.0, random2)
                timer.start()
            def random3():
                print('Randomizing your punishment.....')
                timer = threading.Timer(3.0, random3)
                timer.start()
            def start ():
                # Prints a random punishment and re-arms itself every 4s.
                punishment_ = ['[Shout you name 3 times]','[Kiss the forehead of your friend]','[Roll 3 times]',
                '[Confess you embarassing moments in the past]',
                '[Bang your head in the door]','[Slap your face 3 times]','[Imitate an animal for 10 secs]',
                '[Sing your favorite song]',
                '[Dance for 20 secs]','[Say HI! to a random person]']
                rand_punishment = random.choice(punishment_)
                print('***********************')
                print(rand_punishment)
                print('***********************')
                timer = threading.Timer(4.0, start)
                timer.start()
| 3.5625 | 4 |
Chapter_4/12.py | sai29/Python-John-Zelle-book | 14 | 12772671 | <filename>Chapter_4/12.py
def main():
    # NOTE: Python 2 syntax (print statements; input() evaluates the typed
    # expression, so the user is expected to enter a number).
    print "This program illustrates a chaotic function"
    x = input("Enter a number between 0 and 1: ")
    print "Index",x
    print "----------"
    # Iterate the logistic map x -> 3.9*x*(1-x), printing each step.
    for i in range(10):
        print i,
        x = 3.9 * x * (1 - x)
        print " ",x

main()
get_emails.py | fredsakr/eloqua-python-request | 6 | 12772672 | import sys
sys.path.append('./lib')
from eloqua_request import EloquaRequest
request = EloquaRequest('site', 'user', 'password')
response = request.get('/assets/emails?search=Demand*&page=1&count=50&depth=minimal', None)
| 1.828125 | 2 |
pusher/aiohttp.py | makingspace/pusher-http-python | 0 | 12772673 | <reponame>makingspace/pusher-http-python
# -*- coding: utf-8 -*-
from __future__ import (
print_function,
unicode_literals,
absolute_import,
division)
import aiohttp
import asyncio
from pusher.http import process_response
class AsyncIOBackend:
    def __init__(self, client):
        """Adapter running pusher requests over aiohttp/asyncio.

        :param client: pusher.Client object
        """
        self.client = client
        # Shared connector so requests reuse connections.
        self.conn = aiohttp.TCPConnector()

    def send_request(self, request):
        """Send a prepared pusher request; return the processed response.

        Generator-based (``yield from``) coroutine for the pre-async/await
        asyncio API.  NOTE(review): ``aiohttp.request(...)`` combined with
        ``response.read_and_close()`` is a legacy aiohttp interface -
        confirm the pinned aiohttp version still provides it.
        """
        method = request.method
        url = "%s%s" % (request.base_url, request.path)
        params = request.query_params
        data = request.body
        headers = request.headers
        # Enforce the client's timeout around the whole HTTP exchange.
        response = yield from asyncio.wait_for(
            aiohttp.request(
                method, url, params=params, data=data, headers=headers,
                connector=self.conn),
            timeout=self.client.timeout)
        body = yield from response.read_and_close()
        return process_response(response.status, body.decode('utf8'))
| 2.859375 | 3 |
openproblems/utils.py | bendemeo/SingleCellOpenProblems | 134 | 12772674 | <reponame>bendemeo/SingleCellOpenProblems
from .version import __version__
import decorator
import packaging.version
@decorator.decorator
def temporary(func, version=None, *args, **kwargs):
    """Decorate a function as a temporary fix.

    Apply via ``@temporary(version="x.y")``.  Calls pass through to the
    wrapped function until the package version reaches *version*; from then
    on every call raises RuntimeError, forcing the workaround's removal.

    Parameters
    ----------
    version : str
        Version after which this function should raise a RuntimeError

    Raises
    ------
    TypeError
        If no ``version`` keyword was supplied.
    RuntimeError
        Once the current package version is >= *version*.
    """
    if version is None:
        raise TypeError("temporary() missing 1 required keyword argument: 'version'")
    # Compare semantic versions, not raw strings.
    if packaging.version.parse(__version__) >= packaging.version.parse(version):
        raise RuntimeError(
            "Temporary function {}.{} is temporary and should not be used "
            "after version {} (current version: {})".format(
                func.__module__, func.__name__, version, __version__
            )
        )
    return func(*args, **kwargs)
def get_members(module):
    """Get all public members (non-underscore attributes) from a module."""
    return [getattr(module, attr) for attr in dir(module)
            if not attr.startswith("_")]
def get_callable_members(module):
    """Get all callable public members from a module."""
    public = [attr for attr in dir(module) if not attr.startswith("_")]
    return [value for value in (getattr(module, attr) for attr in public)
            if callable(value)]
| 2.421875 | 2 |
RecoLuminosity/LumiDB/python/normFunctors.py | nistefan/cmssw | 0 | 12772675 | <gh_stars>0
import re,ast
class normFunctionFactory(object):
    """Factory of luminosity normalisation/correction functions.

    Each function returns a correction *factor* (not a final luminosity).
    All of them take the same 5 positional run-time parameters plus
    arbitrary named calibration parameters.
    """

    def fPoly(self, luminonorm, intglumi, nBXs, whatev, whatav,
              a0=1.0, a1=0.0, a2=0.0, drift=0.0, c1=0.0, afterglow=''):
        """Polynomial correction with optional afterglow and drift terms.

        luminonorm is in Hz/ub; the returned factor applies to Hz/ub lumi.
        afterglow is a literal string like '[(213,0.992), ...]' mapping a
        minimum bunch count to a factor (the last matching entry wins).
        """
        avglumi = c1 * luminonorm / nBXs if (c1 and nBXs > 0) else 0.
        afterglowFactor = 1.0
        if afterglow:
            for bxthreshold, correction in ast.literal_eval(afterglow):
                if nBXs >= bxthreshold:
                    afterglowFactor = correction
        driftterm = 1.0 + drift * intglumi if (drift and intglumi) else 1.0
        return a0 * afterglowFactor / (1 + a1 * avglumi + a2 * avglumi * avglumi) * driftterm

    def fPolyScheme(self, luminonorm, intglumi, nBXs, fillschemeStr,
                    fillschemePatterns, a0=1.0, a1=0.0, a2=0.0, drift=0.0, c1=0.0):
        """Like fPoly, but the afterglow factor is selected by matching the
        fill scheme name against regex patterns.

        fillschemePatterns maps pattern string -> factor; of the patterns
        that match fillschemeStr, the last one iterated wins.
        """
        avglumi = c1 * luminonorm / nBXs if (c1 and nBXs > 0) else 0.
        afterglowFactor = 1.0
        if fillschemeStr and fillschemePatterns:
            for pattern, factor in fillschemePatterns.items():
                if re.match(pattern, fillschemeStr):
                    afterglowFactor = factor
        driftterm = 1.0 + drift * intglumi if (drift and intglumi) else 1.0
        return a0 * afterglowFactor / (1 + a1 * avglumi + a2 * avglumi * avglumi) * driftterm
def normFunctionCaller(funcName,*args,**kwds):
    # Dispatch to a normFunctionFactory method by name (Python 2 module).
    fac=normFunctionFactory()
    try:
        myfunc=getattr(fac,funcName,None)
    except AttributeError:
        # NOTE(review): unreachable - getattr with a default never raises
        # AttributeError; unknown names fall through to the ValueError below.
        print '[ERROR] unknown correction function '+funcName
        raise
    if callable(myfunc):
        return myfunc(*args,**kwds)
    else:
        raise ValueError('uncallable function '+funcName)
if __name__ == '__main__':
    # Smoke test comparing correction variants (Python 2 print statements).
    #sim run 176796,ls=6
    luminonorm=0.5061*1.0e3
    intglumi=3.309 #/fb
    nBXs=1331
    constParams={'a0':1.0}
    argvals=[luminonorm,intglumi,nBXs,0.0,0.0]
    print 'no correction lumi in Hz/ub ',luminonorm*normFunctionCaller('fPoly',*argvals,**constParams)
    polyParams={'a0':7.268,'a1':0.063,'a2':-0.0037,'drift':0.01258,'c1':6.37,'afterglow':'[(213,0.992), (321,0.99), (423,0.988), (597,0.985), (700,0.984), (873,0.981), (1041,0.979), (1179,0.977),(1317,0.975)]'}
    print 'poly corrected lumi in Hz/ub',luminonorm*normFunctionCaller('fPoly',*argvals,**polyParams)
    polyParams={'a0':7.268,'a1':0.063,'a2':-0.0037,'drift':0.0,'c1':6.37,'afterglow':'[(213,0.992), (321,0.99), (423,0.988), (597,0.985), (700,0.984), (873,0.981), (1041,0.979), (1179,0.977),(1317,0.975)]'}
    print 'poly corrected without drift in Hz/ub ',luminonorm*normFunctionCaller('fPoly',*argvals,**polyParams)
    constParams={'a0':7.268}
    print 'const corrected lumi in Hz/ub',luminonorm*normFunctionCaller('fPoly',*argvals,**constParams)
| 2.46875 | 2 |
Multiprocessing/single.py | commoncdp2021/Gun-Gaja-Gun | 171 | 12772676 | <filename>Multiprocessing/single.py
#!/usr/bin/python
from gen_rand import gen_random_data
if __name__ == '__main__':
    # Run the generator four times back-to-back (sequential baseline).
    for _ in range(4):
        gen_random_data()
examples/app/users/Blueprint.py | hlop3z/basics | 0 | 12772677 | <reponame>hlop3z/basics<gh_stars>0
from basics import blueprints
# Route blueprint and canned response factory for the "users" resource.
name = 'users'
blueprint = blueprints.Blueprint( name )
RESPONSE = blueprints.response( name )
@blueprint.route
async def custom(model, payload):
    """Example custom route: looks up the users form and echoes the payload."""
    form = model['users'].model.form
    # form.create({}) / form.update({}) are available on the form object.
    return RESPONSE(False, payload, 'custom-method')
@blueprint.route
async def list(model, payload):
    """Paginated, name-filtered listing of users, newest first."""
    query = {"name": {"like": payload['search']}}
    page = {"page": payload['page'], "size": payload['size']}
    db = await model['users'].find(
        query=query,
        fields=['*'],
        sort_by='-id',
        page=page,
    )
    return RESPONSE(False, db.data, 'custom-list')
| 2.53125 | 3 |
src/mobile_seg/modules/wrapper.py | murez/mobile-semantic-segmentation | 713 | 12772678 | <reponame>murez/mobile-semantic-segmentation
import torch
import torch.nn as nn
from mobile_seg.modules.net import MobileNetV2_unet
class Wrapper(nn.Module):
    """Wraps a segmentation U-Net with /scale pre- and *scale post-scaling.

    The network's output is repeated three times along the channel axis so
    the result can be consumed as an RGB-like tensor.
    """

    def __init__(
            self,
            unet: MobileNetV2_unet,
            scale: float = 255.
    ):
        super().__init__()
        self.unet = unet
        self.scale = scale

    def forward(self, x):
        out = self.unet(x / self.scale) * self.scale
        return torch.cat((out, out, out), dim=1)
# %%
if __name__ == '__main__':
    # %%
    # Smoke test: wrap a fresh U-Net and push a dummy 224x224 RGB batch through.
    model = MobileNetV2_unet()
    wrapper = Wrapper(model)
    inputs = torch.randn((1, 3, 224, 224))
    out = wrapper(inputs)
    print(out.shape)
| 2.703125 | 3 |
applications/tests/conftest.py | Sukriva/tilavarauspalvelu-core | 0 | 12772679 | import datetime
import pytest
import pytz
from applications.models import (
Application,
ApplicationEvent,
ApplicationEventSchedule,
ApplicationRound,
)
@pytest.fixture
def default_application_round() -> ApplicationRound:
    """An ApplicationRound open and displayed 2020-01-01 .. 2020-08-30 (UTC)."""
    start = datetime.datetime(year=2020, month=1, day=1, tzinfo=pytz.UTC)
    end = datetime.datetime(year=2020, month=8, day=30, tzinfo=pytz.UTC)
    return ApplicationRound.objects.create(
        application_period_begin=start,
        application_period_end=end,
        reservation_period_begin=start.date(),
        reservation_period_end=end.date(),
        public_display_begin=start,
        public_display_end=end,
    )
@pytest.fixture
def minimal_application(default_application_round) -> Application:
    """An Application carrying only the round reference."""
    round_id = default_application_round.id
    return Application.objects.create(application_round_id=round_id)
@pytest.fixture
def recurring_application_event(minimal_application) -> ApplicationEvent:
    """A weekly (non-biweekly) football event running Jan-Feb 2020."""
    return ApplicationEvent.objects.create(
        application=minimal_application,
        name="Football",
        num_persons=10,
        events_per_week=2,
        biweekly=False,
        min_duration=datetime.timedelta(hours=1),
        max_duration=datetime.timedelta(hours=2),
        begin=datetime.date(year=2020, month=1, day=1),
        end=datetime.date(year=2020, month=2, day=28),
    )
@pytest.fixture
def recurring_bi_weekly_application_event(minimal_application) -> ApplicationEvent:
    """A biweekly soccer event running Jan-Mar 2020."""
    return ApplicationEvent.objects.create(
        application=minimal_application,
        name="Soccer",
        num_persons=10,
        events_per_week=2,
        biweekly=True,
        min_duration=datetime.timedelta(hours=1),
        max_duration=datetime.timedelta(hours=2),
        begin=datetime.date(year=2020, month=1, day=1),
        end=datetime.date(year=2020, month=3, day=31),
    )
@pytest.fixture
def scheduled_for_tuesday(recurring_application_event) -> ApplicationEventSchedule:
    """A 10:00-12:00 schedule attached to the recurring event.

    day=1 is Tuesday per the fixture name (0-based weekdays).
    """
    return ApplicationEventSchedule.objects.create(
        application_event=recurring_application_event,
        day=1,
        begin="10:00",
        end="12:00",
    )
| 2.09375 | 2 |
jfk_fling/kernel.py | pelson/jfk-fling | 1 | 12772680 | from ipykernel.kernelbase import Kernel
import tempfile
import os
from .realtime_subprocess import RealTimeSubprocess
from .fprogram import FortranGatherer
class FortranKernel(Kernel):
    """Jupyter kernel that compiles and runs Fortran (F2008) cells.

    Each executed cell is merged into an accumulated program via
    FortranGatherer, compiled with $FC into a temporary binary, and the
    binary's output is streamed back to the notebook.
    """
    implementation = 'jfk-fling'
    implementation_version = '0.1'
    language = 'Fortran'
    language_version = 'F2008'
    language_info = {'name': 'fortran',
                     'mimetype': 'text/plain',
                     'file_extension': '.f90'}
    banner = ("Fortran kernel.\n"
              "Uses $FC, compiles in F2008, and creates source code "
              "files and executables in temporary folder.\n")

    def __init__(self, *args, **kwargs):
        super(FortranKernel, self).__init__(*args, **kwargs)
        # Accumulates sub-programs from previously executed cells.
        self.gatherer = FortranGatherer()
        # Temp files removed at shutdown (see cleanup_files).
        self.files_for_cleanup = []
        # Bodies of %fragment cells, prepended to the next normal cell.
        self.fragment_accumulator = []

    def cleanup_files(self):
        """Remove all the temporary files created by the kernel"""
        for fname in self.files_for_cleanup:
            os.remove(fname)

    def new_temp_file(self, **kwargs):
        """Create a new temp file to be deleted when the kernel shuts down"""
        # delete=False so the file survives until cleanup_files() runs.
        fh = tempfile.NamedTemporaryFile(delete=False, mode='w', **kwargs)
        self.files_for_cleanup.append(fh.name)
        return fh

    def _write_to_stdout(self, contents):
        # Publish text on the notebook's stdout stream.
        self.send_response(
            self.iopub_socket, 'stream', {'name': 'stdout', 'text': contents})

    def _write_to_stderr(self, contents):
        # Publish text on the notebook's stderr stream.
        self.send_response(
            self.iopub_socket, 'stream', {'name': 'stderr', 'text': contents})

    def create_jupyter_subprocess(self, cmd):
        # Run *cmd*, forwarding its stdout/stderr to the notebook live.
        return RealTimeSubprocess(
            cmd,
            lambda contents: self._write_to_stdout(contents.decode()),
            lambda contents: self._write_to_stderr(contents.decode()))

    def compile_with_gfortran(self, source_filename, binary_filename):
        """Start compiling *source_filename* into *binary_filename*.

        Uses $FC (default: gfortran) with -std=f2008 plus any $FFLAGS.
        Returns the running RealTimeSubprocess.
        """
        compiler = os.environ.get('FC', 'gfortran')
        fflags = os.environ.get('FFLAGS', '').split(' ')
        args = ([compiler, source_filename, '-std=f2008'] +
                fflags +
                ['-o', binary_filename])
        return self.create_jupyter_subprocess(args)

    def split_magics(self, code):
        """Split leading %-magic lines from the Fortran body of a cell.

        Returns (magics, code): the magic names (without '%') and the
        remaining source joined back into a single string.
        """
        code_lines = []
        magics = []
        lines = code.split('\n')
        state = 'magics'
        for line in lines:
            if state == 'magics':
                if line.startswith('%'):
                    magics.append(line.lstrip('%'))
                    continue
                elif not line:
                    # Blank lines between magics are ignored.
                    continue
                # The first real line of code ends the magics section.
                state = 'code'
            code_lines.append(line)
        return magics, '\n'.join(code_lines)

    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Handle one cell: merge it, compile, and run the program.

        Supported magics: %code (print the merged source), %clear (reset
        the gatherer), %fragment (defer this cell; it is prepended to the
        next executed cell).
        NOTE(review): errors are reported on stderr but the reply status
        stays 'ok' - confirm this is the intended protocol behaviour.
        """
        response_template = {
            'status': 'ok', 'execution_count': self.execution_count,
            'payload': [], 'user_expressions': {}}
        fragment = False
        magics, code = self.split_magics(code)
        if 'code' in magics:
            if code.strip():
                self._write_to_stderr(
                    'The %code magic must not have code body.')
            self._write_to_stdout(self.gatherer.to_program())
            return response_template
        elif 'clear' in magics:
            self.gatherer.clear()
        elif 'fragment' in magics:
            # NOTE(review): this branch returns immediately, so `fragment`
            # is always False afterwards and the `if fragment:` check
            # further down is dead code - confirm intended.
            fragment = True
            self.fragment_accumulator.append(code)
            return response_template
        if self.fragment_accumulator:
            # Prepend any deferred %fragment cells to this one.
            code = '\n'.join(self.fragment_accumulator + [code])
            self.fragment_accumulator = []
        try:
            self.gatherer.extend(code)
        except Exception as exception:
            msg = '[FAILED TO PARSE:] {}'.format(str(exception))
            self._write_to_stderr(msg)
            return response_template
        program_code = self.gatherer.to_program()
        with self.new_temp_file(suffix='.f90') as source_file:
            source_file.write(program_code)
            source_file.flush()
        if fragment:
            return response_template
        with self.new_temp_file(suffix='.out') as binary_file:
            p = self.compile_with_gfortran(
                source_file.name, binary_file.name)
            # Stream compiler output until the process exits.
            while p.poll() is None:
                p.write_contents()
            p.write_contents()
            if p.returncode != 0:  # Compilation failed
                # Remove the most recently added sub-program.
                del self.gatherer.programs[-1]
                msg = ("[Fortran kernel] gfortran exited with code {}, "
                       "the executable will not be executed"
                       .format(p.returncode))
                self._write_to_stderr(msg)
                return response_template
        # Run the freshly built executable, streaming its output.
        p = self.create_jupyter_subprocess(binary_file.name)
        while p.poll() is None:
            p.write_contents()
        p.write_contents()
        if p.returncode != 0:
            # e.g. segfault...
            # Roll back the last sub-program so the broken code is dropped.
            del self.gatherer.programs[-1]
            msg = ("[Fortran kernel] Executable exited with code {}"
                   "".format(p.returncode))
            self._write_to_stderr(msg)
        return response_template

    def do_shutdown(self, restart):
        # Cleanup the created source code files and executables when
        # shutting down the kernel.
        self.cleanup_files()
| 2.375 | 2 |
summoner.py | cpmoni/sw-enhance | 0 | 12772681 | import json
from monster import Monster
from rune import Rune, Grind, Gem
from constant_maps import *
class Summoner:
    """Summoners-War account data loaded from an exported JSON file.

    Builds indexes of monsters, runes, grindstones and gems, and offers
    simple analyses (reappraisal candidates, grindstone applicability).
    The `rune` lookup table used in analyze_grinds presumably comes from
    the star-import of constant_maps - confirm.
    """
    def __init__(self,filename):
        self.data_file = filename
        # NOTE(review): this f-string has no placeholder - probably meant
        # f'Loading data from {filename}'; confirm.
        print(f'Loading data from (unknown)')
        with open(filename) as fin:
            data = json.load(fin)
        self.mons = {}   # monster id -> Monster
        self.runes = {}  # rune id -> Rune
        self.grinds = []
        self.gems = []
        self.parse_mons(data['unit_list'])
        self.parse_runes(data['runes'])  # inventory runes (not on a monster)
        self.parse_grinds(data['rune_craft_item_list'])
        self.grinds.sort(key=lambda x: (x.set, x.stat, -x.grade))
    def parse_mons(self,mon_list):
        # Index each monster and the runes currently equipped on it.
        for mon in mon_list:
            m = Monster(mon)
            self.mons[m.id] = m
            self.parse_runes(mon['runes'])
    def parse_grinds(self,rune_craft_list):
        # craft_type 1/3 map to Gem objects, everything else to Grind.
        for gg in rune_craft_list:
            if gg['craft_type'] == 1 or gg['craft_type'] == 3: self.gems.append(Gem(gg['craft_type_id'],gg['sell_value']))
            else: self.grinds.append(Grind(gg['craft_type_id'],gg['sell_value']))
    def parse_runes(self,rune_list):
        # Index runes by id regardless of where they live.
        for rune in rune_list:
            r = Rune(rune)
            self.runes[r.id] = r
    def find_rune(self,rune_id):
        """Return the Monster holding the rune, or the string 'Inventory'."""
        location = self.runes[rune_id].location
        if location != 0:
            location = self.mons[location]
        else: location = 'Inventory'
        return location
    def print_runes(self):
        # Dump every rune with its substats and current location.
        for rune_id in self.runes:
            print(self.runes[rune_id].str_with_subs())
            print(self.find_rune(rune_id))
            print()
    def analyze_reapps(self,n=10):
        """Print the top-*n* runes by reappraisal score.

        NOTE(review): raises IndexError when fewer than n runes have a
        positive reapp score - confirm whether that edge case matters.
        """
        poss = []
        for rune_id in self.runes:
            s = self.runes[rune_id].reapp
            if s > 0:
                poss.append((rune_id,s))
        poss.sort(key=lambda x: x[1],reverse=True)
        for i in range(n):
            print('Option',i+1)
            rune_id, s = poss[i]
            print(self.runes[rune_id].str_with_subs())
            print('On {}, Score {}'.format(self.find_rune(rune_id),s))
            print(rune_id)
    def analyze_grinds(self):
        """For each owned grindstone type, list runes it could improve."""
        counts = {}
        poss = {}
        for grind in self.grinds:
            # Group grindstones by (set, stat, grade).
            k = (grind.set,grind.stat,grind.grade)
            if k in counts: counts[k] += 1
            else:
                counts[k] = 1
                poss[k] = []
                for rune_id in self.runes:
                    r = self.runes[rune_id]
                    # Skip low-level runes and good reappraisal candidates.
                    if r.level < 12 or r.reapp > .5: continue
                    # set 99 appears to act as a wildcard set - confirm.
                    if r.set != grind.set and grind.set != 99: continue
                    for sub in r.subs:
                        if sub['stat'] == grind.stat:
                            if sub['grind'] < grind.get_max():
                                poss[k].append((r,grind.get_max()-sub['grind'],grind.stat))
        for k in poss:
            sset, stat, grade = k
            s = 'Grind'
            c = counts[k]
            if c > 1: s+= 's'
            print('{} {} {} {} {}'.format(c,rune['quality'][grade],rune['sets'][sset],rune['effectTypes'][stat],s))
            runes = poss[k]
            # Biggest potential improvement first, then set/location.
            runes.sort(key=lambda x: (-x[1],-(x[0].set),-(x[0].location)))
            for x in runes:
                print(f"{x[0]} at {self.find_rune(x[0].id)} can improve {x[1]} {rune['effectTypes'][x[2]]}")
            print()
| 3.015625 | 3 |
xontrib/mpl.py | jianlingzhong/xonsh | 0 | 12772682 | <reponame>jianlingzhong/xonsh<gh_stars>0
"""Matplotlib xontribution."""
from xonsh.proc import foreground as foreground
__all__ = ()  # nothing exported; the xontrib only registers the alias below

@foreground
def mpl(args, stdin=None):
    """Hooks to matplotlib"""
    # Imported lazily so matplotlib is only loaded when the alias is used.
    from xontrib.mplhooks import show
    show()

# `aliases` is provided by xonsh's execution context when the xontrib loads.
aliases['mpl'] = mpl
| 1.539063 | 2 |
devito/core/intel.py | alisiahkoohi/devito | 0 | 12772683 | from devito.core.cpu import CPU64Operator, CPU64OpenMPOperator
from devito.exceptions import InvalidOperator
from devito.passes.clusters import (Blocking, Lift, cire, cse, eliminate_arrays,
extract_increments, factorize, fuse, optimize_pows)
from devito.tools import timed_pass
__all__ = ['Intel64Operator', 'Intel64OpenMPOperator', 'Intel64FSGOperator',
'Intel64FSGOpenMPOperator']
# Plain Intel64 targets reuse the generic CPU64 operators unchanged.
Intel64Operator = CPU64Operator
Intel64OpenMPOperator = CPU64OpenMPOperator
class Intel64FSGOperator(Intel64Operator):

    """
    Operator with performance optimizations tailored "For Small Grids" (FSG).
    """

    @classmethod
    def _normalize_kwargs(cls, **kwargs):
        """Validate options; `min-storage` is incompatible with this mode."""
        kwargs = super(Intel64FSGOperator, cls)._normalize_kwargs(**kwargs)
        if kwargs['options']['min-storage']:
            raise InvalidOperator('You should not use `min-storage` with `advanced-fsg '
                                  ' as they work in opposite directions')
        return kwargs

    @classmethod
    @timed_pass(name='specializing.Clusters')
    def _specialize_clusters(cls, clusters, **kwargs):
        """Run the FSG cluster-level optimization pipeline.

        The pass order below is significant: each pass consumes the
        previous one's output.
        """
        options = kwargs['options']
        platform = kwargs['platform']
        sregistry = kwargs['sregistry']
        # Toposort+Fusion (the former to expose more fusion opportunities)
        clusters = fuse(clusters, toposort=True)
        # Hoist and optimize Dimension-invariant sub-expressions
        clusters = cire(clusters, 'invariants', sregistry, options, platform)
        clusters = Lift().process(clusters)
        # Reduce flops (potential arithmetic alterations)
        clusters = extract_increments(clusters, sregistry)
        clusters = cire(clusters, 'sops', sregistry, options, platform)
        clusters = factorize(clusters)
        clusters = optimize_pows(clusters)
        # The previous passes may have created fusion opportunities, which in
        # turn may enable further optimizations
        clusters = fuse(clusters)
        clusters = eliminate_arrays(clusters)
        # Reduce flops (no arithmetic alterations)
        clusters = cse(clusters, sregistry)
        # Blocking to improve data locality
        clusters = Blocking(options).process(clusters)
        return clusters
class Intel64FSGOpenMPOperator(Intel64FSGOperator, CPU64OpenMPOperator):
    """FSG cluster pipeline combined with the OpenMP IET specialization."""
    _specialize_iet = CPU64OpenMPOperator._specialize_iet
| 1.9375 | 2 |
userbot/modules/kinghelper.py | raisnug980/King-Userbot | 1 | 12772684 | <gh_stars>1-10
""" Userbot module for other small commands. """
from platform import uname

from userbot import CMD_HELP, ALIVE_NAME
from userbot.events import register
# ================= CONSTANT =================
# Display name for replies; falls back to the machine's hostname (via
# platform.uname, imported above) when ALIVE_NAME is not configured.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node
# ============================================
@register(outgoing=True, pattern="^.khelp$")
async def usit(e):
    """`.khelp` command: edit the message into the bot's help/contact links."""
    await e.edit(
        f" ╔════════════╗\n **__⚡️BANTUAN⚡️__** \n╚════════════╝ \n"
        f"**Hai King {DEFAULTUSER} Kalau Anda Tidak Tau Perintah Untuk Memerintah Ku Ketik** `.help` Atau Bisa Minta Bantuan Ke:\n"
        "═⎆ developer : [King Apis](t.me/PacarFerdilla) \n"
        "═⎆ Repository : [King-Userbot](https://github.com/apisuserbot/King-Userbot) \n"
        "═⎆ Instragam : [Instagram King Apis](Instagram.com/apis_goodboy) \n"
        "═⎆ Grup Support : [King Userbot Support](https://t.me/KingUserbotSupport)"
    )
@register(outgoing=True, pattern="^.vars$")
async def var(m):
    """`.vars` command: edit the message into a link to the vars list."""
    await m.edit(
        f" ╔════════════╗\n **__⚡️DAFTAR VARS⚡️__** \n╚════════════╝ \n"
        f"**Disini Daftar Vars Dari King** {DEFAULTUSER} :\n"
        "═⎆ Daftar Vars : [DAFTAR VARS](https://raw.githubusercontent.com/apisuserbot/King-Userbot/King-Userbot/varshelper.txt)"
    )
CMD_HELP.update(
{
"helper": "**✘ Plugin :** `Helper`\
\n\n • **Perintah :** `.khelp`\
\n • **Function : **Bantuan Untuk ⚡️𝗞𝗶𝗻𝗴-𝙐𝙎𝙀𝙍𝘽𝙊𝙏⚡️\
\n\n • **Perintah :** `.vars`\
\n • **Function : **Melihat Daftar Vars\
"
}
)
| 2.1875 | 2 |
script/maeplot/datafile.py | Rookfighter/MultiAgentExploration | 4 | 12772685 | <gh_stars>1-10
import os
class DataFile:
    """Reads/writes whitespace-separated column data files.

    Lines beginning with '#' are treated as comments and preserved on save.
    Data is stored column-major in self.data_ as lists of *strings*;
    getDataAs/setDataAs convert per column using a format string with one
    character per column: 'f' float, 'l' long, 'i' int.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all loaded data, comments and the source path."""
        self.data_ = []
        self.comments_ = []
        self.loadedFrom_ = ""

    def load(self, filepath):
        """Load *filepath*, splitting each non-comment line into columns."""
        assert(os.path.isfile(filepath))
        self.reset()
        self.loadedFrom_ = filepath
        with open(self.loadedFrom_, 'r') as f:
            for line in f:
                # only lines that are no comments are valid
                if line[0] == '#':
                    self.comments_.append(line)
                else:
                    values = line.split(' ')
                    # lazily create one column list per value on the first
                    # data line; later lines must have the same width
                    if len(self.data_) == 0:
                        while len(self.data_) < len(values):
                            self.data_.append([])
                    assert(len(values) == len(self.data_))
                    # add data strings to correct column
                    for value, dataList in zip(values, self.data_):
                        dataList.append(value)

    def save(self, filepath):
        """Write comments followed by the data rows to *filepath*."""
        with open(filepath, 'w') as f:
            # add comments in top of file
            commentContent = ""
            for comment in self.comments_:
                commentContent += comment + "\n"
            commentContent.strip()
            f.write(commentContent)
            # after comments add data rows (transpose column-major storage)
            if self.data_:
                dataContent = ""
                for line in zip(*self.data_):
                    for value in line:
                        dataContent += value + " "
                    dataContent = dataContent.strip()
                    dataContent += "\n"
                dataContent = dataContent.strip()
                f.write(dataContent)

    def getDataAs(self, formatStr):
        """Return the stored columns converted per *formatStr* (one char/column)."""
        assert(len(self.data_) == len(formatStr))
        result = []
        while len(result) < len(self.data_):
            result.append([])
        for fmt, dataColumn, resultColumn in zip(formatStr, self.data_, result):
            for value in dataColumn:
                resultColumn.append(self.strToNum(value, fmt))
        return result

    def setDataAs(self, data, formatStr):
        """Replace the stored columns with *data*, stringified per *formatStr*."""
        assert(len(data) == len(formatStr))
        self.data_ = []
        while len(self.data_) < len(data):
            self.data_.append([])
        for fmt, dataColumn, toSetColumn in zip(formatStr, self.data_, data):
            for value in toSetColumn:
                dataColumn.append(self.numToStr(value, fmt))

    def strToNum(self, numString, fmt):
        """Convert one string to a number according to *fmt* ('f'/'l'/'i')."""
        if fmt == 'f':
            return float(numString)
        elif fmt == 'l':
            # NOTE(review): `long` exists only on Python 2; under Python 3
            # this branch raises NameError - confirm the target interpreter.
            return long(numString)
        elif fmt == 'i':
            return int(numString)
        else:
            # BUG FIX: the message previously concatenated the *builtin*
            # `format` function (raising TypeError) instead of `fmt`.
            raise ValueError("unknown format '" + fmt + "'")

    def numToStr(self, num, fmt):
        """Convert one number to its string form according to *fmt*."""
        if fmt == 'f':
            return repr(num)
        elif fmt == 'l':
            return str(num)
        elif fmt == 'i':
            return str(num)
        else:
            # BUG FIX: same wrong-variable error message as strToNum.
            raise ValueError("unknown format '" + fmt + "'")

    def addComment(self, comment):
        """Queue a comment line to be written at the top on save."""
        self.comments_.append(comment)
sdk/yapily/models/payment_status_details.py | DarrahK/yapily-sdk-python | 0 | 12772686 | # coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
The version of the OpenAPI document: 1.157.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from yapily.configuration import Configuration
class PaymentStatusDetails(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> OpenAPI type, consumed by the generated (de)serializer.
    openapi_types = {
        'status': 'str',
        'status_reason': 'str',
        'status_reason_description': 'str',
        'status_update_date': 'datetime',
        'multi_authorisation_status': 'MultiAuthorisation',
        'iso_status': 'PaymentIsoStatus'
    }

    # Attribute name -> JSON key in the API payload.
    attribute_map = {
        'status': 'status',
        'status_reason': 'statusReason',
        'status_reason_description': 'statusReasonDescription',
        'status_update_date': 'statusUpdateDate',
        'multi_authorisation_status': 'multiAuthorisationStatus',
        'iso_status': 'isoStatus'
    }
    def __init__(self, status=None, status_reason=None, status_reason_description=None, status_update_date=None, multi_authorisation_status=None, iso_status=None, local_vars_configuration=None):  # noqa: E501
        """PaymentStatusDetails - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._status = None
        self._status_reason = None
        self._status_reason_description = None
        self._status_update_date = None
        self._multi_authorisation_status = None
        self._iso_status = None
        self.discriminator = None

        # None-valued arguments are skipped so the corresponding private
        # fields stay None (partial API payloads).
        if status is not None:
            self.status = status
        if status_reason is not None:
            self.status_reason = status_reason
        if status_reason_description is not None:
            self.status_reason_description = status_reason_description
        if status_update_date is not None:
            self.status_update_date = status_update_date
        if multi_authorisation_status is not None:
            self.multi_authorisation_status = multi_authorisation_status
        if iso_status is not None:
            self.iso_status = iso_status
    @property
    def status(self):
        """Gets the status of this PaymentStatusDetails.  # noqa: E501


        :return: The status of this PaymentStatusDetails.  # noqa: E501
        :rtype: str
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this PaymentStatusDetails.


        :param status: The status of this PaymentStatusDetails.  # noqa: E501
        :type: str
        """
        # Generated enum guard: raises ValueError for values outside the
        # OpenAPI schema (when client-side validation is enabled).
        allowed_values = ["PENDING", "FAILED", "DECLINED", "COMPLETED", "COMPLETED_SETTLEMENT_IN_PROCESS", "EXPIRED", "UNKNOWN", "ACTIVE", "INACTIVE"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and status not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `status` ({0}), must be one of {1}"  # noqa: E501
                .format(status, allowed_values)
            )

        self._status = status
@property
def status_reason(self):
"""Gets the status_reason of this PaymentStatusDetails. # noqa: E501
:return: The status_reason of this PaymentStatusDetails. # noqa: E501
:rtype: str
"""
return self._status_reason
@status_reason.setter
def status_reason(self, status_reason):
"""Sets the status_reason of this PaymentStatusDetails.
:param status_reason: The status_reason of this PaymentStatusDetails. # noqa: E501
:type: str
"""
self._status_reason = status_reason
@property
def status_reason_description(self):
"""Gets the status_reason_description of this PaymentStatusDetails. # noqa: E501
:return: The status_reason_description of this PaymentStatusDetails. # noqa: E501
:rtype: str
"""
return self._status_reason_description
@status_reason_description.setter
def status_reason_description(self, status_reason_description):
"""Sets the status_reason_description of this PaymentStatusDetails.
:param status_reason_description: The status_reason_description of this PaymentStatusDetails. # noqa: E501
:type: str
"""
self._status_reason_description = status_reason_description
@property
def status_update_date(self):
"""Gets the status_update_date of this PaymentStatusDetails. # noqa: E501
:return: The status_update_date of this PaymentStatusDetails. # noqa: E501
:rtype: datetime
"""
return self._status_update_date
@status_update_date.setter
def status_update_date(self, status_update_date):
"""Sets the status_update_date of this PaymentStatusDetails.
:param status_update_date: The status_update_date of this PaymentStatusDetails. # noqa: E501
:type: datetime
"""
self._status_update_date = status_update_date
@property
def multi_authorisation_status(self):
"""Gets the multi_authorisation_status of this PaymentStatusDetails. # noqa: E501
:return: The multi_authorisation_status of this PaymentStatusDetails. # noqa: E501
:rtype: MultiAuthorisation
"""
return self._multi_authorisation_status
@multi_authorisation_status.setter
def multi_authorisation_status(self, multi_authorisation_status):
"""Sets the multi_authorisation_status of this PaymentStatusDetails.
:param multi_authorisation_status: The multi_authorisation_status of this PaymentStatusDetails. # noqa: E501
:type: MultiAuthorisation
"""
self._multi_authorisation_status = multi_authorisation_status
@property
def iso_status(self):
"""Gets the iso_status of this PaymentStatusDetails. # noqa: E501
:return: The iso_status of this PaymentStatusDetails. # noqa: E501
:rtype: PaymentIsoStatus
"""
return self._iso_status
@iso_status.setter
def iso_status(self, iso_status):
"""Sets the iso_status of this PaymentStatusDetails.
:param iso_status: The iso_status of this PaymentStatusDetails. # noqa: E501
:type: PaymentIsoStatus
"""
self._iso_status = iso_status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PaymentStatusDetails):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, PaymentStatusDetails):
return True
return self.to_dict() != other.to_dict()
| 2.203125 | 2 |
util/tools.py | imzengyang/appiumexample | 0 | 12772687 | import os
class Tools(object):
def getRootPath(self):
'''
获取上级目录的路径
:return:
'''
rootpath = os.path.dirname(os.path.abspath(__file__))
while rootpath:
if os.path.exists(os.path.join(rootpath, 'readme.md')):
break
rootpath = rootpath[0:rootpath.rfind(os.path.sep)]
return rootpath
def main():
tools = Tools()
rootpath = tools.getRootPath()
apkpath = os.path.join(rootpath,'apks','cnode.apk')
print(apkpath)
if __name__ == '__main__':
d=Tools().getRootPath()
print(d)
main() | 2.90625 | 3 |
src/utils.py | EricvanSchaik/opiniondigest | 0 | 12772688 | # Copyright 2019 <NAME>, Inc. and the University of Edinburgh. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import commentjson
class Config:
def __init__(self,
filepath: str) -> None:
self.gold_name = False
if ".csv" in filepath:
# Assume aggregated file
filename = os.path.basename(filepath)
if "gold" in filename:
conf_name = "-".join(filename.split(".")[0].split("_")[2:])
self.gold_name = True
else:
conf_name = "-".join(filename.split(".")[0].split("_")[1:])
conf_type = "aggregate"
self.is_csv = True
self.config = None
else:
filename = os.path.basename(filepath)
names = filename.split("_")
conf_type = names[0]
conf_name = ".".join(
"_".join(names[1:]).split(".")[:-1])
self.is_csv = False
self.load_config(filepath)
if len(conf_name) == 0:
raise ValueError("Config name cannot be empty: {}".format(filename))
if conf_type in ["prepare",
"train",
"aggregate",
"generate"]:
self.conf_type = conf_type
self.conf_name = conf_name
else:
raise ValueError("Invalid config file name: {}".format(filename))
def load_config(self,
filepath: str) -> None:
with open(filepath, "r") as fin:
lines = fin.readlines()
json_str = "\n".join(lines)
self.config = commentjson.loads(json_str)
def get_agg_name(self):
""" Generate aggregation target file name."""
assert self.conf_type == "aggregate"
if self.is_csv:
if self.gold_name:
agg_name = "_gold_{}".format(self.conf_name.replace('-', '_'))
else:
agg_name = "_{}".format(self.conf_name.replace('-', '_'))
else:
agg_name = "_{}_{}_{}_{}_{}_{}".format(self.config["num_review"],
self.config["top_k"],
"all",
self.config["sentiment"],
self.config["embedding"][-3:],
str(int(self.config["threshold"] * 10))
)
"""
agg_name = "_{}_{}_{}_{}_{}".format(self.config["num_review"],
self.config["top_k"],
self.config["attribute"],
self.config["sentiment"],
self.config["embedding"][-3:],
str(int(self.config["threshold"] * 10))
)
"""
return agg_name
def __getitem__(self,
key: str):
if key not in self.config:
raise KeyError(key)
return self.config[key]
def __contains__(self,
key: str):
return key in self.config
if __name__ == "__main__":
conf = Config("train_test.json")
| 2.46875 | 2 |
covidfaq/converter.py | stanford-oval/covid-faq | 0 | 12772689 | <reponame>stanford-oval/covid-faq
#
# Copyright 2021 The Board of Trustees of the Leland Stanford Junior University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: kevintangzero
import json
import tablib
from tablib import Dataset
from .data import Dataset
def parse_argv(parser):
parser.add_argument('-d', '--data', type=str, required=True,
help="Path to data directory")
parser.add_argument('-f', '--format', type=str, required=True, choices=['json', 'xls'])
parser.add_argument('-o', '--output', type=str, required=True)
def main(argv):
dataset = Dataset(argv.data)
data = tablib.Dataset(headers=['Question', 'Answer'])
for q, a in dataset:
data.append((q, a))
if argv.format == 'json':
with open(argv.output, 'w') as f:
json.dump(data.export('json'), f, indent=4)
else:
with open(argv.output, 'wb') as f:
f.write(data.export('xls'))
| 2.734375 | 3 |
code/model_routines.py | alnebe/caspFilter | 0 | 12772690 | from tensorflow.keras import backend as K
from tensorflow.keras.metrics import SpecificityAtSensitivity
from tensorflow.keras.metrics import Precision
from tensorflow.keras.metrics import Recall, FalsePositives, FalseNegatives, TruePositives, TrueNegatives
def recall(y_true, y_pred):
m = Recall()
m.update_state(y_true, y_pred)
pre_res = m.result()
res = pre_res.numpy()
return res
def f1(y_true, y_pred):
m = Precision()
m.update_state(y_true, y_pred)
pre_precision = m.result()
precision = pre_precision.numpy()
m = Recall()
m.update_state(y_true, y_pred)
pre_recall = m.result()
recall = pre_recall.numpy()
return 2 * ((precision * recall) / (precision + recall + K.epsilon()))
def balanced_acc(y_true, y_pred):
selectivity = recall(y_true, y_pred)
specificity = SpecificityAtSensitivity(selectivity)
specificity.update_state(y_true, y_pred)
specificity = specificity.result().numpy()
return (selectivity + specificity) / 2
def new_bac(y_true, y_pred):
m = Recall()
m.update_state(y_true, y_pred)
pre_recall = m.result()
recall = pre_recall.numpy()
n = FalsePositives()
n.update_state(y_true, y_pred)
pre_res_2 = n.result()
fp = pre_res_2.numpy()
l = TrueNegatives()
l.update_state(y_true, y_pred)
pre_res = l.result()
tn = pre_res.numpy()
specificity = tn / (tn + fp)
return (recall + specificity) / 2 | 2.34375 | 2 |
parsers/US_TX.py | Gazer75/electricitymap-contrib | 1 | 12772691 | <filename>parsers/US_TX.py<gh_stars>1-10
#!/usr/bin/env python3
"""Parser for the ERCOT area of the United States. (~85% of Texas)"""
import csv
import io
import logging
import zipfile
import arrow
import requests
from lxml import html
from .lib.exceptions import ParserException
# This xpath gets the second cell in a row which has a cell that contains parameterized text
REAL_TIME_DATA_XPATH = "//tr[td[contains(text(),'{}')]]/td[2]/text()"
SOLAR_REAL_TIME_DIRECTORY_URL = 'http://mis.ercot.com/misapp/GetReports.do?reportTypeId=13484'
WIND_REAL_TIME_DIRECTORY_URL = 'http://mis.ercot.com/misapp/GetReports.do?reportTypeId=13071'
REAL_TIME_URL = 'http://www.ercot.com/content/cdr/html/real_time_system_conditions.html'
BASE_ZIP_URL = 'http://mis.ercot.com/'
ELECTRICAL_TIES = [
"DC_E",
"DC_L",
"DC_N",
"DC_R",
"DC_S"
]
def get_zipped_csv_data(logger, directory_url, session=None):
"""Returns 5 minute generation data in json format."""
s = session or requests.session()
response = s.get(directory_url)
if response.status_code != 200 or not response.content:
raise ParserException('US-TX', 'Response code: {0}'.format(response.status_code))
html_tree = html.fromstring(response.content)
# This xpath gets the first row to contain 'csv' and then the zip link
most_recent_csv_zip_url = BASE_ZIP_URL + html_tree.xpath("//tr[td[contains(text(),'csv')]]/td/div/a/@href")[0]
response = s.get(most_recent_csv_zip_url)
if response.status_code != 200 or not response.content:
raise ParserException('US-TX', 'Response code: {0}'.format(response.status_code))
zip_file = zipfile.ZipFile(io.BytesIO(response.content))
reader = csv.reader(io.StringIO(zip_file.read(zip_file.namelist()[0]).decode('utf-8')))
next(reader) # skip header
row = next(reader) # only get first row
return arrow.get(arrow.get(row[0], 'MM/DD/YYYY HH:mm').datetime, 'US/Central').datetime, float(row[1])
def get_realtime_data(logger, session=None):
s = session or requests.session()
response = s.get(REAL_TIME_URL)
if response.status_code != 200 or not response.content:
raise ParserException('US-TX', 'Response code: {0}'.format(response.status_code))
html_tree = html.fromstring(response.content)
demand = float(html_tree.xpath(REAL_TIME_DATA_XPATH.format("Actual System Demand"))[0])
tie_dict = {}
for tie in ELECTRICAL_TIES:
tie_dict[tie] = float(html_tree.xpath(REAL_TIME_DATA_XPATH.format(tie))[0])
# This xpath gets the text from the timestamp
date_string = str(html_tree.xpath("//div[contains(@class,'schedTime')]/text()")[0]).replace("Last Updated: ", "")
return arrow.get(arrow.get(date_string, 'MMM DD, YYYY HH:mm:ss').datetime, 'US/Central').datetime, demand, tie_dict
def fetch_production(zone_key='US-TX', session=None, target_datetime=None, logger=logging.getLogger(__name__)):
"""
Requests the last known production mix (in MW) of a given country
Arguments:
zone_key (optional) -- used in case a parser is able to fetch multiple countries
session (optional) -- request session passed in order to re-use an existing session
target_datetime (optional) -- used if parser can fetch data for a specific day
logger (optional) -- handles logging when parser is run as main
Return:
A dictionary in the form:
{
'zoneKey': 'FR',
'datetime': '2017-01-01T00:00:00Z',
'production': {
'biomass': 0.0,
'coal': 0.0,
'gas': 0.0,
'hydro': 0.0,
'nuclear': null,
'oil': 0.0,
'solar': 0.0,
'wind': 0.0,
'geothermal': 0.0,
'unknown': 0.0
},
'storage': {
'hydro': -10.0,
},
'source': 'mysource.com'
}
"""
if target_datetime:
raise NotImplementedError('This parser is not yet able to parse past dates')
wind_datetime, wind = get_zipped_csv_data(logger, WIND_REAL_TIME_DIRECTORY_URL, session=session)
solar_datetime, solar = get_zipped_csv_data(logger, SOLAR_REAL_TIME_DIRECTORY_URL, session=session)
wind_solar_timedelta = (wind_datetime - solar_datetime).total_seconds() / 60
if abs(wind_solar_timedelta) > 4: # in case one was grabbed before the other was posted
if wind_solar_timedelta > 0: # if solar came earlier, poll it again
solar_datetime, solar = get_zipped_csv_data(logger, SOLAR_REAL_TIME_DIRECTORY_URL, session=session)
else: # if wind came earlier poll it again
wind_datetime, wind = get_zipped_csv_data(logger, WIND_REAL_TIME_DIRECTORY_URL, session=session)
if solar < 0:
logger.warn('Solar production for US_TX was reported as less than 0 and was clamped')
solar = 0.0
if wind < 0:
logger.warn('Wind production for US_TX was reported as less than 0 and was clamped')
wind = 0.0
realtime_datetime, demand, ties = get_realtime_data(logger, session=session)
data = {
'zoneKey': zone_key,
'datetime': wind_datetime,
'production': {
'solar': solar,
'wind': wind,
'unknown': demand - solar - wind - sum(ties.values())
},
'storage': {},
'source': 'ercot.com'
}
return data
def fetch_consumption(zone_key='US-TX', session=None, target_datetime=None,
logger=logging.getLogger(__name__)):
"""Gets consumption for a specified zone, returns a dictionary."""
realtime_datetime, demand, ties = get_realtime_data(logger, session=session)
data = {
'zoneKey': zone_key,
'datetime': realtime_datetime,
'consumption': demand,
'source': 'ercot.eu'
}
return data
if __name__ == '__main__':
print('fetch_production() ->')
print(fetch_production())
print('fetch_consumption() ->')
print(fetch_consumption())
| 2.875 | 3 |
CoderPro/day4/solution.py | akashzcoder/coding_discipline | 0 | 12772692 | <reponame>akashzcoder/coding_discipline
def sortNums(nums):
# constant space solution
i = 0
j = len(nums) - 1
index = 0
while index <= j:
if nums[index] == 1:
nums[index], nums[i] = nums[i], nums[index]
index += 1
i += 1
if nums[index] == 2:
index += 1
if nums[index] == 3:
nums[index], nums[j] = nums[j], nums[index]
j -= 1
return nums
print(sortNums([2, 3, 2, 2, 3, 2, 3, 1, 1, 2, 1, 3]))
| 3.796875 | 4 |
nanopore-human-transcriptome/scripts/bulk_signal_read_correction/make_reads.py | hasindu2008/NA12878 | 345 | 12772693 | import sys
from pathlib import Path
from argparse import ArgumentParser
import h5py
import pandas as pd
import numpy as np
from tqdm import tqdm
from export import export_read_file
def get_args():
parser = ArgumentParser(description="Parse sequencing_summary.txt files and .paf files to find split reads "
"in an Oxford Nanopore Dataset",
add_help=False)
general = parser.add_argument_group(title='General options')
general.add_argument("-h", "--help",
action="help",
help="Show this help and exit"
)
in_args = parser.add_argument_group(
title='Input sources'
)
in_args.add_argument("-s", "--summary",
required=True,
nargs='+',
help='Sequencing summary file(s) generated by albacore or guppy. Can be compressed '
'using gzip, bzip2, xz, or zip')
in_args.add_argument("--start-events",
help="start_events.csv file generated by event_finder.py",
default="",
required=True,
)
in_args.add_argument("--end-events",
help="end_events.csv file generated by event_finder.py",
default="",
required=True,
)
in_args.add_argument("--targets",
help="A text file of target read ids with one per line.",
default="",
required=True,
)
in_args.add_argument("--bulk-files",
help="ONT bulk FAST5 files.",
nargs='+',
default="",
)
in_args.add_argument("-o", "--output-name",
help="Name of the output folder, this will be generated if it does not exist",
required=True,
default=""
)
in_args.add_argument("--extra-classifications",
help="Any extra MinKNOW classifications to include.",
nargs='*',
default="",
)
return parser.parse_args()
def main():
args = get_args()
# debug(args)
# # sys.exit()
# Make folders
for j in ['starts', 'ends']:
Path('{i}/{j}/{k}'.format(i=args.output_name, j=j, k='fast5')).mkdir(parents=True, exist_ok=True)
# Open files
start_events = pd.read_csv(args.start_events, sep=',')
end_events = pd.read_csv(args.end_events, sep=',')
seq_sum_df = concat_files_to_df(file_list=args.summary, sep='\t')
# Create end_time Series in seq_sum_df
seq_sum_df['end_time'] = seq_sum_df['start_time'] + seq_sum_df['duration']
# Sort and Groupby to segregate runs and channels
seq_sum_df = seq_sum_df.sort_values(by=['run_id', 'channel', 'start_time'], ascending=True)
seq_sum_df_1 = seq_sum_df.copy()
gb = seq_sum_df.groupby(['run_id', 'channel'])
gb1 = seq_sum_df_1.groupby(['run_id', 'channel'])
# Get previous and next start times within groupby
seq_sum_df['next_start'] = gb['start_time'].shift(-1)
seq_sum_df_1['prev_start'] = gb1['start_time'].shift(1)
target_read_ids = []
with open(args.targets, 'r') as file:
for line in file:
target_read_ids.append(line.strip())
classifications = ['pore', 'inrange', 'good_single', 'unblocking']
if args.extra_classifications:
classifications.extend(args.extra_classifications)
# Get end_events for target_read_ids
end_events = end_events[end_events['read_id'].isin(target_read_ids)]
normal_ending_ids = end_events[end_events['time'].ge(0) &
end_events['label'].isin(classifications)]['read_id'].unique()
abnormally_ending_ids = end_events[~end_events['read_id'].isin(normal_ending_ids)]['read_id'].unique()
end_target_ss = seq_sum_df[seq_sum_df['read_id'].isin(abnormally_ending_ids)]
# Get start_events for target_read_ids
start_events = start_events[start_events['read_id'].isin(target_read_ids)]
normal_starting_ids = start_events[start_events['time'].le(0) &
start_events['label'].isin(classifications)]['read_id'].unique()
abnormally_starting_ids = start_events[~start_events['read_id'].isin(normal_starting_ids)]['read_id'].unique()
start_target_ss = seq_sum_df_1[seq_sum_df_1['read_id'].isin(abnormally_starting_ids)]
print('Collecting abnormally ending reads:')
end_read_info = write_files(end_target_ss, args.bulk_files, 'start_time',
'next_start', '{i}/ends/fast5/'.format(i=args.output_name))
end_read_info.to_csv('{}/ends_read_info.txt'.format(args.output_name), sep='\t', index=False, header=True)
end_read_info.to_csv('{}/ends_filenames.txt'.format(args.output_name), sep='\t', index=False, header=False,
columns=['filename'])
print('Collecting abnormally starting reads:')
start_read_info = write_files(start_target_ss, args.bulk_files, 'prev_start',
'end_time', '{i}/starts/fast5/'.format(i=args.output_name))
start_read_info.to_csv('{}/starts_read_info.txt'.format(args.output_name), sep='\t', index=False, header=True)
start_read_info.to_csv('{}/starts_filenames.txt'.format(args.output_name), sep='\t', index=False, header=False,
columns=['filename'])
return
def write_files(target_ss, bulkfiles, read_start_col, read_end_col, export_path, remove_pore=True):
"""Abstraction for export_read_file for collecting read info
Parameters
----------
target_ss : pd.DataFrame
DataFrame of reads to generate reads for
bulkfiles: list
list of bulk FAST5 files
read_start_col : str
Column in the target_ss that start index is derived from
read_end_col : str
Column in the target_ss that end index is derived from
export_path : str
The folder where read files will be written
remove_pore : bool
Remove pore-like signal from trace (>1500)
Returns
-------
pd.DataFrame
DataFrame of read info about reads that have been written
"""
d = {
'read_id': [],
'channel': [],
'start_index': [],
'end_index': [],
'bv_read_id': [],
'filename': [],
'bv_filename': []
}
files_written = 0
for bf in tqdm(bulkfiles):
f = h5py.File(bf, 'r')
run_id = f['UniqueGlobalKey']["tracking_id"].attrs["run_id"].decode('utf8')
sf = int(f["UniqueGlobalKey"]["context_tags"].attrs["sample_frequency"].decode('utf8'))
t = target_ss[target_ss['run_id'] == run_id]
t = t.dropna()
f.close()
file = h5py.File(bf, 'r')
for idx, row in tqdm(t.iterrows(), total=t.shape[0], desc=run_id):
si = int(np.floor(row[read_start_col] * sf))
ei = int(np.floor(row[read_end_col] * sf))
d['read_id'].append(row['read_id'])
d['channel'].append(row['channel'])
d['start_index'].append(si)
d['end_index'].append(ei)
d['bv_read_id'].append("{ch}-{start}-{end}".format(ch=row['channel'], start=si, end=ei))
d['filename'].append(row['filename'])
d['bv_filename'].append(export_read_file(row['channel'],
si,
ei,
file,
export_path,
remove_pore=remove_pore))
files_written += 1
print('{} reads written'.format(files_written))
return pd.DataFrame(d)
def concat_files_to_df(file_list, **kwargs):
"""Return a pandas.DataFrame from a list of files
"""
df_list = []
for f in file_list:
try:
df_list.append(pd.read_csv(filepath_or_buffer=f, **kwargs))
except pd.errors.ParserError as e:
print('{}\nThis is usually caused by an input file not being the expected format'.format(repr(e)))
sys.exit(1)
except Exception as e:
sys.exit(1)
return pd.concat(df_list, ignore_index=True)
def debug(args):
dirs = dir(args)
for attr in dirs:
if attr[0] != '_':
print('{a:<25} {b}'.format(a=attr, b=getattr(args, attr)))
if __name__ == '__main__':
main()
| 2.8125 | 3 |
frame/process_frame.py | kaf64/csvToolkit | 0 | 12772694 | import tkinter as tk
from tkinter import ttk
import pandas as pd
import numpy as np
class ProcessFrame(tk.Frame):
def __init__(self, parent: tk.Tk) -> None:
super().__init__(parent)
self.data = None
# init widgets
frame_delete_nan = tk.LabelFrame(self, text='Delete empty values')
frame_delete_nan.grid(row=0, column=0, sticky='nswe')
frame_fill_nan = tk.LabelFrame(self, text='Fill empty values')
frame_fill_nan.grid(row=1, column=0, sticky='nswe')
frame_new_column = tk.LabelFrame(self, text='Add new column')
frame_new_column.grid(row=3, column=0, sticky='NSWE')
frame_delete_column = tk.LabelFrame(self, text='Delete column')
frame_delete_column.grid(row=4, column=0, sticky='NSWE')
# widgets for delete empty values
self.label_list = tk.Label(frame_fill_nan, text='Select column :')
self.label_list.grid(row=0, column=0)
self.btn_delete_nan = tk.Button(frame_delete_nan,
command=lambda: self.delete_nan(column=str(self.list_columns.get())),
text='Delete rows with empty values')
self.btn_delete_nan.grid(row=0, column=0)
# widgets for fill empty values
self.label_replace_column = tk.Label(frame_fill_nan, text='Select column')
self.label_replace_column.grid(row=0, column=0)
self.list_columns = ttk.Combobox(frame_fill_nan)
self.list_columns.grid(row=0, column=1)
self.label_replace = tk.Label(frame_fill_nan, text='Select value to replace empty values')
self.label_replace.grid(row=1, column=0)
self.entry_replace = tk.Entry(frame_fill_nan)
self.entry_replace.grid(row=1, column=1)
self.btn_replace_nan = tk.Button(frame_fill_nan,
command=lambda: self.replace_nan(
column=str(self.list_columns.get()),
new_value=self.entry_replace.get()),
text='Replace empty values')
self.btn_replace_nan.grid(row=2, column=0)
#widgets for adding new column
self.label_add_col = tk.Label(frame_new_column, text='New column name:')
self.label_add_col.grid(row=0, column=0)
self.entry_add_col = tk.Entry(frame_new_column)
self.entry_add_col.grid(row=0, column=1)
label_init_val = tk.Label(frame_new_column, text='(optional) init value in new column:')
label_init_val.grid(row=1, column=0)
self.entry_init_val = tk.Entry(frame_new_column)
self.entry_init_val.grid(row=1, column=1)
self.btn_add_col = tk.Button(frame_new_column,
command=lambda: self.add_new_column(column_name=str(self.entry_add_col.get()),
init_value=str(self.entry_init_val.get())),
text='add new column')
self.btn_add_col.grid(row=2, column=0)
# add widgets to delete column
label_del_col = tk.Label(frame_delete_column, text='Set column to delete:')
label_del_col.grid(row=0, column=0)
self.list_del_col = ttk.Combobox(frame_delete_column)
self.list_del_col.grid(row=0, column=1)
btn_del_col = tk.Button(frame_delete_column,
text='Delete column',
command=lambda: self.delete_column(column_name=self.list_del_col.get()))
btn_del_col.grid(row=1, column=0)
def init_data(self, data: pd.DataFrame) -> None:
self.data = data
self.refresh_widgets()
def refresh_widgets(self):
list_container = self.data.columns.values.tolist()
self.list_del_col['values'] = list_container
list_container.append('all columns')
self.list_columns['values'] = list_container
def delete_nan(self, column: str) -> None:
self.data.dropna(inplace=True)
self.refresh_widgets()
print(self.data)
def replace_nan(self, column: str, new_value: str) -> None:
if column.strip() is not None:
if column == 'all columns':
self.data.fillna(value=new_value, inplace=True)
else:
self.data[column].fillna(value=new_value, inplace=True)
self.refresh_widgets()
def add_new_column(self, column_name: str, init_value: str) -> None:
if column_name.strip() is not None:
self.data.insert(loc=len(self.data.columns), column=column_name, value=init_value)
self.refresh_widgets()
def delete_column(self, column_name: str) -> None:
if column_name.strip() is not None:
del self.data[column_name]
self.refresh_widgets()
| 2.8125 | 3 |
ezgal/utils.py | AfonsoV/easyGalaxy | 14 | 12772695 | <gh_stars>10-100
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import array, os, re
import numpy as np
import math
# Physical / astronomical constants used by the unit-conversion helpers below.
c = 299792458  # speed of light in vacuum (m/s)
m_per_au = 1.49598e11  # meters per astronomical unit
au_per_pc = 3600 * 180 / np.pi  # AUs per parsec (1 pc = distance at which 1 AU subtends 1 arcsec)
def to_years(to_convert, units='gyrs', reverse=False):
    """ res = ezgal.utils.to_years( to_convert, units='gyrs', reverse=False )

    Convert ``to_convert`` from the given units into years.  With
    ``reverse=True`` the conversion runs the other way: from years into
    the given units.

    :param to_convert: The time to convert
    :param units: The units to convert the time from (or to, if reverse)
    :param reverse: Convert from years into ``units`` if True
    :type to_convert: int, float
    :type units: string
    :type reverse: bool
    :returns: The converted time
    :rtype: int, float

    :Example:
        >>> import ezgal
        >>> ezgal.utils.to_years( 1e-9, units='gyrs' )
        1.0

    **units**

    Available units are (case insensitive):

    ================== ====================
    Name               Units
    ================== ====================
    gigayears,gyrs,gyr Gigayears
    megayears,myrs,myr Megayears
    years,yrs,yr       Years
    days,day           Days
    seconds,secs,sec,s Seconds
    log                log10(years)
    ================== ====================

    .. seealso:: :func:`ezgal.utils.convert_time`
    """

    unit_name = units.lower()
    values = np.asarray(to_convert)

    # log10(years) is not a linear scale, so it cannot share the
    # multiplicative-factor path below.
    if unit_name == 'log':
        return np.log10(values) if reverse else 10.0**values

    # years-per-unit for every recognized alias; 'years' keeps the integer
    # factor 1 so integer inputs are not promoted to float on the forward path
    per_year = 1.0 / 365.0
    per_second = 1.0 / (365.0 * 86400)
    factors = {
        'gigayears': 1e9, 'gyrs': 1e9, 'gyr': 1e9,
        'megayears': 1e6, 'myrs': 1e6, 'myr': 1e6,
        'years': 1, 'yrs': 1, 'yr': 1,
        'days': per_year, 'day': per_year,
        'seconds': per_second, 'secs': per_second, 'sec': per_second,
        's': per_second,
    }

    if unit_name not in factors:
        raise NameError('Units of %s are not recognized!' % unit_name)

    factor = factors[unit_name]
    if reverse:
        factor = 1.0 / factor
    return values * factor
def convert_time(to_convert, incoming='secs', outgoing='gyrs'):
    """ res = ezgal.utils.convert_time( to_convert, incoming='secs', outgoing='gyrs' )

    Convert a time value from ``incoming`` units to ``outgoing`` units.

    :param to_convert: The time to convert
    :param incoming: The units to convert the time from
    :param outgoing: The units to convert the time to
    :type to_convert: int, float
    :type incoming: string
    :type outgoing: string
    :returns: The converted time
    :rtype: int, float

    :Example:
        >>> import ezgal
        >>> ezgal.utils.convert_time( 1, incoming='years', outgoing='s' )
        31536000.0

    .. seealso:: see :func:`ezgal.utils.to_years` for available units."""

    # Route through years as the common intermediate unit, then convert
    # from years back out into the requested units.
    in_years = to_years(to_convert, units=incoming)
    return to_years(in_years, units=outgoing, reverse=True)
def to_meters(to_convert, units='a'):
    """ res = ezgal.utils.to_meters( to_convert, units='a' )

    Converts a length from the given units to meters

    :param to_convert: The length to convert
    :param units: The units to convert the length from
    :type to_convert: int, float
    :type units: string
    :returns: The converted length
    :rtype: int, float

    :Example:
        >>> import ezgal
        >>> ezgal.utils.to_meters( 1e10, units='a' )
        1.0

    **units**

    Available units are (case insensitive):

    ================= ====================
    Name              Units
    ================= ====================
    a,angstroms       Angstroms
    nm,nanometers     Nanometers
    um,microns        Microns
    mm,millimeters    Millimeters
    cm,centimeters    Centimeters
    m,meters          Meters
    km,kilometers     Kilometers
    au                Astronomical Units
    pc,parsecs        Parsecs
    kpc, kiloparsecs  Kiloparsecs
    mpc, megaparsecs  Megaparsecs
    ================= ====================

    :raises NameError: if ``units`` is not one of the recognized names

    .. seealso:: :func:`ezgal.utils.convert_length`
    """

    units = units.lower()
    to_convert = np.asarray(to_convert)
    if units == 'angstroms' or units == 'a': return to_convert / 1e10
    if units == 'nanometers' or units == 'nm': return to_convert / 1e9
    if units == 'microns' or units == 'um': return to_convert / 1e6
    if units == 'millimeters' or units == 'mm': return to_convert / 1e3
    if units == 'centimeters' or units == 'cm': return to_convert / 1e2
    if units == 'meters' or units == 'm': return to_convert
    if units == 'kilometers' or units == 'km': return to_convert * 1000.0
    if units == 'au': return to_convert * m_per_au
    if units == 'parsecs' or units == 'pc':
        return to_convert * m_per_au * au_per_pc
    # Bug fix: accept the documented spelling 'kiloparsecs'; the historical
    # misspelling 'kilparsecs' is kept for backward compatibility.
    if units == 'kiloparsecs' or units == 'kpc' or units == 'kilparsecs':
        return to_convert * m_per_au * au_per_pc * 1000.0
    if units == 'megaparsecs' or units == 'mpc':
        return to_convert * m_per_au * au_per_pc * 1e6
    raise NameError('Units of %s are not recognized!' % units)
def to_hertz(to_convert, units='a'):
    """ res = ezgal.utils.to_hertz( to_convert, units='Angstroms' )

    Converts the given wavelength (in the given units) to hertz.

    :param to_convert: The wavelength to convert
    :param units: The units the wavelength is in
    :type to_convert: int, float
    :type units: string
    :returns: The converted frequency
    :rtype: float

    :Example:
        >>> import ezgal
        >>> ezgal.utils.to_hertz( 1000, units='a' )
        2997924580000000.0

    .. seealso::
        see :func:`ezgal.utils.to_meters` for list of available units
        Also see :func:`ezgal.utils.to_lambda`
    """
    # c is in m/s, so rescale it to the requested wavelength units first,
    # then apply nu = c / lambda.
    speed_of_light_in_units = c / to_meters(1.0, units=units)
    return speed_of_light_in_units / np.asarray(to_convert)
def to_lambda(to_convert, units='a'):
    """ res = ezgal.utils.to_lambda( to_convert, units='a' )

    Converts the given frequency to a wavelength in the given units.

    :param to_convert: The frequency to convert
    :param units: The desired units of the output wavelength
    :type to_convert: int, float
    :type units: string
    :returns: The converted wavelength
    :rtype: float

    :Example:
        >>> import ezgal
        >>> ezgal.utils.to_lambda( 2997924580000000.0, units='a' )
        1000.0

    .. seealso::
        see :func:`ezgal.utils.to_meters` for list of available units
        Also see :func:`ezgal.utils.to_hertz`
    """
    frequency = np.asarray(to_convert)
    # lambda = c / nu, with c expressed in the requested wavelength units.
    return (c / to_meters(1.0, units=units)) / frequency
def convert_length(to_convert, incoming='m', outgoing='a'):
    """ res = ezgal.utils.convert_length( to_convert, incoming='m', outgoing='a' )

    Converts a length from the incoming units to the outgoing units.

    :param to_convert: The length to convert
    :param incoming: The units to convert the length from
    :param outgoing: The units to convert the length to
    :type to_convert: int, float
    :type incoming: string
    :type outgoing: string
    :returns: The converted length
    :rtype: int, float

    :Example:
        >>> import ezgal
        >>> ezgal.utils.convert_length( 1, incoming='pc', outgoing='au' )
        206264.80624709636

    .. seealso:: see :func:`ezgal.utils.to_meters` for available units.
    """
    # Go through meters as the common intermediate unit.
    length_in_meters = to_meters(to_convert, units=incoming)
    meters_per_outgoing_unit = to_meters(1.0, units=outgoing)
    return length_in_meters / meters_per_outgoing_unit
def rascii(filename, silent=False):
    """ res = ezgal.utils.rascii( filename, silent=False )

    Reads in numeric data stored in an ascii file into a numpy array.

    :param filename: The name of the ascii file (or an open file pointer)
    :param silent: Whether or not to output basic file information
                   (currently unused -- kept for interface compatibility)
    :type filename: string
    :type silent: bool
    :returns: A numpy array of floats with shape (nrows, ncols)
    :rtype: np.array()

    .. warning::
        Skips any lines that have any non-numeric data, and any data lines
        with a different number of columns than the first data line.

    .. seealso:: :func:`ezgal.utils.wascii`
    """
    # accept an open filepointer or a filename; only close what we opened
    if isinstance(filename, str):
        fh = open(filename, 'r')
        do_close = True
    else:
        fh = filename
        do_close = False
    try:
        found = False
        allowed = 0
        ngood = 0
        res = []
        for line in fh:
            # skip blank lines and lines containing anything non-numeric
            # (digits, whitespace, '.', 'e/E/d/D' exponents and signs allowed)
            if re.search(r'^\s*$', line) or re.search(r'[^\s\d.eEdD\-+]', line):
                continue
            parts = line.split()
            if not found:
                # first data line fixes the expected number of columns
                found = True
                allowed = len(parts)
            elif len(parts) != allowed:
                # skip rows with a mismatched column count
                continue
            ngood += 1
            res.extend(parts)
    finally:
        if do_close:
            fh.close()
    # BUGFIX: the original incremented ngood after the loop and then compared
    # against zero, so a file with no data lines raised NameError (res was
    # never defined) instead of returning an empty array.
    if ngood == 0:
        return np.array([])
    return np.array(res).reshape(ngood, -1).astype('float')
def wascii(array, filename, formats, blank=False, header=None, names=None):
    """ ezgal.utils.wascii( array, filename, formats, blank=False, header=None, names=None )

    Writes out a np array to a well formatted file.

    :param array: The numpy array to write out
    :param filename: The name of the output file (or an open file pointer)
    :param formats: A python string format, or one format per column
    :param blank: Whether or not to output a blank line at the end of the file
    :param header: A string or list of strings to write out as the header
    :param names: A list of column names with which to build a header
    :type array: a 2D numpy array
    :type filename: string
    :type formats: string,list
    :type blank: bool
    :type header: string,list
    :type names: list
    """
    table = np.asarray(array)
    if table.ndim != 2:
        raise NameError('I was expecting a 2D data table')
    nrows, ncols = table.shape
    # a single format string applies to every column
    if isinstance(formats, str):
        formats = [formats] * ncols
    if ncols != len(formats):
        raise NameError(
            'Number of supplied formats does not match number of table columns!')
    # Normalize header to a list we own -- the original appended to the
    # caller's list (surprising side effect) and crashed when header was a
    # plain string combined with names.
    if isinstance(header, str):
        header = [header]
    elif header is not None:
        header = list(header)
    # if column names were provided, build a header that lists column names/numbers
    if names is not None:
        if len(names) != ncols:
            raise NameError(
                'Number of supplied column names does not match number of table columns!')
        if header is None:
            header = []
        header.append('# Column Descriptions:')
        # zero-padded column index wide enough for the largest column number
        name_format = '# %0' + ('%1d' %
                                (math.ceil(math.log10(ncols)))) + 'd: %s'
        for i in range(ncols):
            header.append(name_format % (i + 1, names[i]))
    if header is not None:
        header = "\n".join(header)
    # build the data body in memory, one text line per row
    if ncols == 1:
        body = "\n".join(formats[0] % val for val in table.ravel())
    else:
        strings = [''] * nrows
        for i in range(nrows):
            strings[i] = ' '.join(
                [fmt % val for fmt, val in zip(formats, table[i, :])])
        body = "\n".join(strings)
    # filename can be a filename or file pointer
    # in the case of a file pointer don't close it
    if isinstance(filename, str):
        # BUGFIX: open in text mode -- the original used 'wb' and then wrote
        # str objects, which raises TypeError on Python 3.
        fh = open(filename, 'w')
        do_close = True
    else:
        fh = filename
        do_close = False
    if header is not None:
        fh.write(header + "\n")
    fh.write(body)
    if blank:
        fh.write("\n")
    if do_close:
        fh.close()
def _read_binary(fhandle, type='i', number=1, swap=False):
""" res = ezgal.utils._read_binary( fhandle, type='i', number=1, swap=False )
reads 'number' binary characters of type 'type' from file handle 'fhandle'
returns the value (for one character read) or a numpy array
set swap=True to byte swap the array after reading
"""
arr = array.array(type)
arr.fromfile(fhandle, number)
if swap: arr.byteswap()
if len(arr) == 1:
return arr[0]
else:
return np.asarray(arr)
def read_ised(file):
    """ ( seds, ages, vs ) = ezgal.utils.read_ised( file )

    Read a bruzual and charlot binary ised file.

    :param file: The name of the ised file
    :type file: string
    :returns: A tuple containing model data
    :rtype: tuple

    .. note::
        All returned variables are numpy arrays. ages and vs are one
        dimensional arrays, and seds has a shape of (vs.size,ages.size)

    **units**
    Returns units of:

    =============== ===============
    Return Variable Units
    =============== ===============
    seds            Ergs/s/cm**2/Hz
    ages            Years
    vs              Hz
    =============== ===============
    """
    if not (os.path.isfile(file)):
        raise ValueError('The specified model file was not found!')
    # open the ised file
    fh = open(file, 'rb')
    # start reading
    junk = _read_binary(fh)
    nages = _read_binary(fh)
    # first consistency check
    if nages < 1 or nages > 2000:
        raise ValueError(
            'Problem reading ised file - unexpected data found for the number of ages!')
    # read ages
    ages = np.asarray(_read_binary(fh, type='f', number=nages))
    # Skip header fields we don't need; reading (and discarding) them keeps
    # the file position aligned with the fixed BC03 binary layout.
    junk = _read_binary(fh, number=2)
    iseg = _read_binary(fh, number=1)
    if iseg > 0: junk = _read_binary(fh, type='f', number=6 * iseg)
    junk = _read_binary(fh, type='f', number=3)
    junk = _read_binary(fh)
    junk = _read_binary(fh, type='f')
    # BUGFIX: array typecode 'c' was removed in Python 3; 'b' reads the same
    # number of single-byte values (the data is discarded anyway) and works
    # on Python 2 as well.
    junk = _read_binary(fh, type='b', number=80)
    junk = _read_binary(fh, type='f', number=4)
    junk = _read_binary(fh, type='b', number=160)
    junk = _read_binary(fh)
    junk = _read_binary(fh, number=3)
    # read in the wavelength data
    nvs = _read_binary(fh)
    # consistency check
    if nvs < 10 or nvs > 12000:
        raise ValueError(
            'Problem reading ised file - unexpected data found for the number of wavelengths!')
    # read wavelengths and convert to frequency (comes in as Angstroms)
    # also reverse the array so it will be sorted after converting to frequency
    ls = _read_binary(fh, type='f', number=nvs)[::-1]
    # create an array for storing SED info
    seds = np.zeros((nvs, nages))
    # now loop through and read in all the ages
    for i in range(nages):
        junk = _read_binary(fh, number=2)
        nv = _read_binary(fh)
        if nv != nvs:
            raise ValueError(
                'Problem reading ised file - unexpected data found while reading seds!')
        seds[:, i] = _read_binary(fh, type='f', number=nvs)[::-1]
        nx = _read_binary(fh)
        junk = _read_binary(fh, type='f', number=nx)
    # now convert the seds from Lo/A to ergs/s/Hz
    seds *= 3.826e33 * ls.reshape(
        (nvs, 1))**2.0 / convert_length(c, outgoing='a')
    # convert from ergs/s/Hz to ergs/s/Hz/cm^2.0 @ 10pc
    seds /= 4.0 * np.pi * convert_length(10, incoming='pc', outgoing='cm')**2.0
    vs = to_hertz(ls)
    fh.close()
    # sort in frequency space
    # BUGFIX: vs is one dimensional, so it must be indexed with sinds alone --
    # vs[sinds, :] raises "too many indices" on modern numpy.
    sinds = vs.argsort()
    return (seds[sinds, :], ages, vs[sinds])
| 3.03125 | 3 |
adjutant-plugin/mfa_actions/serializers.py | catalyst-cloud/adjutant-mfa | 2 | 12772696 | from adjutant.actions.v1.serializers import BaseUserIdSerializer
from rest_framework import serializers
class EditMFASerializer(BaseUserIdSerializer):
    """Serializer for editing a user's MFA state.

    Extends the base user-id serializer with a single flag.
    """
    # When true, the action removes the MFA credential instead of adding one
    # -- presumably consumed by the corresponding MFA action; confirm against
    # the actions module.
    delete = serializers.BooleanField(default=False)
| 1.609375 | 2 |
concepts/matrices.py | ymizoguchi/concepts | 0 | 12772697 | # matrices.py - boolean matrices as row bitsets and column bitsets
"""Boolean matrices as collections of row and column vectors."""
import bitsets
from ._compat import zip
# Public API of this module.
__all__ = ['Relation']

# Alias for the bitset base class used for individual matrix rows/columns.
Vector = bitsets.bases.MemberBits
"""Single row or column of a boolean matrix as bit vector."""
class Vectors(bitsets.series.Tuple):
    """Paired collection of rows or columns of a boolean matrix relation."""

    def _pair_with(self, relation, index, other):
        """Bind this collection to its transposed counterpart and install the
        FCA derivation operators (prime/double/doubleprime) as closures.

        Must only be called once per instance.
        """
        # `prime` only exists after a previous pairing -- guard against reuse.
        if hasattr(self, 'prime'):
            raise RuntimeError('%r attempt _pair_with %r' % (self, other))
        self.relation = relation
        self.relation_index = index
        # supremum = bit vector with all members set: identity for &-folds
        Prime = other.BitSet.supremum  # noqa: N806
        Double = self.BitSet.supremum  # noqa: N806
        _prime = other.BitSet.fromint
        _double = self.BitSet.fromint

        def prime(bitset):
            """FCA derivation operator (extent->intent, intent->extent)."""
            # AND together the paired vector of every set member; walk
            # `bitset` from the low bit and stop once no bits remain.
            prime = Prime
            for o in other:
                if bitset & 1:
                    prime &= o
                bitset >>= 1
                if not bitset:
                    break
            return _prime(prime)

        def double(bitset):
            """FCA double derivation operator (extent->extent, intent->intent)."""
            # First derivation (same loop as prime) ...
            prime = Prime
            for o in other:
                if bitset & 1:
                    prime &= o
                bitset >>= 1
                if not bitset:
                    break
            # ... then derive again in the opposite direction.
            double = Double
            for s in self:
                if prime & 1:
                    double &= s
                prime >>= 1
                if not prime:
                    break
            return _double(double)

        def doubleprime(bitset):
            """FCA single and double derivation (extent->extent+intent, intent->intent+extent)."""
            prime = Prime
            for o in other:
                if bitset & 1:
                    prime &= o
                bitset >>= 1
                if not bitset:
                    break
            # fold the second pass over a copy so the prime result survives
            bitset = prime
            double = Double
            for s in self:
                if bitset & 1:
                    double &= s
                bitset >>= 1
                if not bitset:
                    break
            return _double(double), _prime(prime)

        # expose the operators both on the collection and on its BitSet class
        self.prime = self.BitSet.prime = prime
        self.double = self.BitSet.double = double
        self.doubleprime = self.BitSet.doubleprime = doubleprime

    def __reduce__(self):
        # Pickle support: recreate by indexing into the parent relation.
        return self.relation, (self.relation_index,)
class Relation(tuple):
    """Binary relation as interconnected pair of bitset collections.

    >>> br = Relation('Condition', 'Symbol',
    ...               ('TT', 'TF', 'FT', 'FF'), ('->', '<-'),
    ...               [(True, False, True, True), (True, True, False, True)])

    >>> br
    <Relation(ConditionVectors('1011', '1101'), SymbolVectors('11', '01', '10', '11'))>

    >>> br[1].BitSet.frommembers(('->', '<-')).prime().members()
    ('TT', 'FF')
    """

    __slots__ = ()

    def __new__(cls, xname, yname, xmembers, ymembers, xbools, _ids=None):
        # _ids is only supplied when rebuilding from a pickle, in which case
        # the bitset classes are recreated with their original ids.
        if _ids is not None:  # unpickle reconstruction
            xid, yid = _ids
            X = bitsets.meta.bitset(xname, xmembers, xid, Vector, None, Vectors)  # noqa: N806
            Y = bitsets.meta.bitset(yname, ymembers, yid, Vector, None, Vectors)  # noqa: N806
        else:
            X = bitsets.bitset(xname, xmembers, Vector, tuple=Vectors)  # noqa: N806
            Y = bitsets.bitset(yname, ymembers, Vector, tuple=Vectors)  # noqa: N806
        # rows from the given booleans, columns from the transposed booleans
        x = X.Tuple.frombools(xbools)
        y = Y.Tuple.frombools(zip(*x.bools()))
        self = super(Relation, cls).__new__(cls, (x, y))
        # cross-link rows and columns so each side can derive into the other
        x._pair_with(self, 0, y)
        y._pair_with(self, 1, x)
        return self

    # relation(0) / relation(1) as an alias for indexing
    __call__ = tuple.__getitem__

    def __repr__(self):
        return '<%s(%r, %r)>' % (self.__class__.__name__, self[0], self[1])

    def __reduce__(self):
        # Pickle as the constructor arguments plus the bitset class ids so
        # __new__ can rebuild identical bitset classes.
        X, Y = (v.BitSet for v in self)  # noqa: N806
        bools = self[0].bools()
        ids = (X._id, Y._id)
        args = (X.__name__, Y.__name__, X._members, Y._members, bools, ids)
        return self.__class__, args
| 3.375 | 3 |
src/chapter-4/conftest.py | luizyao/pytest-chinese-doc | 283 | 12772698 | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
'''
Author: <NAME> (<EMAIL>)
Created Date: 2019-09-19 5:35:12
-----
Last Modified: 2019-10-07 8:27:16
Modified By: <NAME> (<EMAIL>)
-----
THIS PROGRAM IS FREE SOFTWARE, IS LICENSED UNDER MIT.
A short and simple permissive license with conditions
only requiring preservation of copyright and license notices.
Copyright © 2019 <NAME>
-----
HISTORY:
Date By Comments
---------- -------- ---------------------------------------------------------
'''
import os
import smtplib
import tempfile
import pytest
@pytest.fixture(scope='module')
def smtp_connection():
    """Module-scoped SMTP connection to smtp.163.com:25 (5s timeout)."""
    connection = smtplib.SMTP("smtp.163.com", 25, timeout=5)
    return connection
@pytest.fixture(scope='package')
def smtp_connection_package():
    """Package-scoped SMTP connection to smtp.163.com:25 (5s timeout)."""
    connection = smtplib.SMTP("smtp.163.com", 25, timeout=5)
    return connection
@pytest.fixture()
def smtp_connection_yield():
    """Yield an SMTP connection; close it during fixture teardown."""
    connection = smtplib.SMTP("smtp.163.com", 25, timeout=5)
    yield connection
    # teardown: runs after the test that used this fixture finishes
    print("关闭SMTP连接")
    connection.close()
@pytest.fixture(scope='module')
def smtp_connection_request(request):
    """Connect to the host named by the test module's `smtp_server` attribute.

    Falls back to smtp.163.com:25 when the module defines no `smtp_server`.
    """
    host, port = getattr(request.module, 'smtp_server', ("smtp.163.com", 25))
    with smtplib.SMTP(host, port, timeout=5) as smtp_connection:
        yield smtp_connection
        print("断开 %s:%d" % (host, port))
@pytest.fixture(scope='module', params=['smtp.163.com', 'smtp.126.com'])
def smtp_connection_params(request):
    """Parametrized module-scoped connection: one fixture instance per host."""
    host = request.param
    with smtplib.SMTP(host, 25, timeout=5) as smtp_connection:
        yield smtp_connection
        print("断开 %s:%d" % (host, 25))
@pytest.fixture()
def cleandir():
    """Create a fresh temporary directory and chdir into it for the test."""
    os.chdir(tempfile.mkdtemp())
| 2.40625 | 2 |
kubeflow/components/data/validation/run.py | WillianFuks/pySearchML | 27 | 12772699 | <gh_stars>10-100
import sys
import os
import argparse
import pathlib
import uuid
from shutil import rmtree
from google.cloud import storage, bigquery
PATH = pathlib.Path(__file__).parent
def main(validation_init_date, validation_end_date, bucket, destination):
    """Export GA validation data from BigQuery into gzipped files on disk.

    Runs the templated ``validation.sql`` query into a throwaway BigQuery
    table, extracts that table to GCS as gzipped newline-delimited JSON,
    downloads the blobs into ``destination`` and deletes both the blobs and
    the temporary table.

    :param validation_init_date: start date substituted into validation.sql
                                 (presumably YYYYMMDD -- confirm against the
                                 SQL template).
    :param validation_end_date: end date substituted into validation.sql.
    :param bucket: GCS staging path of the form ``bucket_name[/prefix]``.
    :param destination: local directory for the downloaded files; it is
                        wiped and recreated first.
    """
    # Remove everything and deletes destination folder to receive new files.
    rmtree(destination, ignore_errors=True)
    os.makedirs(destination, exist_ok=True)
    storage_client = storage.Client()
    bq_client = bigquery.Client()
    ds_ref = bq_client.dataset('pysearchml')
    # random name: query results land in a throwaway table we delete below
    table_id = str(uuid.uuid4().hex)
    table_ref = ds_ref.table(table_id)
    # Query GA data
    query_path = PATH / 'validation.sql'
    query = open(str(query_path)).read()
    query = query.format(validation_init_date=validation_init_date,
                         validation_end_date=validation_end_date)
    job_config = bigquery.QueryJobConfig()
    job_config.destination = f'{bq_client.project}.pysearchml.{table_id}'
    # cost guard: cap the query at 10 GiB billed
    job_config.maximum_bytes_billed = 10 * (1024 ** 3)
    job_config.write_disposition = 'WRITE_TRUNCATE'
    job = bq_client.query(query, job_config=job_config)
    job.result()
    # export BigQuery table to GCS
    # bucket will be set in accordance to which validation dataset is referenced, i.e.,
    # whether regular validation or validation for the training dataset.
    destination_uri = f"gs://{bucket}/validation*.gz"
    extract_config = bigquery.ExtractJobConfig()
    extract_config.compression = 'GZIP'
    extract_config.destination_format = 'NEWLINE_DELIMITED_JSON'
    job = bq_client.extract_table(table_ref, destination_uri, job_config=extract_config)
    job.result()
    # Download data
    bucket_obj = storage_client.bucket(bucket.split('/')[0])
    # NOTE(review): if ``bucket`` contains no '/', partition('/')[-1] is ''
    # and every blob in the bucket is listed (and deleted below) -- confirm
    # callers always pass a prefix.
    blobs = bucket_obj.list_blobs(prefix=bucket.partition('/')[-1])
    for blob in blobs:
        blob.download_to_filename(f"{destination}/{blob.name.split('/')[-1]}")
        blob.delete()
    # delete BQ table
    bq_client.delete_table(table_ref)
if __name__ == '__main__':
    # Command-line entry point: parse the date range and GCS/local paths,
    # then run the export.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--validation_init_date',
        dest='validation_init_date',
        type=str,
        # BUGFIX: help text previously said "%Y%M%D", which is not a valid
        # date pattern (%M is minutes); the dates used are plain YYYYMMDD.
        help='Date in YYYYMMDD format from when to start querying GA data'
    )
    parser.add_argument(
        '--validation_end_date',
        dest='validation_end_date',
        type=str,
        help='Date in YYYYMMDD format from when to stop querying GA data'
    )
    parser.add_argument(
        '--bucket',
        dest='bucket',
        type=str,
        help='GCS staging path of the form bucket_name[/prefix].'
    )
    parser.add_argument(
        '--destination',
        dest='destination',
        type=str,
        help='Path where validation dataset gzipped files will be stored.'
    )
    # parse_known_args: ignore any extra flags injected by the pipeline runner
    args, _ = parser.parse_known_args(sys.argv[1:])
    main(
        args.validation_init_date,
        args.validation_end_date,
        args.bucket,
        args.destination
    )
| 2.140625 | 2 |
tests/test_clj.py | papaver/pyfn | 3 | 12772700 | <reponame>papaver/pyfn<gh_stars>1-10
#------------------------------------------------------------------------------
# test_clj.py
#------------------------------------------------------------------------------
# BSD 3-Clause License
#
# Copyright (c) 2018, Affirm
# Copyright (c) 2018, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#------------------------------------------------------------------------------
import unittest
from pyfnz.clj import *
#------------------------------------------------------------------------------
# test classes
#------------------------------------------------------------------------------
class CoreTest(unittest.TestCase):
    """Unit tests for the Clojure-style sequence/map helpers in pyfnz.clj."""

    #--------------------------------------------------------------------------
    # tests
    #--------------------------------------------------------------------------

    def test_is_some(self):
        """Test checking if a value is not None.
        """
        # falsy-but-present values still count as "some"
        self.assertTrue(is_some([]))
        self.assertTrue(is_some({}))
        self.assertTrue(is_some(set([])))
        self.assertTrue(is_some(0))
        self.assertTrue(is_some(0.0))
        self.assertTrue(is_some(''))
        self.assertFalse(is_some(None))

    #--------------------------------------------------------------------------

    def test_is_empty(self):
        """Test checking a collection for emptiness.
        """
        self.assertTrue(is_empty([]))
        self.assertTrue(is_empty({}))
        self.assertTrue(is_empty(set([])))
        self.assertFalse(is_empty([1]))
        self.assertFalse(is_empty({'a':1}))
        self.assertFalse(is_empty(set([1])))

    #--------------------------------------------------------------------------

    def test_first(self):
        """Test retrieving the first element in list.
        """
        self.assertEqual(None, first(None))
        self.assertEqual(None, first([]))
        self.assertEqual(1, first([1]))
        self.assertEqual(1, first([1, 2, 3]))

    #--------------------------------------------------------------------------

    def test_second(self):
        """Test retrieving the second element in list.
        """
        self.assertEqual(None, second(None))
        self.assertEqual(None, second([]))
        self.assertEqual(None, second([1]))
        self.assertEqual(2, second([1, 2, 3]))

    #--------------------------------------------------------------------------

    def test_ffirst(self):
        """Test retrieving the first element of the first element in list.
        """
        self.assertEqual(None, ffirst(None))
        self.assertEqual(None, ffirst([]))
        self.assertEqual(1, ffirst([[1]]))
        self.assertEqual(1, ffirst([[1, 2]]))
        self.assertEqual(1, ffirst([[1, 2], [3, 4]]))

    #--------------------------------------------------------------------------

    def test_last(self):
        """Test retrieving the last element in list.
        """
        self.assertEqual(None, last(None))
        self.assertEqual(None, last([]))
        self.assertEqual(1, last([1]))
        self.assertEqual(3, last([1, 2, 3]))

    #--------------------------------------------------------------------------

    def test_butlast(self):
        """Test retrieving the all but the last element in list.
        """
        # note: a single-element list yields None, not []
        self.assertEqual(None, butlast(None))
        self.assertEqual(None, butlast([]))
        self.assertEqual(None, butlast([1]))
        self.assertEqual([1], butlast([1, 2]))
        self.assertEqual([1, 2], butlast([1, 2, 3]))

    #--------------------------------------------------------------------------

    def test_nxt(self):
        """Test retrieving the all elements after the first, None if zero
        elements.
        """
        self.assertEqual(None, nxt(None))
        self.assertEqual(None, nxt([]))
        self.assertEqual(None, nxt([1]))
        self.assertEqual([2], nxt([1, 2]))
        self.assertEqual([2, 3], nxt([1, 2, 3]))

    #--------------------------------------------------------------------------

    def test_rest(self):
        """Test retrieving the all elements after the first.
        """
        # unlike nxt, rest returns [] rather than None when nothing remains
        self.assertEqual(None, rest(None))
        self.assertEqual([], rest([]))
        self.assertEqual([], rest([1]))
        self.assertEqual([2, 3], rest([1, 2, 3]))

    #--------------------------------------------------------------------------

    def test_merge(self):
        """Test merging multiple dictionaries into one.
        """
        a = {'a':1}
        b = {'b':2}
        c = {'a':2, 'c':3}
        self.assertEqual(None, merge())
        self.assertEqual(None, merge(None))
        self.assertEqual({}, merge({}))
        self.assertEqual({}, merge({}, None))
        self.assertEqual({}, merge(None, {}))
        self.assertEqual({'a':1, 'b':2}, merge(a, b))
        self.assertEqual({'a':1, 'b':2}, merge(a, None, b))
        # later arguments win on key collisions
        self.assertEqual({'a':2, 'b':2, 'c':3}, merge(a, c, b))
        self.assertEqual({'a':1, 'b':2, 'c':3}, merge(c, b, a))

    #--------------------------------------------------------------------------

    def test_select_keys(self):
        """Test creating new dict with select keys from existing dict.
        """
        a = {'a':1}
        b = {'b':2, 'c':3}
        self.assertEqual({}, select_keys({}, []))
        self.assertEqual({}, select_keys({}, ['a']))
        self.assertEqual({}, select_keys(a, []))
        self.assertEqual(a, select_keys(a, ['a']))
        self.assertEqual({}, select_keys(b, ['a']))
        self.assertEqual(b, select_keys(b, ['a', 'b', 'c']))
        self.assertEqual(b, select_keys(b, ['b', 'c']))

    #--------------------------------------------------------------------------

    def test_some(self):
        """Test returning first logical true value in a list.
        """
        # predicate returning a bool vs. one returning the value itself
        is_a = lambda x: x == 'a'
        fn_a = lambda x: x == 'a' and x
        self.assertEqual(None, some(is_a, None))
        self.assertEqual(None, some(is_a, []))
        self.assertEqual(None, some(is_a, ['b']))
        self.assertEqual(True, some(is_a, ['a']))
        self.assertEqual(True, some(is_a, ['b', 'a']))
        self.assertEqual(True, some(is_a, ['b', 'a', 'c']))
        self.assertEqual(None, some(fn_a, None))
        self.assertEqual(None, some(fn_a, []))
        self.assertEqual(None, some(fn_a, ['b']))
        self.assertEqual('a', some(fn_a, ['a']))
        self.assertEqual('a', some(fn_a, ['b', 'a']))
        self.assertEqual('a', some(fn_a, ['b', 'a', 'c']))

    #--------------------------------------------------------------------------

    def test_identity(self):
        """Test identity function.
        """
        l = lambda: None
        self.assertEqual({}, identity({}))
        self.assertEqual([], identity([]))
        self.assertEqual(set([1]), identity(set([1])))
        self.assertEqual(1, identity(1))
        self.assertEqual(1.0, identity(1.0))
        self.assertEqual('a', identity('a'))
        self.assertEqual(l, identity(l))

    #--------------------------------------------------------------------------

    def test_constantly(self):
        """Test constantly returning input value.
        """
        # the returned function ignores all positional and keyword arguments
        lst = constantly([])
        num = constantly(1)
        lmb = constantly(lambda: 'a')
        self.assertEqual([], lst())
        self.assertEqual([], lst(1))
        self.assertEqual([], lst(1, x=2))
        self.assertEqual(1, num())
        self.assertEqual(1, num(1))
        self.assertEqual(1, num(1, x=2))
        self.assertEqual('a', lmb()())
        self.assertEqual('a', lmb(1)())
        self.assertEqual('a', lmb(1, x=2)())

    #--------------------------------------------------------------------------

    def test_comp(self):
        """Test composing functions together.
        """
        # comp applies right-to-left; only the rightmost may take extra args
        join = lambda xs: "".join(map(str, xs))
        joind = lambda xs, d: d.join(map(str, xs))
        ident = comp()
        f = comp(reversed)
        fg = comp(list, reversed)
        fgh = comp(join, list, reversed)
        fghi = comp(join, list, reversed, joind)
        self.assertEqual('a', ident('a'))
        self.assertEqual([3, 2, 1], list(f([1, 2, 3])))
        self.assertEqual(['d', 'c', 'b', 'a'], fg("abcd"))
        self.assertEqual("dcba", fgh("abcd"))
        self.assertEqual("d.c.b.a", fghi(['a', 'b', 'c', 'd'], "."))
| 1.257813 | 1 |
shipment_management/provider_fedex.py | vishdha/shipment_management | 2 | 12772701 | <reponame>vishdha/shipment_management<filename>shipment_management/provider_fedex.py
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import binascii
import datetime
import json
import frappe
from frappe import _
from frappe.utils import cint
from frappe.utils.file_manager import *
from frappe.utils.password import get_decrypted_password
from shipment_management.config.app_config import PRIMARY_FEDEX_DOC_NAME, ExportComplianceStatement
from shipment_management.shipment import check_permission
# ########################### FEDEX IMPORT ####################################
# IMPORT FEDEX LIBRARY IS WITH <<frappe.get_module>> BECAUSE OF BUG
# Seems like the sandbox import path is broken on certain modules.
# More details: https://discuss.erpnext.com/t/install-requirements-with-bench-problem-importerror/16558/5
# If import error during installation try reinstall fedex manually:
# bench shell
# pip install fedex
# Make sure fedex and all the library file files are there ~/frappe-bench/env/lib/python2.7/
fedex_track_service = frappe.get_module("fedex.services.track_service")
# TODO - Fix import after https://github.com/python-fedex-devs/python-fedex/pull/86
from shipment_management.temp_fedex.ship_service import FedexDeleteShipmentRequest, FedexProcessInternationalShipmentRequest, FedexProcessShipmentRequest
from shipment_management.temp_fedex.rate_service import FedexRateServiceRequest, FedexInternationalRateServiceRequest
# #############################################################################
# rate_service = frappe.get_module("fedex.services.rate_service")
# ship_service = frappe.get_module("fedex.services.ship_service")
# FedexDeleteShipmentRequest = ship_service.FedexDeleteShipmentRequest
# FedexProcessShipmentRequest = ship_service.FedexProcessShipmentRequest
# FedexRateServiceRequest = rate_service.FedexRateServiceRequest
fedex_config = frappe.get_module("fedex.config")
conversion = frappe.get_module("fedex.tools.conversion")
availability_commitment_service = frappe.get_module("fedex.services.availability_commitment_service")
base_service = frappe.get_module("fedex.base_service")
FedexError = base_service.FedexError
subject_to_json = conversion.sobject_to_json
FedexTrackRequest = fedex_track_service.FedexTrackRequest
FedexConfig = fedex_config.FedexConfig
FedexAvailabilityCommitmentRequest = availability_commitment_service.FedexAvailabilityCommitmentRequest
# #############################################################################
CUSTOMER_TRANSACTION_ID = "*** TrackService Request v10 using Python ***"
def _get_configuration():
    """Fetch the primary 'DTI Fedex Configuration' record as a dict.

    Raises (via ``frappe.throw``) when the record does not exist.
    """
    # Parameterized query instead of %-string interpolation: avoids SQL
    # injection and quoting issues even though the name is currently a
    # module constant.
    rows = frappe.db.sql(
        '''SELECT * from `tabDTI Fedex Configuration` WHERE name = %s''',
        (PRIMARY_FEDEX_DOC_NAME,), as_dict=True)
    if not rows:
        frappe.throw(_("Please create Fedex Configuration: %s" % PRIMARY_FEDEX_DOC_NAME))
    return rows[0]
def get_fedex_server_info():
    """Return an HTML snippet summarizing the active Fedex configuration.

    Note: this includes the stored (encrypted) password field verbatim --
    NOTE(review): confirm it is safe to surface this value in the UI.
    """
    fedex_server_doc_type = _get_configuration()
    config_message = """<b>FEDEX CONFIG:</b>
<br><b>key </b> : '{key}'
<br><b>password </b> : '{password}'
<br><b>account_number </b> : '{account_number}'
<br><b>meter_number </b> : '{meter_number}'
<br><b>freight_account_number</b> : '{freight_account_number}'
<br><b>use_test_server </b> : '{use_test_server}'""".format(
        key=fedex_server_doc_type['fedex_key'],
        password=fedex_server_doc_type['password'],
        account_number=fedex_server_doc_type['account_number'],
        meter_number=fedex_server_doc_type['meter_number'],
        freight_account_number=fedex_server_doc_type['freight_account_number'],
        use_test_server=fedex_server_doc_type['use_test_server'])
    return config_message
def get_fedex_config():
    """Build a ``FedexConfig`` object from the primary configuration record.

    The password is read decrypted from the same configuration document.
    """
    cfg = _get_configuration()
    # NOTE(review): the source dump contained a redacted '<PASSWORD>' literal
    # for the doctype argument; 'DTI Fedex Configuration' is the doctype the
    # record is read from in _get_configuration() -- confirm against the
    # repository history.
    password = get_decrypted_password('DTI Fedex Configuration',
                                      PRIMARY_FEDEX_DOC_NAME,
                                      fieldname='password',
                                      raise_exception=True)
    return FedexConfig(key=cfg['fedex_key'],
                       password=password,
                       account_number=cfg['account_number'],
                       meter_number=cfg['meter_number'],
                       freight_account_number=cfg['freight_account_number'],
                       # truthy DB flag -> plain bool (replaces if/else)
                       use_test_server=bool(cfg['use_test_server']))
CONFIG_OBJ = get_fedex_config()
# #############################################################################
# #############################################################################
# #############################################################################
@frappe.whitelist()
def estimate_fedex_delivery_time(OriginPostalCode=None,
                                 OriginCountryCode=None,
                                 DestinationPostalCode=None,
                                 DestinationCountryCode=None):
    """Return a ship date for a prospective FedEx route.

    NOTE(review): this builds a FedexAvailabilityCommitmentRequest and fills
    in origin/destination, but never calls send_request() -- so the returned
    ``ShipDate`` is the request object's *default* value, not a FedEx
    response. Confirm whether a send_request() call was intended here.
    """
    avc_request = FedexAvailabilityCommitmentRequest(CONFIG_OBJ)
    # origin and destination of the prospective shipment
    avc_request.Origin.PostalCode = OriginPostalCode
    avc_request.Origin.CountryCode = OriginCountryCode
    avc_request.Destination.PostalCode = DestinationPostalCode
    avc_request.Destination.CountryCode = DestinationCountryCode
    return avc_request.ShipDate
# #############################################################################
# #############################################################################
# #############################################################################
def create_fedex_package(sequence_number, shipment, box, source_doc):
    """Build one ``RequestedPackageLineItem`` WSDL object for *box*.

    Besides returning the package object, this mirrors per-box totals
    (insurance, weight, and — for international shipments — the commodity
    breakdown and customs value) back onto the box row via ``frappe.db.set``.

    :param sequence_number: 1-based position of this box in the shipment.
    :param shipment: FedEx process-shipment request object.
    :param box: "DTI Shipment Package" child row describing the box.
    :param source_doc: parent "DTI Shipment Note" document.
    :return: the populated RequestedPackageLineItem.
    """
    # item_code -> quantity packed in this box.
    items_in_one_box = parse_items_in_box(box)
    # ------------------------
    # Weight:
    package_weight = shipment.create_wsdl_object_of_type('Weight')
    package_weight.Value = get_total_box_value(box=box, source_doc=source_doc, attrib='weight_value')
    package_weight.Units = get_shipment_weight_units(source_doc)
    package = shipment.create_wsdl_object_of_type('RequestedPackageLineItem')
    package.Weight = package_weight
    # ------------------------
    # Insurance:
    package1_insure = shipment.create_wsdl_object_of_type('Money')
    package1_insure.Currency = 'USD'
    package1_insure.Amount = get_total_box_value(box=box, source_doc=source_doc, attrib='insurance')
    package.InsuredValue = package1_insure
    # ------------------------
    # Optional signature-on-delivery service for this package.
    if source_doc.signature_option:
        package.SpecialServicesRequested.SpecialServiceTypes = 'SIGNATURE_OPTION'
        package.SpecialServicesRequested.SignatureOptionDetail.OptionType = source_doc.signature_option
    # Optional free-text customer reference attached to the package.
    if box.reference_note:
        customer_reference = shipment.create_wsdl_object_of_type('CustomerReference')
        customer_reference.CustomerReferenceType="CUSTOMER_REFERENCE"
        customer_reference.Value = box.reference_note
        package.CustomerReferences.append(customer_reference)
    # Packaging details; custom boxes ("YOUR_PACKAGING") also need dimensions.
    if box.packaging_type:
        box_doc = frappe.get_doc("Shipping Package", box.packaging_type)
        shipment.RequestedShipment.PackagingType = box_doc.box_code
        package.PhysicalPackaging = box_doc.physical_packaging
        if box_doc.box_code == "YOUR_PACKAGING":
            package_dim = shipment.create_wsdl_object_of_type("Dimensions")
            package_dim.Length = cint(box_doc.length)
            package_dim.Width = cint(box_doc.width)
            package_dim.Height = cint(box_doc.height)
            package_dim.Units = "IN"
            package.Dimensions = package_dim
    package.SequenceNumber = sequence_number
    # International shipments additionally need one Commodity per item.
    if source_doc.international_shipment:
        total_box_custom_value = 0
        for i, item in enumerate(items_in_one_box):
            quantity = items_in_one_box[item]
            # ########################
            # Total Insured value exceeds customs value (Error code: 2519)
            # FIX :
            package.InsuredValue.Amount = get_item_by_item_code(source_doc, item).insurance
            # #######################
            # For international multiple piece shipments,
            # commodity information must be passed in the Master and on each child transaction.
            # If this shipment contains more than four commodities line items,
            # the four highest valued should be included in the first 4 occurances for this request.
            commodity = shipment.create_wsdl_object_of_type('Commodity')
            commodity.Name = get_item_by_item_code(source_doc, item).item_name  # Name of this commodity.
            commodity.NumberOfPieces = quantity  # Total number of pieces of this commodity
            # Complete and accurate description of this commodity:
            commodity.Description = get_item_by_item_code(source_doc, item).description
            commodity.CountryOfManufacture = source_doc.shipper_address_country_code
            # Weight of this commodity:
            package_weight = shipment.create_wsdl_object_of_type('Weight')
            package_weight.Value = get_item_by_item_code(source_doc, item).weight_value
            package_weight.Units = get_shipment_weight_units(source_doc)
            commodity.Weight = package_weight
            # This field is used for enterprise transactions:
            commodity.Quantity = quantity
            # Unit of measure used to express the quantity of this commodity line item:
            commodity.QuantityUnits = 'EA'
            # Value of each unit in Quantity. Six explicit decimal positions, Max length 18 including decimal:
            commodity.UnitPrice.Currency = "USD"
            commodity.UnitPrice.Amount = get_item_by_item_code(source_doc, item).rate
            # Total customs value for this line item.
            # It should equal the commodity unit quantity times commodity unit value:
            commodity.CustomsValue.Currency = "USD"
            commodity.CustomsValue.Amount = get_item_by_item_code(source_doc, item).custom_value * quantity
            total_box_custom_value += commodity.CustomsValue.Amount
            if commodity.CustomsValue.Amount == 0:
                frappe.throw(_("CUSTOM VALUE = 0. Please specify custom value in items"))
            # High-value (>= $2500) or CA/MX destinations require an export
            # compliance statement.
            if commodity.CustomsValue.Amount >= 2500 or source_doc.recipient_address_country_code in ['CA', 'MX']:
                export_detail = shipment.create_wsdl_object_of_type('ExportDetail')
                export_detail.ExportComplianceStatement = ExportComplianceStatement
                shipment.RequestedShipment.CustomsClearanceDetail.ExportDetail = export_detail
            # NOTE(review): the shipment-level customs value is overwritten on
            # every commodity iteration, so only the last commodity's value is
            # kept — confirm whether the grand total was intended here.
            shipment.RequestedShipment.CustomsClearanceDetail.CustomsValue.Amount = commodity.CustomsValue.Amount
            shipment.RequestedShipment.CustomsClearanceDetail.CustomsValue.Currency = commodity.CustomsValue.Currency
            shipment.add_commodity(commodity)
            # Human-readable commodity summary accumulated on the box row.
            commodity_message = """<div style="color: #36414c; background-color: #f0f5f5;
            font-size: 80% ; padding: 10px; border-radius: 10px; border: 2px groove;">
            <b>ITEM NAME</b> = {name} <br>
            <b>NUMBER OF PIECES </b> = {number_of_pieces}<br>
            <b>DESCRIPTION:</b> <br>
            {description}<br>
            <b>COUNTRY OF MANUFACTURE </b> = {country_manufacture}<br>
            <b>WEIGHT </b> = {weight} <br>
            <b>QUANTITY </b> = {quantity} <br>
            <b>QUANTITY UNITS </b> = {quantity_unites} <br>
            <b>UNIT PRICE CURRENCY </b> = {unit_price_currency} <br>
            <b>UNIT PRICE AMOUNT (RATE) </b> = {unit_price_amount} <br>
            <b>CUSTOM VALUE CURRENCY </b> = {custom_value_currency} <br>
            <b>TOTAL CUSTOM VALUE AMOUNT </b> = {custom_value_amount} <br></div>
            """.format(box_number=sequence_number,  # box_number is not referenced by the template
                       name=commodity.Name,
                       number_of_pieces=commodity.NumberOfPieces,
                       description=commodity.Description,
                       country_manufacture=commodity.CountryOfManufacture,
                       weight="%s %s" % (commodity.Weight.Value, commodity.Weight.Units),
                       quantity=commodity.Quantity,
                       quantity_unites=commodity.QuantityUnits,
                       unit_price_currency=commodity.UnitPrice.Currency,
                       unit_price_amount=commodity.UnitPrice.Amount,
                       custom_value_currency=commodity.CustomsValue.Currency,
                       custom_value_amount=commodity.CustomsValue.Amount)
            # Append to the summary from previous commodities of this box.
            if i > 0:
                commodity_message = box.commodity_information + "<br>" + commodity_message
            frappe.db.set(box, 'commodity_information', str(commodity_message))
        frappe.db.set(box, 'total_box_custom_value', total_box_custom_value)
    # -----------------------------
    # Persist per-box totals for display on the shipment form.
    frappe.db.set(box, 'total_box_insurance', get_total_box_value(box=box,
                                                                  source_doc=source_doc,
                                                                  attrib='insurance'))
    frappe.db.set(box, 'total_box_weight', '%s (%s)' % (get_total_box_value(box=box,
                                                                            source_doc=source_doc,
                                                                            attrib='weight_value'),
                                                        get_shipment_weight_units(source_doc)))
    return package
# #############################################################################
# #############################################################################
# #############################################################################
def create_fedex_shipment(source_doc):
    """Create the FedEx shipment for a "DTI Shipment Note" document.

    Builds the process-shipment request (domestic or international), sends
    one FedEx transaction per box (the first box establishes the master
    tracking number), saves each returned label as a private file, and then
    updates the note with delivery-time, rate, insurance and weight totals.
    """
    GENERATE_IMAGE_TYPE = source_doc.file_format
    # International and domestic shipments use different request classes and
    # different service-type fields on the document.
    if source_doc.international_shipment:
        shipment = FedexProcessInternationalShipmentRequest(CONFIG_OBJ, customer_transaction_id=CUSTOMER_TRANSACTION_ID)
        service_type = source_doc.service_type_international
    else:
        shipment = FedexProcessShipmentRequest(CONFIG_OBJ, customer_transaction_id=CUSTOMER_TRANSACTION_ID)
        service_type = source_doc.service_type_domestic
    shipment.RequestedShipment.DropoffType = source_doc.drop_off_type
    shipment.RequestedShipment.ServiceType = service_type
    shipment.RequestedShipment.PackagingType = source_doc.packaging_type
    # Shipper contact info.
    shipment.RequestedShipment.Shipper.Contact.PersonName = source_doc.shipper_contact_person_name
    shipment.RequestedShipment.Shipper.Contact.CompanyName = source_doc.shipper_company_name
    shipment.RequestedShipment.Shipper.Contact.PhoneNumber = source_doc.shipper_contact_phone_number
    # Shipper address.
    shipment.RequestedShipment.Shipper.Address.StreetLines = [source_doc.shipper_address_street_lines]
    shipment.RequestedShipment.Shipper.Address.City = source_doc.shipper_address_city
    shipment.RequestedShipment.Shipper.Address.StateOrProvinceCode = source_doc.shipper_address_state_or_province_code
    shipment.RequestedShipment.Shipper.Address.PostalCode = source_doc.shipper_address_postal_code
    shipment.RequestedShipment.Shipper.Address.CountryCode = source_doc.shipper_address_country_code
    # Residential flag affects surcharges on the FedEx side.
    if source_doc.recipient_address_residential:
        shipment.RequestedShipment.Recipient.Address.Residential = True
    else:
        shipment.RequestedShipment.Recipient.Address.Residential = False
    # Recipient contact info.
    shipment.RequestedShipment.Recipient.Contact.PersonName = source_doc.recipient_contact_person_name
    shipment.RequestedShipment.Recipient.Contact.CompanyName = source_doc.recipient_company_name
    shipment.RequestedShipment.Recipient.Contact.PhoneNumber = source_doc.recipient_contact_phone_number
    # Recipient addressStateOrProvinceCode
    shipment.RequestedShipment.Recipient.Address.StreetLines = [source_doc.recipient_address_street_lines]
    shipment.RequestedShipment.Recipient.Address.City = source_doc.recipient_address_city
    shipment.RequestedShipment.Recipient.Address.StateOrProvinceCode = source_doc.recipient_address_state_or_province_code
    shipment.RequestedShipment.Recipient.Address.PostalCode = source_doc.recipient_address_postal_code
    shipment.RequestedShipment.Recipient.Address.CountryCode = source_doc.recipient_address_country_code
    shipment.RequestedShipment.EdtRequestType = 'NONE'
    # Senders account information
    shipment.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.AccountNumber = CONFIG_OBJ.account_number
    shipment.RequestedShipment.ShippingChargesPayment.PaymentType = source_doc.payment_type
    shipment.RequestedShipment.LabelSpecification.LabelFormatType = 'COMMON2D'
    shipment.RequestedShipment.LabelSpecification.ImageType = GENERATE_IMAGE_TYPE
    shipment.RequestedShipment.LabelSpecification.LabelStockType = source_doc.label_stock_type
    shipment.RequestedShipment.ShipTimestamp = datetime.datetime.now().replace(microsecond=0).isoformat()
    shipment.RequestedShipment.LabelSpecification.LabelPrintingOrientation = 'TOP_EDGE_OF_TEXT_FIRST'
    if hasattr(shipment.RequestedShipment.LabelSpecification, 'LabelOrder'):
        del shipment.RequestedShipment.LabelSpecification.LabelOrder  # Delete, not using.
    # #############################################################################
    # Ensure box contents match the delivery-note items before contacting FedEx.
    DictDiffer.validate_shipment_integrity(source_doc)
    # #############################################################################
    # #############################################################################
    all_boxes = source_doc.get_all_children("DTI Shipment Package")
    # The total number of packages in the entire shipment
    # (even when the shipment spans multiple transactions.)
    shipment.RequestedShipment.PackageCount = len(all_boxes)
    # First box
    master_box = all_boxes[0]
    box_sequence_number = 1
    package = create_fedex_package(sequence_number=box_sequence_number,
                                   shipment=shipment,
                                   box=master_box,
                                   source_doc=source_doc)
    shipment.RequestedShipment.RequestedPackageLineItems = [package]
    if source_doc.international_shipment:
        """
        TotalWeight:
        Identifies the total weight of the shipment being conveyed to FedEx.
        This is only applicable to International shipments
        and should only be used on the first package of a multiple piece shipment.
        This value contains 1 explicit decimal position
        """
        shipment.RequestedShipment.TotalWeight.Units = get_shipment_weight_units(source_doc)
        shipment.RequestedShipment.TotalWeight.Value = get_total_shipment_value(source_doc=source_doc, attrib='weight_value')
    # First transaction returns the master tracking number used by all
    # subsequent child transactions.
    label = send_request_to_fedex(master_box, shipment, box_sequence_number)
    master_tracking_number = label.TrackingIds[0].TrackingNumber
    master_tracking_id_type = label.TrackingIds[0].TrackingIdType
    frappe.db.set(source_doc, 'tracking_number', master_tracking_number)
    frappe.db.set(source_doc, 'master_tracking_id_type', master_tracking_id_type)
    frappe.db.set(master_box, 'tracking_number', master_tracking_number)
    save_label(label, master_tracking_number, GENERATE_IMAGE_TYPE.lower(), source_doc, master_box, box_sequence_number)
    # ############################################################################
    # ############################################################################
    # For other boxes
    for box in all_boxes[1:]:
        box_sequence_number += 1
        package = create_fedex_package(sequence_number=box_sequence_number,
                                       shipment=shipment,
                                       box=box,
                                       source_doc=source_doc)
        shipment.RequestedShipment.RequestedPackageLineItems = [package]
        # Child transactions reference the master tracking id.
        shipment.RequestedShipment.MasterTrackingId.TrackingNumber = master_tracking_number
        shipment.RequestedShipment.MasterTrackingId.TrackingIdType = master_tracking_id_type
        label = send_request_to_fedex(box, shipment, box_sequence_number)
        save_label(label, master_tracking_number, GENERATE_IMAGE_TYPE.lower(), source_doc, box, box_sequence_number)
    # ############################################################################
    # ############################################################################
    # ############################################################################
    # ############################################################################
    # Estimated delivery time (Origin = shipper, Destination = recipient).
    try:
        delivery_time = estimate_fedex_delivery_time(OriginPostalCode=source_doc.shipper_address_postal_code,
                                                     OriginCountryCode=source_doc.shipper_address_country_code,
                                                     DestinationPostalCode=source_doc.recipient_address_postal_code,
                                                     DestinationCountryCode=source_doc.recipient_address_country_code)
        frappe.db.set(source_doc, 'delivery_time', delivery_time)
        frappe.msgprint("Delivery Time: %s" % delivery_time, "Updated!")
    except Exception as error:
        frappe.throw(_("Delivery time error - %s" % error))
    # ############################################################################
    # ############################################################################
    # Shipment rate.
    try:
        rate = get_all_shipment_rate(source_doc.name)
        frappe.db.set(source_doc, 'shipment_rate',
                      """
                      <p style="padding: 15px; align: center; color: #36414c; background-color: #F9FBB6; height: 80px; width: 450px;">
                      TotalNetChargeWithDutiesAndTaxes: <br>
                      <b>%s (%s)</b> </p>
                      """ % (rate["label"], rate["fee"]))
        frappe.msgprint("Rate: %s (%s)" % (rate["label"], rate["fee"]), "Updated!")
    except Exception as error:
        frappe.throw(str(error))
        # NOTE(review): frappe.throw raises, so this fallback assignment is
        # unreachable — confirm whether "N/A" was meant to be set before the
        # throw instead.
        frappe.db.set(source_doc, 'shipment_rate', "N/A")
    # ############################################################################
    # ############################################################################
    # Persist shipment-level totals for the form.
    frappe.db.set(source_doc, 'total_insurance', (get_total_shipment_value(source_doc=source_doc,
                                                                           attrib='insurance')))
    if source_doc.international_shipment:
        frappe.db.set(source_doc, 'total_custom_value', sum([box.total_box_custom_value for box in all_boxes]))
    frappe.db.set(source_doc, 'total_weight', '%s (%s)' % (get_total_shipment_value(source_doc=source_doc,
                                                                                    attrib='weight_value'),
                                                           get_shipment_weight_units(source_doc)))
    # #############################################################################
    # #############################################################################
    frappe.msgprint("DONE!", "Tracking number:{}".format(master_tracking_number))
# #############################################################################
# #############################################################################
def save_label(label, master_tracking_number, image_type, source_doc, box, box_sequence_number):
    """Decode the base64 label returned by FedEx and attach it as a private file.

    Also records the box's own tracking number and stores the attachment URL
    in the ``label_<n>`` field of the shipment note.
    """
    tracking_no = label.TrackingIds[0].TrackingNumber
    raw_image = binascii.a2b_base64(label.Label.Parts[0].Image)
    frappe.db.set(box, 'tracking_number', tracking_no)
    file_name = "label_{0}_{1}.{2}".format(master_tracking_number, tracking_no, image_type)
    attachment = save_file(file_name, raw_image, source_doc.doctype, source_doc.name, is_private=1)
    frappe.db.set(source_doc, 'label_%i' % box_sequence_number, attachment.file_url)
# #############################################################################
# #############################################################################
def send_request_to_fedex(box, shipment, box_sequence_number):
    """Submit the shipment request to FedEx and return the first package detail.

    Translates the common "Customs Value is required" failure into a hint to
    enable the International Shipment option; any other error is re-raised as
    a user-facing message tagged with the box number.
    """
    try:
        shipment.send_request()
        return shipment.response.CompletedShipmentDetail.CompletedPackageDetails[0]
    except Exception as error:
        message = str(error)
        if "Customs Value is required" in message:
            frappe.throw(_("International Shipment option is required".upper()))
        frappe.throw(_("[BOX # {}] Error from Fedex: {}".format(box_sequence_number, message)))
# #############################################################################
# #############################################################################
def get_shipment_weight_units(source_doc):
    """Return the single weight unit shared by every item in the shipment.

    Aborts with a user-facing error when items mix different units, since
    FedEx requires one unit per shipment.
    """
    units = {get_item_by_item_code(source_doc, code).weight_units
             for box in source_doc.box_list
             for code in parse_items_in_box(box)}
    if len(units) > 1:
        frappe.throw(_("Please select the same weight units for all items. They can't be different."))
    return units.pop()
# #############################################################################
# #############################################################################
def get_total_box_value(box, source_doc, attrib):
    """Sum an item attribute (insurance, weight_value, ...) over one box.

    Each item's attribute value is multiplied by the quantity packed in the
    box. For weight, the tare weight of the shipping package itself is added
    on top.

    :param box: "DTI Shipment Package" row with an ``items_in_box`` field.
    :param source_doc: parent shipment note holding the item rows.
    :param attrib: attribute name on the item row to aggregate.
    :return: the accumulated numeric total.
    """
    box_total = 0
    items = parse_items_in_box(box)
    for item in items:
        quantity_in_box = items[item]
        box_total += getattr(get_item_by_item_code(source_doc=source_doc, item_code=item), attrib) * quantity_in_box
    # BUG FIX: frappe.get_value returns None when the packaging type is unset
    # or the package has no stored weight, which previously crashed the sum
    # with a TypeError. Guard the lookup and default the tare weight to 0.
    if attrib == "weight_value" and box.packaging_type:
        box_total += frappe.get_value("Shipping Package", box.packaging_type, "weight") or 0
    return box_total
def get_total_shipment_value(source_doc, attrib):
    """Sum an item attribute (insurance, weight_value, ...) over all boxes."""
    total = 0
    for box in source_doc.box_list:
        total += get_total_box_value(box, source_doc, attrib)
    return total
# #############################################################################
# #############################################################################
def parse_items_in_box(box):
    """Parse the free-text ``items_in_box`` field into ``{item_code: qty}``.

    The field holds one ``ITEM-CODE:QTY`` entry per line; quantities for a
    repeated item code are summed.

    BUG FIX: ``str.split`` never raises ValueError, so the old
    ``except ValueError`` was dead code and malformed or blank lines crashed
    with IndexError/ValueError (and could even reference an unbound local).
    Blank lines are now skipped silently and malformed lines are reported
    with the original warning message and skipped.
    """
    items = {}
    for line in box.items_in_box.split("\n"):
        line = line.strip()
        if not line:
            # Ignore blank lines (e.g. a trailing newline in the field).
            continue
        parts = line.split(":")
        if len(parts) != 2 or not parts[1].strip().isdigit():
            frappe.msgprint(_("WARNING! Bad lines:%s" % line))
            continue
        item_code = parts[0]
        items[item_code] = items.get(item_code, 0) + int(parts[1])
    return items
# #############################################################################
# #############################################################################
def get_item_by_item_code(source_doc, item_code):
    """Return the first "DTI Shipment Note Item" row matching *item_code*.

    Returns ``None`` when no row matches.
    """
    matches = (row for row in source_doc.get_all_children("DTI Shipment Note Item")
               if row.item_code == item_code)
    return next(matches, None)
# #############################################################################
# #############################################################################
@frappe.whitelist()
def get_fedex_packages_rate(international=False,
                            DropoffType=None,
                            ServiceType=None,
                            PackagingType=None,
                            ShipperStateOrProvinceCode=None,
                            ShipperPostalCode=None,
                            ShipperCountryCode=None,
                            RecipientStateOrProvinceCode=None,
                            RecipientPostalCode=None,
                            RecipientCountryCode=None,
                            EdtRequestType=None,
                            IsResidential=False,
                            PaymentType=None,
                            package_list=None,
                            ignoreErrors=False,
                            single_rate=False,
                            signature_option=None,
                            exceptions=None,
                            delivery_date=None,
                            saturday_delivery=False,
                            flat_rate=False):
    """
    :param international:
    :param DropoffType:
    :param ServiceType:
    :param PackagingType:
    :param ShipperStateOrProvinceCode:
    :param ShipperPostalCode:
    :param ShipperCountryCode:
    :param RecipientStateOrProvinceCode:
    :param RecipientPostalCode:
    :param RecipientCountryCode:
    :param EdtRequestType:
    :param PaymentType:
    :param package_list:
    :return: data rate
    EXAMPLE
    DropoffType: 'REGULAR_PICKUP',
    ServiceType:'FEDEX_GROUND',
    PackagingType: 'YOUR_PACKAGING',
    ShipperStateOrProvinceCode:'SC',
    ShipperPostalCode: '29631',
    ShipperCountryCode:'US',
    RecipientStateOrProvinceCode:'NC',
    RecipientPostalCode:'27577',
    RecipientCountryCode:'US',
    EdtRequestType:'NONE',
    PaymentType:'SENDER',
    package_list:
    [{"weight_value":"1",
    "weight_units":"LB",
    "physical_packaging":"BOX",
    "group_package_count":"1",
    "insured_amount":"100"},
    {"weight_value":"10004000",
    "weight_units":"LB",
    "physical_packaging":"BOX",
    "group_package_count":"2",
    "insured_amount":"100"}]
    _______________________________
    KNOWN ISSUES (FEDEX TEST SERVER)
    Test server caches rate for the same Shipper/Recipient data
    """
    # Initiate Fedex request
    if international:
        rate = FedexInternationalRateServiceRequest(CONFIG_OBJ)
    else:
        rate = FedexRateServiceRequest(CONFIG_OBJ)
    # Set Fedex shipping details
    rate.RequestedShipment.DropoffType = DropoffType
    rate.RequestedShipment.ServiceType = ServiceType
    rate.RequestedShipment.PackagingType = PackagingType
    # Set shipper address details
    rate.RequestedShipment.Shipper.Address.StateOrProvinceCode = ShipperStateOrProvinceCode
    rate.RequestedShipment.Shipper.Address.PostalCode = ShipperPostalCode
    rate.RequestedShipment.Shipper.Address.CountryCode = ShipperCountryCode
    # Set reciever address details
    if RecipientStateOrProvinceCode:
        rate.RequestedShipment.Recipient.Address.StateOrProvinceCode = RecipientStateOrProvinceCode
    rate.RequestedShipment.Recipient.Address.PostalCode = RecipientPostalCode
    rate.RequestedShipment.Recipient.Address.CountryCode = RecipientCountryCode
    rate.RequestedShipment.Recipient.Address.Residential = IsResidential
    # Set payer details
    rate.RequestedShipment.EdtRequestType = EdtRequestType
    rate.RequestedShipment.ShippingChargesPayment.PaymentType = PaymentType
    # Set special services, if applicable
    # Fedex One Rate
    if flat_rate:
        rate.RequestedShipment.SpecialServicesRequested.SpecialServiceTypes = "FEDEX_ONE_RATE"
    # Fedex Saturday Delivery
    elif saturday_delivery:
        if not delivery_date:
            frappe.throw(_("Please specify Ship Date for Saturday Delivery"))
        delivery_datetime = frappe.utils.get_datetime(delivery_date)
        rate.RequestedShipment.SpecialServicesRequested.SpecialServiceTypes = "SATURDAY_DELIVERY"
        rate.RequestedShipment.ShipTimestamp = delivery_datetime.isoformat()
    # Create Fedex shipments for each package
    for package in package_list:
        # Set package weights
        pkg_weight = rate.create_wsdl_object_of_type('Weight')
        pkg_weight.Value = package["weight_value"]
        pkg_weight.Units = package["weight_units"]
        # Set package content details
        pkg_obj = rate.create_wsdl_object_of_type('RequestedPackageLineItem')
        pkg_obj.Weight = pkg_weight
        pkg_obj.GroupPackageCount = package["group_package_count"]
        # Set packaging details
        if flat_rate:
            rate.RequestedShipment.PackagingType = PackagingType
            pkg_obj.PhysicalPackaging = frappe.db.get_value("Shipping Package", {"box_code": PackagingType}, "physical_packaging")
        elif package.get("packaging_type"):
            box_doc = frappe.get_doc("Shipping Package", package.get("packaging_type"))
            rate.RequestedShipment.PackagingType = box_doc.box_code
            pkg_obj.PhysicalPackaging = box_doc.physical_packaging
            # Custom packaging additionally needs explicit dimensions.
            if box_doc.box_code == "YOUR_PACKAGING":
                pkg_dim = rate.create_wsdl_object_of_type("Dimensions")
                pkg_dim.Length = cint(box_doc.length)
                pkg_dim.Width = cint(box_doc.width)
                pkg_dim.Height = cint(box_doc.height)
                pkg_dim.Units = "IN"
                pkg_obj.Dimensions = pkg_dim
        # Set insurance amounts
        pkg_insurance = rate.create_wsdl_object_of_type('Money')
        pkg_insurance.Currency = "USD"
        pkg_insurance.Amount = package["insured_amount"]
        pkg_obj.InsuredValue = pkg_insurance
        # Set additional surcharges
        if signature_option:
            pkg_obj.SpecialServicesRequested.SpecialServiceTypes = 'SIGNATURE_OPTION'
            pkg_obj.SpecialServicesRequested.SignatureOptionDetail.OptionType = signature_option
        rate.add_package(pkg_obj)
    try:
        # Get rates for all the packages
        rate.send_request()
    except Exception as e:
        # NOTE(review): print() here looks like a debug leftover — consider
        # replacing with proper logging.
        print(e)
        if exceptions is not None:
            exceptions.append({"type": "request", "exception": e})
        if 'RequestedPackageLineItem object cannot be null or empty' in str(e):
            raise Exception("WARNING: Please create packages with shipment")
        elif not ignoreErrors:
            frappe.throw(str(e))
        # Reached only when ignoreErrors is truthy (frappe.throw raises).
        return None
    response_json = subject_to_json(rate.response)
    data = json.loads(response_json)
    if "Service is not allowed" in str(data['Notifications'][0]['Message']):
        if ignoreErrors:
            return None
        debug_info = "%s <br> %s <br> %s" % (rate.RequestedShipment.ServiceType, rate.RequestedShipment.Shipper, rate.RequestedShipment.Recipient)
        frappe.throw(_("WARNING: Service is not allowed. Please verify address data! <br> % s" % debug_info))
    rates = []
    try:
        # One entry per quoted service; fee is the total net charge with
        # duties and taxes.
        for service in data["RateReplyDetails"]:
            rates.append({
                'fee': service['RatedShipmentDetails'][0]['ShipmentRateDetail']['TotalNetChargeWithDutiesAndTaxes']['Amount'],
                'label' : service['ServiceType'].replace("_", " "),
                'name' : service['ServiceType'],
                'special_rates_applied': service['RatedShipmentDetails'][0]['ShipmentRateDetail'].get('SpecialRatingApplied', [])
            })
    except KeyError as e:
        if exceptions is not None:
            exceptions.append({"type": "keyerror", "exception": e})
        if not ignoreErrors:
            # NOTE(review): data is a dict here but frappe.throw expects a
            # message string — confirm this should be str(data).
            frappe.throw(data)
        return
    if single_rate:
        return rates[0]
    else:
        return rates
@frappe.whitelist()
def get_all_shipment_rate(doc_name):
    """Return the single best FedEx rate for a "DTI Shipment Note".

    Builds one rate-request entry per shipment box (weight, insurance,
    packaging) and delegates to ``get_fedex_packages_rate`` with
    ``single_rate=True``.
    """
    source_doc = frappe.get_doc("DTI Shipment Note", doc_name)
    package_list = []
    for index, box in enumerate(source_doc.get_all_children("DTI Shipment Package")):
        package_list.append({
            'weight_value': get_total_box_value(box=box, source_doc=source_doc, attrib='weight_value'),
            'weight_units': get_shipment_weight_units(source_doc),
            'physical_packaging': box.physical_packaging,
            'packaging_type': box.packaging_type,
            'group_package_count': index + 1,
            'insured_amount': get_total_box_value(box=box, source_doc=source_doc, attrib='insurance')})
    service_type = (source_doc.service_type_international
                    if source_doc.international_shipment
                    else source_doc.service_type_domestic)
    return get_fedex_packages_rate(international=source_doc.international_shipment,
                                   DropoffType=source_doc.drop_off_type,
                                   ServiceType=service_type,
                                   PackagingType=source_doc.packaging_type,
                                   ShipperStateOrProvinceCode=source_doc.shipper_address_state_or_province_code,
                                   ShipperPostalCode=source_doc.shipper_address_postal_code,
                                   ShipperCountryCode=source_doc.shipper_address_country_code,
                                   RecipientStateOrProvinceCode=source_doc.recipient_address_state_or_province_code,
                                   RecipientPostalCode=source_doc.recipient_address_postal_code,
                                   RecipientCountryCode=source_doc.recipient_address_country_code,
                                   IsResidential=source_doc.recipient_address_residential,
                                   EdtRequestType='NONE',
                                   PaymentType=source_doc.payment_type,
                                   package_list=package_list,
                                   signature_option=source_doc.signature_option,
                                   single_rate=True)
# #############################################################################
# #############################################################################
@frappe.whitelist()
def show_shipment_estimates(doc_name):
    """
    Fedex's shipping calculator estimates the time and cost of delivery based on the destination and service.
    """
    source_doc = frappe.get_doc("DTI Shipment Note", doc_name)
    DictDiffer.validate_shipment_integrity(source_doc=source_doc)
    frappe.msgprint("Shipment calculator estimates the time and cost of delivery based on the destination and service.",
                    "INFO")
    # ===============================================================
    # Delivery time
    # BUG FIX: the original call passed postal codes as country codes and
    # swapped origin with destination. Use the shipper as the origin and the
    # recipient as the destination, matching create_fedex_shipment().
    time = estimate_fedex_delivery_time(OriginPostalCode=source_doc.shipper_address_postal_code,
                                        OriginCountryCode=source_doc.shipper_address_country_code,
                                        DestinationPostalCode=source_doc.recipient_address_postal_code,
                                        DestinationCountryCode=source_doc.recipient_address_country_code)
    frappe.msgprint("<b>Delivery time</b> : %s" % time, "INFO")
    # ===============================================================
    # Calculate Rate: one rate-request entry per shipment box.
    rate_box_list = []
    for i, box in enumerate(source_doc.get_all_children("DTI Shipment Package")):
        box_weight_value = get_total_box_value(box=box, source_doc=source_doc, attrib='weight_value')
        box_weight_units = get_shipment_weight_units(source_doc)
        box_insurance = get_total_box_value(box=box, source_doc=source_doc, attrib='insurance')
        rate_box_list.append({'weight_value': box_weight_value,
                              'weight_units': box_weight_units,
                              'physical_packaging': box.physical_packaging,
                              'packaging_type' : box.packaging_type,
                              'group_package_count': i+1,
                              'insured_amount': box_insurance})
    # TODO - Remove YOUR_PACKAGING and use real PackagingType from doc, investigate error:
    # Service is not allowed. (Code = 868)
    # Note: ServiceType is deliberately omitted so FedEx quotes all services.
    rates = get_fedex_packages_rate(international=source_doc.international_shipment,
                                    DropoffType=source_doc.drop_off_type,
                                    PackagingType='YOUR_PACKAGING',
                                    ShipperStateOrProvinceCode=source_doc.shipper_address_state_or_province_code,
                                    ShipperPostalCode=source_doc.shipper_address_postal_code,
                                    ShipperCountryCode=source_doc.shipper_address_country_code,
                                    RecipientStateOrProvinceCode=source_doc.recipient_address_state_or_province_code,
                                    RecipientPostalCode=source_doc.recipient_address_postal_code,
                                    RecipientCountryCode=source_doc.recipient_address_country_code,
                                    EdtRequestType='NONE',
                                    IsResidential=source_doc.recipient_address_residential,
                                    signature_option=source_doc.signature_option,
                                    PaymentType=source_doc.payment_type,
                                    package_list=rate_box_list)
    for rate in rates:
        frappe.msgprint("<b>%s</b> : %s (%s)<br>" % (rate["label"], rate["fee"], "USD"))
# #############################################################################
# #############################################################################
def delete_fedex_shipment(source_doc):
    """Cancel the FedEx shipment tied to *source_doc*'s master tracking id.

    Raises a plain Exception with a readable message when FedEx cannot find
    or delete the shipment.
    """
    request = FedexDeleteShipmentRequest(CONFIG_OBJ)
    request.DeletionControlType = "DELETE_ALL_PACKAGES"
    request.TrackingId.TrackingNumber = source_doc.tracking_number
    request.TrackingId.TrackingIdType = source_doc.master_tracking_id_type
    try:
        request.send_request()
    except Exception as e:
        if 'Unable to retrieve record' in str(e):
            raise Exception("WARNING: Unable to delete the shipment with the provided tracking number.")
        raise Exception("ERROR: %s. Tracking number: %s. Type: %s" % (e, source_doc.tracking_number, source_doc.master_tracking_id_type))
# #############################################################################
# #############################################################################
def get_fedex_shipment_status(track_value):
    """Return the latest tracking-event description for a tracking number.

    Returns ``None`` when the response carries no events; aborts with a
    user-facing message on a FedexError.
    """
    track = FedexTrackRequest(CONFIG_OBJ, customer_transaction_id=CUSTOMER_TRANSACTION_ID)
    track.SelectionDetails.PackageIdentifier.Type = 'TRACKING_NUMBER_OR_DOORTAG'
    track.SelectionDetails.PackageIdentifier.Value = track_value
    # OperatingCompany must not be present in the request payload.
    del track.SelectionDetails.OperatingCompany
    try:
        track.send_request()
        # NOTE(review): positional access into the suds reply
        # (response[4][0]) is fragile across WSDL versions — confirm against
        # the tracking service version in use.
        return track.response[4][0].TrackDetails[0].Events[0].EventDescription
    except AttributeError:
        # Response had no track details / events for this number.
        return None
    except FedexError as error:
        frappe.throw(_("Fedex error! {} {}".format(error, get_fedex_server_info())))
# #############################################################################
# #############################################################################
@frappe.whitelist(allow_guest=True)
def get_html_code_status_with_fedex_tracking_number(track_value):
    """
    FOR WEB PAGE WITH SHIPMENT TRACKING - shipment_tracking.html

    Returns an HTML snippet describing every track detail FedEx reports for
    *track_value*, or an error snippet when the request fails.

    :param track_value: tracking number (or door tag) entered by the visitor.
    :return: HTML string.
    """
    if not track_value:
        return "Track value can't be empty"
    track = FedexTrackRequest(CONFIG_OBJ, customer_transaction_id=CUSTOMER_TRANSACTION_ID)
    track.SelectionDetails.PackageIdentifier.Type = 'TRACKING_NUMBER_OR_DOORTAG'
    track.SelectionDetails.PackageIdentifier.Value = track_value
    # OperatingCompany must not be present in the request payload.
    del track.SelectionDetails.OperatingCompany
    try:
        track.send_request()
        html = ""
        for match in track.response.CompletedTrackDetails[0].TrackDetails:
            html += "<b>Tracking #:</b> {}".format(match.TrackingNumber)
            if hasattr(match, 'TrackingNumberUniqueIdentifier'):
                html += "<br><b>UniqueID:</b> {}".format(match.TrackingNumberUniqueIdentifier)
            if hasattr(match, 'Notification'):
                html += "<br>{}".format(match.Notification.Message)
            # BUG FIX: hasattr() does not resolve dotted paths such as
            # 'StatusDetail.Description', so the status branches below never
            # executed. Check each attribute level explicitly.
            status = getattr(match, 'StatusDetail', None)
            if status is not None and hasattr(status, 'Description'):
                html += "<br>Status Description: {}".format(status.Description)
            if status is not None and hasattr(status, 'AncillaryDetails'):
                html += "<br>Status AncillaryDetails Reason: {}".format(status.AncillaryDetails[-1].Reason)
                html += "<br>Status AncillaryDetails Description: {}".format(
                    status.AncillaryDetails[-1].ReasonDescription)
            if hasattr(match, 'ServiceCommitMessage'):
                html += "<br><b>{}</b>".format(match.ServiceCommitMessage)
            html += "<br><br>"
        return html
    except Exception as error:
        # BUG FIX: plain exceptions have no ``.value`` attribute, which made
        # this handler itself raise AttributeError; fall back to the
        # exception object when ``.value`` is absent.
        return """<b>ERROR :</b><br> Fedex invalid configuration error! <br>{0}<br><br>{1} """.format(
            getattr(error, 'value', error), get_fedex_server_info())
# #############################################################################
# #############################################################################
class DictDiffer(object):
    """
    Compute the difference between two dictionaries:
    (1) keys added
    (2) keys removed
    (3) keys present in both but with changed values
    (4) keys present in both with unchanged values
    """

    def __init__(self, current_dict, past_dict):
        self.current_dict = current_dict
        self.past_dict = past_dict
        self.set_current = set(current_dict)
        self.set_past = set(past_dict)
        self.intersect = self.set_current & self.set_past

    def added(self):
        """Keys present only in the current dict."""
        return self.set_current - self.intersect

    def removed(self):
        """Keys present only in the past dict."""
        return self.set_past - self.intersect

    def changed(self):
        """Shared keys whose values differ between the two dicts."""
        return {key for key in self.intersect
                if self.current_dict[key] != self.past_dict[key]}

    def unchanged(self):
        """Shared keys whose values are identical in both dicts."""
        return {key for key in self.intersect
                if self.current_dict[key] == self.past_dict[key]}
    @staticmethod
    def validate_shipment_integrity(source_doc):
        """
        Basic validation that shipment is correct.
        That all items from delivery note are in boxes and etc.

        Aborts (via frappe.throw) when there are no boxes, too many boxes,
        or when per-item quantities packed in the boxes disagree with the
        quantities on the shipment note items.
        """
        boxes = source_doc.get_all_children("DTI Shipment Package")
        # NOTE(review): message says the max is 10, but the check rejects
        # anything above 9 boxes — confirm which limit is intended.
        if len(boxes) > 9:
            frappe.throw(_("Max amount of packages is 10"))
        if not boxes:
            frappe.throw(_("Please create shipment box packages!"))
        # Per-box parse: box index -> {item_code: qty}.
        parsed_items_per_box = {i: parse_items_in_box(package) for i, package in enumerate(boxes)}
        # Aggregate quantities across all boxes into one {item_code: qty}.
        all_items_in_all_boxes = {}
        for box in parsed_items_per_box:
            for item_code in parsed_items_per_box[box]:
                if item_code in all_items_in_all_boxes:
                    all_items_in_all_boxes[item_code] += parsed_items_per_box[box][item_code]
                else:
                    all_items_in_all_boxes.update({item_code: int(parsed_items_per_box[box][item_code])})
        # Expected quantities, aggregated from the shipment note items.
        delivery_items_dict = {}
        for item in source_doc.get_all_children("DTI Shipment Note Item"):
            if item.item_code in delivery_items_dict:
                delivery_items_dict[item.item_code] += int(item.qty)
            else:
                delivery_items_dict.update({item.item_code: int(item.qty)})
        # Compare expected (delivery note) vs actual (boxes) quantities.
        differ = DictDiffer(delivery_items_dict, all_items_in_all_boxes)
        if differ.changed():
            delivery_string = "<br>".join("%s = %i" % (item, delivery_items_dict[item]) for item in delivery_items_dict)
            all_items_string = "<br>".join(
                "%s = %i" % (item, all_items_in_all_boxes[item]) for item in all_items_in_all_boxes)
            error_message = """<b style="color:orange;">WARNING!</b><br>
            Integrity error for: <b>{}</b> <br>
            <hr>
            <b>DELIVERY ITEMS:</b> <br>{} <br><br>
            <b>ITEMS IN BOX:</b> <br>{}""".format(",".join(differ.changed()), delivery_string, all_items_string)
            frappe.throw(_(error_message))
| 1.625 | 2 |
identifyneeds/repository.py | robinstauntoncollins/identifyneeds | 0 | 12772702 | from typing import List, Dict
from identifyneeds.entities import Condition
class MemRepo():
    """In-memory repository of condition dicts, keyed by their 'uuid'."""

    def __init__(self, condition_dicts):
        """Seed the repository with an initial list of condition dicts."""
        self.conditions = {}
        self.put(condition_dicts)

    def get(self, filters: Dict = None):
        """Return Condition objects, optionally filtered.

        Only the 'name' filter is supported; any other filter key raises
        NotImplementedError.
        """
        condition_objects = [Condition.from_dict(i) for i in self.conditions.values()]
        if not filters:
            return condition_objects
        # Membership test works directly on the dict; .keys() is redundant.
        if 'name' in filters:
            return [item for item in condition_objects if item.name in filters['name']]
        raise NotImplementedError("Filtering by something other than 'name' is not supported")

    def put(self, condition_dicts: List[dict]):
        """Insert (or overwrite) conditions, keyed by their 'uuid' field."""
        self._check_types(condition_dicts)
        for cnd in condition_dicts:
            self.conditions[cnd['uuid']] = cnd

    def _check_types(self, conditions: List[dict]):
        """Raise TypeError unless *conditions* is a list of dicts."""
        # isinstance (rather than exact type comparison) also accepts
        # list/dict subclasses.
        if not isinstance(conditions, list):
            raise TypeError(f"Expected List of 'dicts'. Received: {type(conditions)}")
        for cnd in conditions:
            if not isinstance(cnd, dict):
                raise TypeError(f"Expected 'dict' got {type(cnd)}")
| 2.796875 | 3 |
biblib/algo.py | jonasteuwen/biblib | 0 | 12772703 | <filename>biblib/algo.py
"""Algorithms for manipulating BibTeX data.
This module implements various algorithms supplied by BibTeX to style
files, as well as some algorithms to make BibTeX data more accessible
to Python.
"""
__all__ = (
"Name parse_names "
+ "parse_month "
+ "title_case "
+ "TeXProcessor TeXToUnicode tex_to_unicode"
).split()
import re
import collections
import unicodedata
import string
from . import messages
# Control sequences (defined as "control_seq_ilk" in bibtex) and their
# Unicode translations. This is similar to, but slightly different
# from the TeX definitions (of course).
_CONTROL_SEQS = {
"\\i": "ı",
"\\j": "ȷ",
"\\oe": "œ",
"\\OE": "Œ",
"\\ae": "æ",
"\\AE": "Æ",
"\\aa": "å",
"\\AA": "Å",
"\\o": "ø",
"\\O": "Ø",
"\\l": "ł",
"\\L": "Ł",
"\\ss": "ß",
}
class NameParser:
    """Parser for BibTeX name lists (mirrors BibTeX's x_format_name)."""

    def __init__(self):
        pass

    def __depth(self, data):
        # Return a list giving the brace depth of each character of data.
        # An opening brace is assigned the depth of the group it opens.
        depth, depths = 0, [0] * len(data)
        for pos, ch in enumerate(data):
            depths[pos] = depth
            if ch == "{":
                depth += 1
                depths[pos] = depth
            elif ch == "}":
                depth -= 1
        return depths

    def __split_depth0(self, regexp, data, flags=0):
        # Split data on matches of regexp occurring at brace depth 0.
        # If the regexp has capturing groups, the group texts are kept in
        # the output between the split pieces (like re.split with groups).
        regexp = re.compile(regexp, flags=flags)
        depths = self.__depth(data)
        parts, last = [], 0
        for m in regexp.finditer(data):
            if depths[m.start()] == 0:
                parts.append(data[last : m.start()])
                last = m.end()
                if regexp.groups:
                    parts.extend(m.groups())
        parts.append(data[last:])
        return parts

    def _first_char(self, data):
        """Return the first character of data (in bibtex's sense)."""
        # XXX Should this be pulled out as some generic algorithm?
        pos = 0
        depths = self.__depth(data)
        while True:
            if pos == len(data):
                return ""
            elif data[pos].isalpha():
                return data[pos]
            elif data.startswith("{\\", pos):
                # Special character
                pos += 1
                m = re.compile(r"\\[a-zA-Z]+").match(data, pos)
                if m and m.group() in _CONTROL_SEQS:
                    # Known bibtex control sequence
                    return _CONTROL_SEQS[m.group()]
                # Scan for the first alphabetic character
                while pos < len(data) and depths[pos]:
                    if data[pos].isalpha():
                        return data[pos]
                    pos += 1
            elif data[pos] == "{":
                # Skip brace group
                while pos < len(data) and depths[pos]:
                    pos += 1
            else:
                pos += 1

    def __split_von_last(self, toks):
        # Split a token list into (von tokens, last-name tokens).  The
        # token list alternates token, connector, token, ... hence step 2.
        # See von_name_ends_and_last_name_starts_stuff
        for von_end in range(len(toks) - 1, 1, -2):
            if self._first_char(toks[von_end - 2]).islower():
                return (toks[: von_end - 1], toks[von_end:])
        return ([], toks)

    def parse(self, string, pos):
        """Parse a BibTeX name list.

        Returns a list of Name objects.  Raises InputError if there is
        a syntax error.
        """
        # See x_format_name
        # Split names (see name_scan_for_and)
        name_strings = [
            n.strip()
            for n in self.__split_depth0(
                "[ \t]and(?=[ \t])", string, flags=re.IGNORECASE
            )
        ]
        # Process each name
        names = []
        for name_string in name_strings:
            # Remove leading and trailing white space, ~, and -, and
            # trailing commas.
            name_string = name_trailing = name_string.lstrip("-~ \t")
            name_string = name_string.rstrip("-~ \t,")
            if "," in name_trailing[len(name_string) :]:
                # BibTeX warns about this because it often indicates a
                # bigger syntax problem
                pos.warn("trailing comma after name `{}'".format(name_string))
            # Split on depth-0 commas and further split tokens in each
            # part, keeping only the first connector between each
            # token.
            parts = [
                self.__split_depth0("([-~ \t])[-~ \t]*", part.strip())
                for part in self.__split_depth0(",", name_string)
            ]
            # Process name depending on how many commas there were
            first = von = last = jr = []
            if len(parts) == 1:
                # "First von Last"
                toks = parts[0]
                # The von tokens start with the first lower-case token
                # (but cannot start at the last token)
                for von_start in range(0, len(toks) - 2, 2):
                    if self._first_char(toks[von_start]).islower():
                        # Found beginning; now find the end
                        first = toks[: max(0, von_start - 1)]
                        von, last = self.__split_von_last(toks[von_start:])
                        break
                else:
                    # No von tokens.  Find hyphen-connected last name
                    # tokens.
                    for last_start in range(len(toks) - 1, -1, -2):
                        if last_start and toks[last_start - 1] != "-":
                            break
                    first = toks[: max(0, last_start - 1)]
                    last = toks[last_start:]
            elif 2 <= len(parts) <= 3:
                # "von Last, First[, Jr]"
                von, last = self.__split_von_last(parts[0])
                first = parts[1]
                if len(parts) == 3:
                    jr = parts[2]
            else:
                pos.raise_error("too many commas in name `{}'".format(name_string))
            names.append(Name("".join(first), "".join(von), "".join(last), "".join(jr)))
        return names
class Name(collections.namedtuple("Name", "first von last jr")):
    """A parsed name.

    The name is parsed in to first name, "von", last name, and the
    complement (or "jr").  Each component is in uninterpreted form
    (e.g., TeX syntax).  Missing components are set to the empty
    string.
    """

    def is_others(self):
        """Return True for the special BibTeX name "others"."""
        return (
            self.first == ""
            and self.von == ""
            and self.last == "others"
            and self.jr == ""
        )

    def pretty(self, template="{first} {von} {last} {jr}"):
        """Pretty-print author according to template.

        The template is a 'format' template with the added feature
        that literal text surrounding fields that expand to empty
        strings is prioritized, rather than concatenated.
        Specifically, of the literal text snippets between two
        non-null fields, only the first of the highest priority is
        kept, where non-white space outranks white space outranks the
        empty string.  Literal text before and after the first and
        last fields is always kept.

        Hence, if the template is '{von} {last}, {first}, {jr}' and
        the name has a last and a jr but no von or first, then the
        first comma will be kept and the space and second dropped.  If
        the name has only a von and a last, then both commas will be
        dropped.  If the name has only a last, then all separators
        will be dropped.
        """
        # XXX BibTeX's own format.name$ templates are more
        # sophisticated than this, and it's not clear these are easier
        # to use.  These do have the (dubious) benefit of having
        # access to the usual format machinery.
        def priority(string):
            # 0 = empty, 1 = whitespace only, 2 = real text.
            if not string:
                return 0
            elif string.isspace():
                return 1
            return 2

        fields = {
            "first": self.first,
            "von": self.von,
            "last": self.last,
            "jr": self.jr,
        }
        f = string.Formatter()
        # pieces alternates separator, field value, separator, ...
        pieces = [""]
        first_field, last_field = 0, -1
        leading = trailing = ""
        for i, (literal_text, field_name, format_spec, conv) in enumerate(
            f.parse(template)
        ):
            if i == 0:
                # Always keep leading text
                leading = literal_text
            elif field_name is None:
                # Always keep trailing text
                trailing = literal_text
            elif priority(literal_text) > priority(pieces[-1]):
                # Overrides previous piece
                pieces[-1] = literal_text
            if field_name is not None:
                obj, _ = f.get_field(field_name, (), fields)
                if not obj:
                    # Empty field: skip it, leaving its separator pending.
                    continue
                obj = f.convert_field(obj, conv)
                if first_field == 0:
                    first_field = len(pieces)
                last_field = len(pieces)
                pieces.extend([f.format_field(obj, format_spec), ""])
        # Only keep the pieces between non-null fields
        pieces = pieces[first_field : last_field + 1]
        return leading + "".join(pieces) + trailing
def parse_names(string, pos=messages.Pos.unknown):
    """Parse a BibTeX name list (e.g., an author or editor field).

    Returns a list of Name objects.  The parsing is equivalent to
    BibTeX's built-in "format.name$" function.  Raises InputError if
    there is a syntax error.
    """
    parser = NameParser()
    return parser.parse(string, pos)
_MONTHS = "January February March April May June July August September October November December".lower().split()


def parse_month(string, pos=messages.Pos.unknown):
    """Parse a BibTeX month field and return the month number (1-12).

    This performs fairly fuzzy parsing that supports all standard
    month macro styles (and then some): full names or abbreviations of
    at least three letters, any case, with or without a trailing
    period.  Plain numeric months ("9", "09") are also accepted.

    Raises InputError if the field cannot be parsed.
    """
    val = string.strip().rstrip(".").lower()
    # Generalization: accept numeric month values, which are common in
    # real-world BibTeX data.
    if val.isdigit():
        num = int(val)
        if 1 <= num <= 12:
            return num
    # Require at least three characters so short/empty prefixes stay
    # ambiguous (hoisted out of the loop: it does not depend on `name`).
    elif len(val) >= 3:
        for i, name in enumerate(_MONTHS):
            if name.startswith(val):
                return i + 1
    pos.raise_error("invalid month `{}'".format(string))
CS_RE = re.compile(r"\\[a-zA-Z]+")


def title_case(string, pos=messages.Pos.unknown):
    """Convert to title case (like BibTeX's built-in "change.case$").

    The first character, and characters following ':' plus white space,
    keep their case; all other depth-0 characters are lower-cased.
    Brace groups protect their contents, and "{\\..." special characters
    are handled the way BibTeX does.

    Raises InputError if the title string contains syntax errors.
    """
    # See "@<Perform the case conversion@>"
    out = []
    # BUGFIX: the scan index used to be named `pos`, shadowing the error
    # position argument; `pos.raise_error("unexpected }")` below was then
    # invoked on an int and raised AttributeError instead of reporting
    # the syntax error.  The index is now named `off`.
    level, prev_colon, off = 0, False, 0
    while off < len(string):
        keep = off == 0 or (prev_colon and string[off - 1] in " \t\n")
        if level == 0 and string.startswith("{\\", off) and not keep:
            # Special character
            out.append(string[off])
            off += 1
            level += 1
            while level and off < len(string):
                if string[off] == "\\":
                    m = CS_RE.match(string, off)
                    if m:
                        if m.group() in _CONTROL_SEQS:
                            # Lower case control sequence
                            out.append(m.group().lower())
                        else:
                            # Unknown control sequence, keep case
                            out.append(m.group())
                        off = m.end()
                        continue
                elif string[off] == "{":
                    level += 1
                elif string[off] == "}":
                    level -= 1
                # Lower-case non-control sequence
                out.append(string[off].lower())
                off += 1
            prev_colon = False
            continue
        # Handle braces
        char = string[off]
        if char == "{":
            level += 1
        elif char == "}":
            if level == 0:
                pos.raise_error("unexpected }")
            level -= 1
        # Handle colon state
        if char == ":":
            prev_colon = True
        elif char not in " \t\n":
            prev_colon = False
        # Change case of a regular character
        if level > 0 or keep:
            out.append(string[off])
        else:
            out.append(string[off].lower())
        off += 1
    return "".join(out)
# A TeX control sequence is
#
# 1) an active character (subsequent white space is NOT ignored) or,
# 2) a \ followed by either
#    2.1) a sequence of letter-category characters (subsequent white
#         space is ignored), or
#    2.2) a single space-category character (subsequent white space is
#         ignored), or
#    2.3) a single other character (subsequent white space is NOT
#         ignored).
#
# This regexp assumes plain TeX's initial category codes.  Technically
# only ~ and \f are active characters, but we include several other
# special characters that we want to abort on.
tex_cs_re = re.compile(r"([~\f$&#^_]|(\\[a-zA-Z]+|\\[ \t\r\n])|\\.)(?(2)[ \t\r\n]*)")


class TeXProcessor:
    """Base class for simple TeX macro processors.

    This assumes the initial category codes set up by plain.tex (and,
    likewise, LaTeX).
    """

    def process(self, string, pos):
        """Expand active characters and macros in string.

        Raises InputError if it encounters an active character or
        macro it doesn't recognize.
        """
        self.__data = string
        self.__off = 0
        self.__pos = pos
        # Process macros
        while True:
            m = tex_cs_re.search(self.__data, self.__off)
            if not m:
                break
            self.__off = m.end()
            macro = m.group(1)
            nval = self._expand(macro)
            if nval is None:
                if macro.startswith("\\"):
                    pos.raise_error("unknown macro `{}'".format(macro))
                pos.raise_error("unknown special character `{}'".format(macro))
            # Splice the expansion in place and continue scanning right
            # after it (expansions are not themselves re-scanned).
            self.__data = self.__data[: m.start()] + nval + self.__data[self.__off :]
            self.__off = m.start() + len(nval)
        return self.__data

    def _scan_argument(self):
        """Scan and return a macro argument."""
        if self.__off >= len(self.__data):
            self.__pos.raise_error("macro argument expected")
        if self.__data[self.__off] == "{":
            # Brace group: consume to the matching close brace and return
            # the contents without the outer braces.
            start = self.__off
            depth = 0
            while depth or self.__off == start:
                if self.__data[self.__off] == "{":
                    depth += 1
                elif self.__data[self.__off] == "}":
                    depth -= 1
                self.__off += 1
            return self.__data[start + 1 : self.__off - 1]
        elif self.__data[self.__off] == "\\":
            # A control sequence counts as a single argument.
            m = tex_cs_re.match(self.__data, self.__off)
            self.__off = m.end()
            return m.group(1)
        else:
            # A single ordinary character.
            arg = self.__data[self.__off]
            self.__off += 1
            return arg

    def _expand(self, cs):
        """Return the expansion of an active character or control sequence.

        Returns None if the sequence is unknown.  This should be
        overridden by sub-classes.
        """
        return None
class TeXToUnicode(TeXProcessor):
    """A simple TeX-to-unicode converter.

    This interprets accents and other special tokens like '--' and
    eliminates braces.
    """

    # Simple TeX-to-Unicode replacements
    _SIMPLE = {
        # Active characters
        "~": "\u00A0",
        # chardefs from plain.tex
        "\\%": "%",
        "\\&": "&",
        "\\#": "#",
        "\\$": "$",
        "\\ss": "ß",
        "\\ae": "æ",
        "\\oe": "œ",
        "\\o": "ø",
        "\\AE": "Æ",
        "\\OE": "Œ",
        "\\O": "Ø",
        "\\i": "ı",
        "\\j": "ȷ",
        "\\aa": "å",
        "\\AA": "Å",
        "\\l": "ł",
        "\\L": "Ł",
        # Other defs from plain.tex
        "\\_": "_",
        "\\dag": "†",
        "\\ddag": "‡",
        "\\S": "§",
        "\\P": "¶",
    }

    # TeX accent control sequences to Unicode combining characters
    _ACCENTS = {
        # Accents defined in plain.tex
        "\\`": "\u0300",
        "\\'": "\u0301",
        "\\v": "\u030C",
        "\\u": "\u0306",
        "\\=": "\u0304",
        "\\^": "\u0302",
        "\\.": "\u0307",
        "\\H": "\u030B",
        "\\~": "\u0303",
        '\\"': "\u0308",
        "\\d": "\u0323",
        "\\b": "\u0331",
        "\\c": "\u0327",
        # Other accents that seem to be standard, but I can't find
        # their definitions
        "\\r": "\u030A",
        "\\k": "\u0328",
    }

    def process(self, string, pos):
        """Expand macros in *string*, apply TeX ligatures, drop braces."""
        string = super().process(string, pos)
        # Handle ligatures that are unique to TeX.  This must be done
        # after macro expansion, but before brace removal because
        # braces inhibit ligatures.
        string = string.replace("---", "\u2014").replace("--", "\u2013")
        # Remove braces
        return string.replace("{", "").replace("}", "")

    def _expand(self, cs):
        # Direct replacements first; accent macros consume one argument
        # and produce a base character plus a combining mark.
        if cs in self._SIMPLE:
            return self._SIMPLE[cs]
        if cs in self._ACCENTS:
            arg = self._scan_argument()
            if len(arg) == 0:
                seq, rest = " " + self._ACCENTS[cs], ""
            elif arg.startswith("\\i") or arg.startswith("\\j"):
                # Unicode combining marks should be applied to the
                # regular i, not the dotless i.
                seq, rest = arg[1] + self._ACCENTS[cs], arg[2:]
            else:
                seq, rest = arg[0] + self._ACCENTS[cs], arg[1:]
            # Prefer the precomposed (NFC) form of base + combining mark.
            return unicodedata.normalize("NFC", seq) + rest
        return None
def tex_to_unicode(string, pos=messages.Pos.unknown):
    """Convert a BibTeX field value written in TeX to Unicode.

    This interprets accents and other special tokens like '--' and
    eliminates braces.  Raises InputError if it encounters a macro it
    doesn't understand.

    Note that BibTeX's internal understanding of accented characters
    (e.g., purify$ and change.case$) is much more limited than TeX's.
    This implements something closer to TeX on the assumption that the
    goal is to display the string.
    """
    converter = TeXToUnicode()
    return converter.process(string, pos)
| 2.953125 | 3 |
mapscript/python/mapscript/__init__.py | tomkralidis/MapServer | 0 | 12772704 | <gh_stars>0
import sys
import platform
import os
import inspect
# As of Python 3.8 PATH can no longer be used to resolve the MapServer
# DLLs on Windows. Instead users will be required to set a new MAPSERVER_DLL_PATH
# environment variable.
# See https://docs.python.org/3/whatsnew/3.8.html#changes-in-the-python-api
def add_dll_path(pth):
    """Make *pth* visible to the Windows DLL loader.

    From Python 3.8 onwards PATH is no longer consulted when resolving
    extension-module DLLs, so the directory must be registered through
    os.add_dll_directory(); older versions fall back to prepending the
    directory to PATH.
    """
    if sys.version_info >= (3, 8):
        os.add_dll_directory(pth)
    else:
        # add the directory to the Windows path for earlier Python versions
        os.environ['PATH'] = '{};{}'.format(pth, os.environ['PATH'])
if platform.system() == 'Windows':
    # Collect existing directories from the semicolon-separated
    # MAPSERVER_DLL_PATH environment variable; non-existent entries are
    # silently ignored.
    mapserver_dll_path = os.getenv('MAPSERVER_DLL_PATH', '')
    dll_paths = list(filter(os.path.exists, mapserver_dll_path.split(';')))
    # add paths in the order listed in the string
    dll_paths.reverse()
    for pth in dll_paths:
        add_dll_path(pth)

from .mapscript import *

# change all the class module names from mapscript.mapscript to mapscript
for key, value in globals().copy().items():
    if inspect.isclass(value) and value.__module__.startswith('mapscript.'):
        value.__module__= 'mapscript'

# remove the submodule name
del mapscript
| 2.3125 | 2 |
src/zif/sedna/tests.py | bitkeeper/zif.sedna | 0 | 12772705 | <filename>src/zif/sedna/tests.py
import doctest
import unittest
def doTests():
    """Run the package's doctest files directly (output goes to stdout)."""
    doctest.testfile('README.txt')
    doctest.testfile('README_sednaobject.txt')
def test_suite():
    """Assemble the doctest-based test suite for this package.

    :return: a unittest.TestSuite containing the README doctest files.
    """
    suite = unittest.TestSuite()
    suite.addTest(doctest.DocFileSuite('README.txt'))
    suite.addTest(doctest.DocFileSuite('README_sednaobject.txt'))
    return suite
if __name__ == '__main__':
    # When executed directly, run the doctest suite with the plain
    # text test runner.
    runner = unittest.TextTestRunner()
    suite = test_suite()
    runner.run(suite)
| 1.929688 | 2 |
BetterBolus.py | crsullivan/BetterBolus | 0 | 12772706 | import requests
import tkinter as tk
from tkinter import filedialog, Text
import main as main
from sortedcontainers import SortedSet
# This file runs the main program
def raise_frame(frame):
    """Bring *frame* to the top of the stacked application frames."""
    frame.tkraise()
root=tk.Tk()
root.title("Better Bolus V2.0")
# root.iconbitmap("../favicon.ico")
f1 = tk.Frame(root)
f2 = tk.Frame(root)
f3 = tk.Frame(root)
f4 = tk.Frame(root)
for frame in (f1, f2, f3, f4):
frame.grid(row=0, column=0, sticky='news')
# setting the windows size
root.geometry("500x500")
# bolus profile values as an object of arrays, each with two values,
# the first being the amount of insulin in units,
# and the second being the amount of time in minutes between
# now and when the bolus would be taken
bolusProfile = {}
insulinEffectUpdated = []
if len(insulinEffectUpdated) > 49:
insulinEffectUpdated = insulinEffectUpdated[48:]
# declaring string variables
bg_var = tk.StringVar()
trending_var = tk.StringVar()
bolus_var = tk.StringVar()
bolusTime_var = tk.StringVar()
# defining a function that will
# generate original graph based off BG
def submit_f1():
    """Validate the blood-glucose entry and show the unadjusted graph.

    Accepts integers strictly between 120 and 450; on invalid input the
    prompt label is replaced with an error message.  The entry field is
    cleared afterwards in either case.
    """
    trending = trending_var.get()
    # Validation
    try:
        bg = int(bg_var.get())
        if 120 < int(bg_var.get()) < 450:
            raise_frame(f2)
            main.initial_bg(bg)
            main.unadjusted_graph(trending)
            main.show_unadjusted_graph()
        else:
            bg_label.config(text="Invalid input\nplease enter an integer between 120 and 450:")
    except ValueError:
        bg_label.config(text="Invalid input\nplease enter an integer between 120 and 450:")
    bg_var.set("")
# defining a function that will
# generate graph with bolus profile applied
def submit_f2():
    """Apply the entered bolus, show the resistance-adjusted graph, and exit.

    An empty bolus entry counts as 0 units; an empty time entry means
    "now" ('0.00').  Invalid input updates the prompt label and returns
    without applying anything.
    """
    trending = trending_var.get()
    time = bolusTime_var.get()
    if time == "":
        time = '0.00'
    # BUGFIX: the original applied the bolus, destroyed the window and
    # showed the graph *before* validating, and referenced `bolus` before
    # assignment whenever the entry was non-empty.  Validate first, then
    # apply exactly once.
    raw_bolus = bolus_var.get()
    if raw_bolus == "":
        bolus = 0
    else:
        try:
            bolus = int(raw_bolus)
        except ValueError:
            bolus_label.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
            bolus_var.set("")
            return
        if not 0 <= bolus <= 10:
            bolus_label.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
            bolus_var.set("")
            return
    bolusProfile.update({time: bolus})
    insulinEffect = main.applyInitialBolus(bolusProfile)
    insulinEffectResistanceAdjusted = main.build_resistance_profile(trending, insulinEffect)
    root.destroy()
    main.show_adjusted_graph(insulinEffectResistanceAdjusted)
# defining a function that will
# allow user to add more bolus info
def add_another_bolus():
    """Record the current bolus entry and switch to frame 3 for more input.

    Validates that the bolus is an integer between 0 and 10; on success
    the entry is added to bolusProfile and the stacked insulin effect is
    appended to insulinEffectUpdated.  The entry field is cleared
    afterwards.
    """
    time = bolusTime_var.get()
    if bolusTime_var.get() == "":
        time = '0.00'
    try:
        bolus = int(bolus_var.get())
        if 0 <= bolus <= 10:
            bolusProfile.update({time: bolus})
            insulinEffect = main.applyInitialBolus(bolusProfile)
            insulinEffectUpdated.append(main.bolusStack(insulinEffect, bolusProfile))
            raise_frame(f3)
        else:
            bolus_label.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
    except ValueError:
        bolus_label.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
    bolus_var.set("")
def add_another_bolus2():
    """Record an additional bolus from frame 3, stacking it on the last effect.

    Same validation as add_another_bolus, but stacks on top of the most
    recent entry in insulinEffectUpdated.
    # NOTE(review): assumes insulinEffectUpdated is non-empty here (a
    # previous bolus was added) — confirm the UI flow guarantees that.
    """
    time = bolusTime_var.get()
    if bolusTime_var.get() == "":
        time = '0.00'
    try:
        bolus = int(bolus_var.get())
        if 0 <= bolus <= 10:
            bolusProfile.update({time: bolus})
            insulinEffectUpdated.append(main.bolusStack(insulinEffectUpdated[len(insulinEffectUpdated) - 1], bolusProfile))
        else:
            bolus_label2.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
    except ValueError:
        bolus_label2.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
    bolus_var.set("")
def submit_f3():
    """Apply the final bolus from frame 3, show the adjusted graph, and exit.

    An empty bolus entry means "no additional bolus"; an empty time entry
    means "now" ('0.00').  Invalid input updates the prompt label and
    returns without applying anything.
    """
    trending = trending_var.get()
    time = bolusTime_var.get()
    if time == "":
        time = '0.00'
    # BUGFIX: the original stacked the bolus and destroyed the window
    # unconditionally, then attempted int("") (raising ValueError) and
    # configured a widget on the already-destroyed window.  Validate
    # first, then apply exactly once.
    raw_bolus = bolus_var.get()
    if raw_bolus != "":
        try:
            bolus = int(raw_bolus)
        except ValueError:
            bolus_label.config(text="Invalid input\nPlease enter an integer between 0 and 10:")
            return
        if not 0 <= bolus <= 10:
            bolus_label.config(text="Invalid input\nPlease enter a bolus between 0 and 10:")
            return
        bolusProfile.update({time: bolus})
    # NOTE(review): assumes at least one bolus was stacked earlier, so
    # insulinEffectUpdated is non-empty — confirm the UI flow guarantees it.
    insulinEffectUpdated.append(main.bolusStack(insulinEffectUpdated[-1], bolusProfile))
    insulinEffectResistanceAdjusted = main.build_resistance_profile(trending, insulinEffectUpdated[-1])
    root.destroy()
    main.show_adjusted_graph(insulinEffectResistanceAdjusted)
# function to update bolus profile label
def change_profile_text(bolusprofile_label):
    """Refresh the label that lists the accumulated bolus profile."""
    print(bolusProfile)
    profile_string = ""
    for key in bolusProfile:
        # NOTE(review): assumes keys look like "H.MM" (hours, dot, minutes),
        # padding a trailing zero when only one minute digit is present —
        # confirm against the time dropdown's value format.
        if len(key) == 3:
            make_str_readable = "0"
        else:
            make_str_readable = ""
        profile_string = profile_string + str(bolusProfile[key]) + " Unit(s) in " + str(key[0]) + " hours and " + str(key[2:]) + make_str_readable + " minutes\n"
    bolusprofile_label.config(text = f"Current Bolus Profile: {profile_string}")
# creating a label for
# disclaimer
disclaimer_label = tk.Label(f1, text = "THIS PROGRAM IS CURRENTLY FOR MODELING PURPOSES ONLY\nAND SHOULD NOT BE USED TO DETERMINE\nHOW TO MANAGE AN INDIVIDUAL'S BLOOD GLUCOSE.",
font=('calibre',
10, 'bold'))
# creating a label for
# disclaimer cont'd
disclaimer_label_cont = tk.Label(f1, text = "",
font=('calibre',
10, 'bold'))
# creating a label for
# disclaimer cont'd 2
disclaimer_label_cont2 = tk.Label(f1, text = "",
font=('calibre',
10, 'bold'))
# creating a label for
# bg using widget Label
bg_label = tk.Label(f1, text = 'Blood Glucose',
font=('calibre',
10, 'bold'))
# creating an entry for input
# bg using widget Entry
bg_entry = tk.Entry(f1,
textvariable = bg_var,
font=('calibre',10,'normal'))
choices = { 'Stable','Rising','Dropping','Rising Fast','Dropping Fast', 'Unknown'}
trending_var.set('Unknown') # set the default option
trending_profile = tk.OptionMenu(f1, trending_var, *choices,)
trending_label = tk.Label(f1, text="Choose a Trending Option")
trending_profile.grid(row = 2, column =1)
# on change dropdown value
def change_dropdown(*args):
print( trending_var.get() )
# link function to change dropdown
trending_var.trace('w', change_dropdown)
# creating a button using the widget
# Button that will call the submit function
sub_btn=tk.Button(f1,text = 'Submit',
command = lambda:[submit_f1()]
)
# creating a label for f2
f2_label = tk.Label(f2, text = "Please enter an initial bolus value",
font=('calibre',
10, 'bold'))
# creating a label for bolus
bolus_label = tk.Label(f2,
text = 'Bolus Amount',
font = ('calibre',10,'bold'))
# creating an entry for bolus
bolus_entry=tk.Entry(f2,
textvariable = bolus_var,
font = ('calibre',10,'normal'))
# creating a button using the widget
# Button that will call the submit function for f2
sub2_btn=tk.Button(f2,text = 'Apply',
command = lambda:[submit_f2()])
# creating a button using the widget
# Button that will call the submit function for f2
add_bolus_btn=tk.Button(f2,text = 'Add another bolus',
command = lambda:[add_another_bolus(), change_profile_text(bolusprofile_label)])
# creating a label for f3
f3_label = tk.Label(f3, text = "Please enter another bolus value with a time to be applied",
font=('calibre',
10, 'bold'))
# creating a label for subsequent bolus
bolus_label2 = tk.Label(f3,
text = 'Bolus Amount',
font = ('calibre',10,'bold'))
# creating an entry for subsequent bolus
bolus_entry2=tk.Entry(f3,
textvariable = bolus_var,
font = ('calibre',10,'normal'))
# creating a label for f3
bolusprofile_label = tk.Label(f3, text = "",
font=('calibre',
10, 'bold'))
times = SortedSet()
for el in main.insulinProfileList:
times.add(el[0])
bolus_time = tk.OptionMenu(f3, bolusTime_var, *times,)
time_label = tk.Label(f3, text="Choose a time to apply bolus\nin 5 minute increments from now")
bolus_time.grid(row = 2, column =1)
# on change dropdown value
def change_time_dropdown(*args):
print( "time", bolusTime_var.get() )
# link function to change dropdown
bolusTime_var.trace('w', change_time_dropdown)
# creating a button using the widget
# Button that will call the submit function for f2
sub3_btn=tk.Button(f3,text = 'Apply',
command = submit_f3)
# creating a button using the widget
# Button that will call the submit function for f3
add_bolus_btn2=tk.Button(f3,text = 'Add another bolus',
command = lambda:[add_another_bolus2(), change_profile_text(bolusprofile_label)]
)
# placing the label and entry in
# the required position using grid for f1
disclaimer_label.grid(row=0,column=0,columnspan=2,padx=20,pady=20)
bg_label.grid(row=1,column=0,padx=20,pady=20)
bg_entry.grid(row=1,column=1)
trending_label.grid(row=2,column=0,padx=20,pady=20)
trending_profile.grid(row=2,column=1)
sub_btn.grid(row=3,column=1,padx=20,pady=20)
# placing the label and entry in
# the required position using grid for f2
f2_label.grid(row=0,column=0,columnspan=2,padx=20,pady=20)
bolus_label.grid(row=1,column=0,padx=20,pady=20)
bolus_entry.grid(row=1,column=1)
sub2_btn.grid(row=3,column=0,padx=20,pady=20)
add_bolus_btn.grid(row=3,column=1)
# placing the label and entry in
# the required position using grid for f3
f3_label.grid(row=0,column=0,columnspan=2,padx=20,pady=20)
bolusprofile_label.grid(row=1,column=0,columnspan=2,padx=20,pady=20)
bolus_label2.grid(row=2,column=0,padx=20,pady=20)
bolus_entry2.grid(row=2,column=1)
time_label.grid(row=3,column=0,padx=20,pady=20)
bolus_time.grid(row=3,column=1)
sub3_btn.grid(row=4,columnspan=2,padx=20,pady=20)
add_bolus_btn2.grid(row=5,columnspan=2)
# performing an infinite loop
# for the window to display
raise_frame(f1)
root.mainloop() | 3.078125 | 3 |
prog_praxis/kaprekar_169.py | genos/online_problems | 1 | 12772707 | #!/usr/bin/env python
def num_digits(k):
    """Return the number of decimal digits in the positive integer *k*."""
    count = 0
    while k > 0:
        count += 1
        # BUGFIX: use floor division; under Python 3 `k /= 10` produces a
        # float, so the loop kept iterating on tiny fractions and returned
        # a wildly wrong digit count.
        k //= 10
    return count


def is_kaprekar(k):
    """Return True if *k* is a Kaprekar number.

    A Kaprekar number k satisfies: split k**2 after num_digits(k) digits
    from the right; the two parts sum back to k (e.g. 45**2 = 2025 and
    20 + 25 = 45).
    """
    k2 = pow(k, 2)
    p10ndk = pow(10, num_digits(k))
    # Return the comparison directly instead of an if/else over True/False.
    return k == (k2 // p10ndk) + (k2 % p10ndk)
if __name__ == '__main__':
    # Print every Kaprekar number from 1 to 1000 inclusive.
    for k in range(1, 1001):
        if is_kaprekar(k): print(k)
| 3.796875 | 4 |
clan_blog/routes.py | Claire-Clan/clan-site | 0 | 12772708 | <reponame>Claire-Clan/clan-site<filename>clan_blog/routes.py
""" Set default routes for app """
from flask import current_app as app, db
from flask import render_template, url_for
@app.route('/')
def home():
    """Render the public landing page."""
    return render_template('index.html')


@app.route('/login', methods=['GET'])
def login():
    """Render the login form (GET only; POST handling not implemented yet)."""
    return render_template('login.html')  # Incomplete
| 2.3125 | 2 |
hathor/conf/testnet.py | khengleng/khathor | 0 | 12772709 | from hathor.conf.settings import HathorSettings
# Network-wide constants for the Hathor "testnet-foxtrot" network.
SETTINGS = HathorSettings(
    # Address version bytes for pay-to-pubkey-hash and multisig addresses.
    P2PKH_VERSION_BYTE=b'\x49',
    MULTISIG_VERSION_BYTE=b'\x87',
    NETWORK_NAME='testnet-foxtrot',
    BOOTSTRAP_DNS=['foxtrot.testnet.hathor.network'],
    # Genesis stuff
    GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a914a584cf48b161e4a49223ed220df30037ab740e0088ac'),
    GENESIS_TIMESTAMP=1577836800,
    GENESIS_BLOCK_NONCE=826272,
    GENESIS_BLOCK_HASH=bytes.fromhex('0000033139d08176d1051fb3a272c3610457f0c7f686afbe0afe3d37f966db85'),
    GENESIS_TX1_NONCE=190,
    GENESIS_TX1_HASH=bytes.fromhex('00e161a6b0bee1781ea9300680913fb76fd0fac4acab527cd9626cc1514abdc9'),
    GENESIS_TX2_NONCE=115,
    GENESIS_TX2_HASH=bytes.fromhex('00975897028ceb037307327c953f5e7ad4d3f42402d71bd3d11ecb63ac39f01a'),
    # tx weight parameters. With these settings, tx weight is always 8
    MIN_TX_WEIGHT_K=0,
    MIN_TX_WEIGHT_COEFFICIENT=0,
    MIN_TX_WEIGHT=8,
)
| 1.539063 | 2 |
sample_register/sample_register/page/jobboard/jobboard.py | TRUFIL/sampreg | 0 | 12772710 | <reponame>TRUFIL/sampreg
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr,now,add_days
import json
import datetime
@frappe.whitelist()
def get_items(sales_order):
    """Return item codes and test types for the Sales Order behind a Service Request.

    :param sales_order: name of the Service Request document.
    :return: dict with key "get_items" mapping to rows of (item_code, test_type).
    """
    # SECURITY: the original interpolated user input into SQL with
    # str.format (SQL injection).  Use the ORM lookup and a bound query
    # parameter instead.
    so = frappe.db.get_value("Service Request", sales_order, "sales_order")
    return {
        "get_items": frappe.db.sql("""select soi.item_code, i.test_type from `tabSales Order Item` soi, `tabItem` i
            where soi.item_code=i.item_code and soi.parent = %(so)s
            UNION ALL select pi.item_code,item.test_type from `tabPacked Item` pi, tabItem item where pi.item_code=item.item_code and
            pi.parent = %(so)s
            """, {"so": so}, as_list=1)
    }
@frappe.whitelist()
def get_sales_order():
    """Return the names of all Sales Orders as a list of rows."""
    rows = frappe.db.sql("""select name from `tabSales Order`""", as_list=1)
    return {"get_sales_order": rows}
@frappe.whitelist()
def get_sample_data(sales_order):
    """Return submitted samples of *sales_order* that have no job card yet.

    :param sales_order: Service Request name matched against the
        Sample Entry Register's order_id field.
    """
    # SECURITY: the order id is passed as a bound query parameter instead
    # of being interpolated with str.format (SQL injection).
    return {
        "get_sample_data": frappe.db.sql("""select false, name, customer, type, priority, standards, sales_order, test_group,
            order_id,
            case when 5!=6 then (select sales_order from `tabService Request` where name=order_id)
            ELSE ""
            END AS 'sales_order'
            from `tabSample Entry Register` where job_card_status="Not Available" and docstatus = 1 and order_id = %s order by name""",
            (sales_order,), as_list=1)
    }
@frappe.whitelist()
def get_sample_data_with_job():
    """Return all samples that already have a job card status set.

    Each row also resolves the sales order of the linked Service Request
    via a correlated sub-select.
    """
    return {
        "get_sample_data": frappe.db.sql("""select false, name, customer, type, priority, standards, test_group,order_id,
            case when 5!=6 then (select sales_order from `tabService Request` where name=order_id)
            ELSE ""
            END AS 'sales_order'
            from `tabSample Entry Register` where job_card_status!="" order by name""", as_list=1)
    }
@frappe.whitelist()
def get_test_data(test_group):
	"""Return the names of all Test Name documents belonging to *test_group*."""
	# Parameterized: the old query used raw %-string interpolation, which was
	# vulnerable to SQL injection.
	return {
		"get_test_data": frappe.db.sql(
			"""select name from `tabTest Name` where test_group=%s order by name""",
			(test_group,), as_list=1)
	}
@frappe.whitelist()
def create_job_card(selectedData, test_list_unicode):
	"""Create one Job Card Creation document per selected sample row.

	Args:
		selectedData: JSON-encoded list of sample rows; each row provides
			sampleid, customer, type, priority and standard keys.
		test_list_unicode: JSON-encoded list of Item codes to attach as
			test detail rows on every job card.

	Shows a frappe message with links to the created job card and its sample.
	"""
	test_list = json.loads(test_list_unicode)
	selected_rows = json.loads(selectedData)
	for r in selected_rows:
		doc_job_card_creation = frappe.new_doc("Job Card Creation")
		doc_job_card_creation.sample_id = r.get("sampleid")
		doc_job_card_creation.customer = r.get("customer")
		doc_job_card_creation.type = r.get("type")
		doc_job_card_creation.priority = r.get("priority")
		# NOTE(review): the row key is "standard" while the field is
		# "standards" -- kept as-is, confirm against the front-end payload.
		doc_job_card_creation.standards = r.get("standard")
		doc_job_card_creation.creation_date = datetime.datetime.today()
		for test_type in test_list:
			# One child row per requested item, enriched from the Item master.
			doc_job_card_creation.append("test_details", {
				"doctype": "Job Card Creation Test Details",
				"item_code": test_type,
				"item_name": frappe.db.get_value("Item", test_type, "item_name"),
				"test_type": frappe.db.get_value("Item", test_type, "test_type"),
				"test_group": frappe.db.get_value("Item", test_type, "test_group"),
			})
		doc_job_card_creation.save()
		sample_link = "<a href='desk#Form/Sample Entry Register/" + r.get("sampleid") + "'>" + r.get("sampleid") + " </a>"
		job_link = "<a href='desk#Form/Job Card Creation/" + doc_job_card_creation.name + "'>" + doc_job_card_creation.name + " </a>"
		# Typo fixed in the user-facing message ("successfuly").
		frappe.msgprint("Job Card " + job_link + " is created successfully for sample : " + sample_link)
@frappe.whitelist()
def set_sample_data(priority, standards, selectedData):
	"""Apply *priority* and *standards* to every selected sample register entry."""
	for row in json.loads(selectedData):
		sample_id = row.get("sampleid")
		if not sample_id:
			continue  # skip rows without a sample reference
		doc = frappe.get_doc("Sample Entry Register", sample_id)
		doc.priority = priority
		doc.standards = standards
		doc.save()
@frappe.whitelist()
def set_priority_data(priority, selectedData):
	"""Set the priority field on every selected sample register entry."""
	rows = json.loads(selectedData)
	for row in rows:
		if not row.get("sampleid"):
			continue
		entry = frappe.get_doc("Sample Entry Register", row.get("sampleid"))
		entry.priority = priority
		entry.save()
@frappe.whitelist()
def set_standards_data(standards, selectedData):
	"""Set the standards field on every selected sample register entry."""
	for selected in json.loads(selectedData):
		sample_name = selected.get("sampleid")
		if sample_name:
			register_doc = frappe.get_doc("Sample Entry Register", sample_name)
			register_doc.standards = standards
			register_doc.save()
| 2.28125 | 2 |
autumn/projects/covid_19/vaccine_optimisation/sample_code.py | emmamcbryde/AuTuMN-1 | 14 | 12772711 | from autumn.projects.covid_19.vaccine_optimisation.vaccine_opti import (
get_decision_vars_names,
initialise_opti_object,
)
import numpy as np
import yaml
COUNTRY = "malaysia" # should use "malaysia" or "philippines"
def run_sample_code():
    """Build the optimisation object, draw one random decision vector, score it."""
    # The optimisation object must be created once before any evaluation.
    opti_object = initialise_opti_object(COUNTRY)
    # Decision vector: two phases of 8 normalised allocation weights, followed
    # by a single random relaxation level in [0, 1].
    decision_vars = []
    for _phase in range(2):
        draws = list(np.random.uniform(low=0.0, high=1.0, size=(8,)))
        total = sum(draws)
        decision_vars.extend(d / total for d in draws)
    decision_vars.append(np.random.uniform(low=0.0, high=1.0))
    # Evaluate the objective for this candidate.
    [total_deaths, max_hospital, relaxation] = opti_object.evaluate_objective(decision_vars)
    # Report inputs and outputs.
    print(get_decision_vars_names())
    print(f"Decision variables: {decision_vars}")
    print(f"N deaths: {total_deaths} / Max hospital: {max_hospital} / Relaxation: {relaxation}")
def dump_decision_vars_sample(n_samples, file_path="comparison_test/vars_sample.yml"):
    """Draw *n_samples* random decision vectors and dump them to a YAML file.

    Args:
        n_samples: number of decision vectors to generate.
        file_path: destination YAML file (default kept backward compatible
            with the previous hard-coded path).
    """
    decision_vars_sample = []
    for _ in range(n_samples):
        decision_vars = []
        for _phase in range(2):
            # 8 allocation weights per phase, normalised to sum to 1.
            draws = list(np.random.uniform(low=0.0, high=1.0, size=(8,)))
            total = sum(draws)
            # float() keeps the YAML free of numpy scalar tags.
            decision_vars += [float(d / total) for d in draws]
        decision_vars.append(float(np.random.uniform(low=0.0, high=1.0)))
        decision_vars_sample.append(decision_vars)
    with open(file_path, "w") as f:
        yaml.dump(decision_vars_sample, f)
def evaluate_sample_decision_vars(user="Guillaume"):
    """Evaluate every decision vector in vars_sample.yml and dump the objectives.

    Writes comparison_test/obj_values_<user>.yml containing parallel lists of
    death counts and peak hospital occupancy, one entry per sampled vector.
    """
    file_path = "comparison_test/vars_sample.yml"
    with open(file_path) as file:
        # safe_load: yaml.load without an explicit Loader is deprecated (and a
        # TypeError in PyYAML >= 6); the file only contains lists of floats.
        vars_samples = yaml.safe_load(file)
    opti_object = initialise_opti_object(COUNTRY)
    dumped_dict = {"deaths": [], "hosp": []}
    for decision_vars in vars_samples:
        [total_deaths, max_hospital, _] = opti_object.evaluate_objective(decision_vars)
        dumped_dict["deaths"].append(float(total_deaths))
        dumped_dict["hosp"].append(float(max_hospital))
    file_path = f"comparison_test/obj_values_{user}.yml"
    with open(file_path, "w") as f:
        yaml.dump(dumped_dict, f)
def compare_outputs():
    """Compare objective values produced by two users and print the differences.

    Reads comparison_test/obj_values_<name>.yml for Romain and Guillaume; for
    each objective prints the per-sample percentage difference (reference:
    Romain) and each user's rank ordering of the values.
    """
    outputs = {}
    for name in ["Romain", "Guillaume"]:
        file_path = f"comparison_test/obj_values_{name}.yml"
        with open(file_path) as file:
            # safe_load: yaml.load without a Loader is deprecated/removed in
            # modern PyYAML; the data is plain lists of floats.
            outputs[name] = yaml.safe_load(file)
    for obj in ["deaths", "hosp"]:
        perc_diff = [
            int(
                100
                * (outputs["Guillaume"][obj][i] - outputs["Romain"][obj][i])
                / outputs["Romain"][obj][i]
            )
            for i in range(len(outputs["Romain"][obj]))
        ]
        average_perc_diff = sum(perc_diff) / len(perc_diff)
        print(f"Comparison for {obj}:")
        print("Percentage difference (ref. Romain):")
        print(perc_diff)
        print(f"Average perc diff: {average_perc_diff}%")
        for name in ["Romain", "Guillaume"]:
            x = outputs[name][obj]
            ordered_output = sorted(x)
            # NOTE: list.index returns the first match, so tied values share a rank.
            ranks = [ordered_output.index(v) for v in x]
            print(f"Ranks {name}:")
            print(ranks)
        print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
        print()
# evaluate_sample_decision_vars("Guillaume")
# compare_outputs()
# This can be run using:
# python -m apps runsamplevaccopti
| 2.78125 | 3 |
core/auth/viewsets.py | bharathjinka09/django-react-auth-backend | 0 | 12772712 | from rest_framework.response import Response
from rest_framework_simplejwt.views import TokenObtainPairView
from rest_framework.viewsets import ModelViewSet
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
from core.auth.serializers import LoginSerializer, RegisterSerializer
class LoginViewSet(ModelViewSet, TokenObtainPairView):
    """POST-only endpoint that validates credentials and returns a JWT pair."""
    serializer_class = LoginSerializer
    permission_classes = (AllowAny,)
    http_method_names = ['post']

    def create(self, request, *args, **kwargs):
        """Validate the login payload; translate token errors into InvalidToken."""
        serializer = self.get_serializer(data=request.data)
        try:
            serializer.is_valid(raise_exception=True)
        except TokenError as token_error:
            raise InvalidToken(token_error.args[0])
        payload = serializer.validated_data
        return Response(payload, status=status.HTTP_200_OK)
class RegistrationViewSet(ModelViewSet, TokenObtainPairView):
    """POST-only endpoint that registers a user and issues fresh JWT tokens."""
    serializer_class = RegisterSerializer
    permission_classes = (AllowAny,)
    http_method_names = ['post']

    def create(self, request, *args, **kwargs):
        """Create the user, then return the user data plus a refresh/access pair."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        new_user = serializer.save()
        refresh = RefreshToken.for_user(new_user)
        body = {
            "user": serializer.data,
            "refresh": str(refresh),
            "token": str(refresh.access_token),
        }
        return Response(body, status=status.HTTP_201_CREATED)
| 2.09375 | 2 |
toil/script/toil_config.py | dhutty/toil-py | 0 | 12772713 | #! python
import logging
import os.path
import sys
import traceback
import toil.config
import toil.parm
import toil.parm.parse
import toil.framework
import toil.util.decorator
from toil.batch.base import BaseBatch
logging.basicConfig(format='%(asctime)s %(levelname)s: %(name)s %(message)s', level=logging.ERROR)
logger = logging.getLogger(__name__)
class Batch(BaseBatch):
    """Command-line batch that manages toil configuration files and encryption keys.

    Supported operations (selected by command-line flags): initialise a config
    directory, create a config file, create an encryption key, and encrypt or
    decrypt an existing config file.
    """

    def create_toil(self):
        """Parse command-line parameters and build the toil framework object."""
        # A config file may be passed in as a parameter; all operation flags
        # are optional.
        args = toil.parm.parse.handle_parms(optional=['create', 'create_key', 'encrypt', 'decrypt', 'init'])
        logger.debug(args)
        return toil.framework.create(**args)

    @staticmethod
    def _report_exception(ex):
        """Log an exception uniformly and print its traceback to stdout.

        Consolidates the identical handler previously duplicated in every
        operation method.
        """
        logger.error(ex)
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        logger.error(template.format(type(ex).__name__, ex.args))
        traceback.print_exc(file=sys.stdout)

    @toil.util.decorator.timeit(loops=1)
    def execute(self, framework):
        """Dispatch to the operation(s) selected by the parsed flags.

        Prints a usage message when no operation flag was given.
        """
        logger.info('execute')
        logger.debug(framework.args)
        performed_config = False
        if framework.args['init'] is not None:
            self.toil_init(framework, framework.args['init'])
            performed_config = True
        if framework.args['create'] is not None:
            self.create_config(framework, framework.args['create'])
            performed_config = True
        if framework.args['create_key'] is not None:
            self.create_encryption_key(framework, framework.args['create_key'])
            performed_config = True
        if framework.args['encrypt'] is not None:
            self.encrypt(framework, framework.args['encrypt'])
            performed_config = True
        if framework.args['decrypt'] is not None:
            self.decrypt(framework, framework.args['decrypt'])
            performed_config = True
        if not performed_config:
            usage = """
    usage: toil-config [--init CONFIG_DIR_NAME]
                        create directory, create config.json, create key
                       [--create FILE_NAME]
                        create config file with file name
                       [--create_key FILE_NAME]
                        create encryption key with file name
                       [--encrypt CONFIG_FILE_NAME]
                        encrypt config file
                       [--decrypt CONFIG_FILE_NAME]
                        decrypt config file

    To get started try this:

    toil-config --init /path/.toil
        creates a config.json file in your directory and an encryption key

    toil-config -k /path/.toil/key --encrypt /path/.toil/config.json
        create the file /path/.toil/config.json.encrypted where all values are encrypted

    toil-config -k /path/.toil/key --decrypt /Users/aclove/.toil/config.json.encrypted
        create the file /path/.toil/config.json.encrypted.decrypted where all values are decrypted
            """
            print(usage)

    def toil_init(self, framework, dir_name):
        """Create both a config file and an encryption key inside *dir_name*."""
        self.create_config(framework, dir_name + '/config.json')
        self.create_encryption_key(framework, dir_name + '/key')

    def create_config(self, framework, file_name):
        """Generate a new config file unless one already exists at *file_name*."""
        try:
            if os.path.isfile(file_name):
                print('The file {0} already exists'.format(file_name))
            else:
                toil.config.util.generate_config_file(file_name)
                print('created {0}'.format(file_name))
        except Exception as ex:
            self._report_exception(ex)

    def create_encryption_key(self, framework, file_name):
        """Generate a new encryption key unless one already exists at *file_name*."""
        try:
            if os.path.isfile(file_name):
                print('The file {0} already exists'.format(file_name))
            else:
                key = framework.encryptor.generate_key(file_name)
                print('created {0} with value {1}'.format(file_name, key))
        except Exception as ex:
            self._report_exception(ex)

    def encrypt(self, framework, file_name):
        """Encrypt *file_name* into a sibling '<file_name>.encrypted' file."""
        logger.info('execute')
        try:
            framework.encrypt_config_file(file_name, file_name + '.encrypted')
        except Exception as ex:
            self._report_exception(ex)

    def decrypt(self, framework, file_name):
        """Decrypt *file_name* into a sibling '<file_name>.decrypted' file."""
        logger.info('execute')
        try:
            framework.decrypt_config_file(file_name, file_name + '.decrypted')
        except Exception as ex:
            self._report_exception(ex)
def main():
    """Console entry point: build and run the configuration batch."""
    Batch().run()
| 2.1875 | 2 |
app/dashboard/migrations/0016_auto_20201119_1901.py | pnsn/squac_api | 6 | 12772714 | <gh_stars>1-10
# Generated by Django 3.1.2 on 2020-11-19 19:01
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.1.2, 2020-11-19).
    # Replaces the misspelled Widget field ``color_palet`` with
    # ``color_pallet``. NOTE: dropping and re-adding the column discards any
    # values stored under the old field.
    dependencies = [
        ('dashboard', '0015_widget_color_palet'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='widget',
            name='color_palet',
        ),
        migrations.AddField(
            model_name='widget',
            name='color_pallet',
            field=models.CharField(choices=[('squac', 'Squac')], default='squac', max_length=24),
        ),
    ]
| 1.765625 | 2 |
citerate/citerate.py | markmelnic/citerate | 0 | 12772715 | <filename>citerate/citerate.py
def citerator(data: list, x: int, y: int, layer=False) -> list:
    """Iterate a 2-D matrix outward from starting point (x, y), ring by ring.

    The value at (x, y) is produced first, then each surrounding ring at
    increasing distance, walked clockwise (top row left-to-right, right
    column top-to-bottom, bottom row right-to-left, left column
    bottom-to-top).  Iteration stops at the first ring that lies entirely
    outside the matrix.

    Args:
        data (list): Two-dimensional data set (list of rows) to iterate over.
        x (int): Row index of the starting point.
        y (int): Column index of the starting point.
        layer (bool): If True, yield each ring as a single list; otherwise
            yield the values one at a time.  (The old docstring documented a
            non-existent ``layered`` parameter; fixed.)

    Yields:
        Matrix values (layer=False) or whole rings as lists (layer=True).
    """
    if layer:
        yield [data[x][y]]
    else:
        yield data[x][y]

    size = len(data)

    def _in_bounds(row, col):
        # Mirrors the original bounds test exactly, including its quirk of
        # also capping the column index by the row count (len(data)).
        # NOTE(review): exact only for square matrices -- confirm intent for
        # ragged/rectangular input.
        return 0 <= row < size and 0 <= col < size and col < len(data[row])

    # Offsets 1..size correspond to the original ``depth in range(len(data))``.
    for offset in range(1, size + 1):
        ring = []
        # top row, left to right
        for col in range(y - offset, y + offset):
            if _in_bounds(x - offset, col):
                ring.append(data[x - offset][col])
        # right column, top to bottom
        for row in range(x - offset, x + offset):
            if _in_bounds(row, y + offset):
                ring.append(data[row][y + offset])
        # bottom row, right to left
        for col in range(y + offset, y - offset, -1):
            if _in_bounds(x + offset, col):
                ring.append(data[x + offset][col])
        # left column, bottom to top
        for row in range(x + offset, x - offset, -1):
            if _in_bounds(row, y - offset):
                ring.append(data[row][y - offset])
        if not ring:
            break
        if layer:
            yield ring
        else:
            for value in ring:
                yield value
| 3.484375 | 3 |
src/nitpick/plugins/info.py | finswimmer/nitpick | 4 | 12772716 | """Info needed by the plugins."""
from dataclasses import dataclass, field
from typing import Set
from identify import identify
from nitpick.constants import DOT
from nitpick.exceptions import Deprecation
from nitpick.project import Project
@dataclass
class FileInfo:
    """File information needed by the plugin."""

    project: Project
    path_from_root: str
    tags: Set[str] = field(default_factory=set)

    @classmethod
    def create(cls, project: Project, path_from_root: str) -> "FileInfo":
        """Clean the file name, detect its tags and build a FileInfo."""
        # The deprecation check also emits a warning as a side effect.
        if Deprecation.pre_commit_without_dash(path_from_root):
            clean_path = DOT + path_from_root
        elif path_from_root.startswith("-"):
            clean_path = DOT + path_from_root[1:]
        else:
            clean_path = path_from_root
        detected_tags = set(identify.tags_from_filename(clean_path))
        return cls(project, clean_path, detected_tags)
| 2.28125 | 2 |
program/client/net_client.py | Addision/ProtoExcelTool | 0 | 12772717 | <reponame>Addision/ProtoExcelTool
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : client.py
@Time : 2022/03/08 22:05:10
@Author : felix
@Version : 1.0
@Contact : <EMAIL>
@License : (C)Copyright 2021-2025, felix&lai
@Desc : 协议测试客户端
'''
# here put the import lib
from time import sleep
from client.session import *
from client.data_pack import *
from client.user import *
from proto_xml import *
from google.protobuf import json_format
import codecs
import requests
import json
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
class NetClient(QMainWindow):
    """Protocol test client.

    Manages the socket session, packs and sends protobuf requests, records a
    request history on disk, and forwards server responses to the UI through
    Qt signals.
    """

    # Emitted to display a response in the UI: (message id, json body).
    ShowMsgSignal = pyqtSignal(str, str)
    # Emitted to ask the UI to reconnect to another server: (ip, port).
    ConnServerSignal = pyqtSignal(str, int)

    def __init__(self, parent=None):
        super(NetClient, self).__init__()
        self.protoXml = ToolProtoXml()
        self.dev_login_url = 'http://192.168.50.78:9999/sys/login'
        self.pro_login_url = ''
        self.user = User()
        self.session = Session()
        self.dataPack = DataPack()
        self.msgDefine = {}
        self.recordReq = {}
        self.req_history_file = "./config/request.json"
        self.loadReqHistory()

    def loadReqHistory(self):
        """Load the previously sent requests (msg id -> json body) from disk."""
        try:
            with codecs.open(self.req_history_file, 'r', encoding='GB2312') as f:
                content = f.read()
            self.recordReq = json.loads(content)
        except Exception as e:
            # Best effort: a missing or corrupt history file is not fatal.
            print(e)

    def getReqHistory(self, msgID):
        """Return the last request body sent for *msgID*, or None."""
        return self.recordReq.get(msgID)

    def connect(self, ip, port):
        """Connect the session to the server; start the response thread on success."""
        if self.session.conn_server(ip, port):
            print("conn server({0}:{1}) ok".format(ip, port))
            self.startRespShowThrd()
            return True
        return False

    def disconnect(self):
        """Close the session and stop the response-display thread."""
        try:
            self.session.close()
            self.isDisconn = True
            self.showRespInfoThrd.join()
        except Exception:
            # Best effort: session/thread may not have been started.
            pass

    def sendMsg(self, msgID, msgClass, msgName, content):
        """Pack *content* (json) into protobuf message *msgID* and send it.

        Falls back to the recorded request history when *content* is empty,
        and records the sent body on success.
        """
        try:
            msgProto = self.dataPack.getMsgProto(msgClass, msgName)
            if not msgProto:
                return
            if not content:
                content = self.recordReq[msgID]
            request = json_format.Parse(content, msgProto)
            nMsgID = int(msgID)
            # NOTE(review): special-casing of the login message (7056) via
            # webVerifyLogin() was disabled in the original -- re-enable there
            # if the gateway requires a web token again.
            msg = self.dataPack.dataPack2(nMsgID, request)
            if self.session.writeData(msg):
                print("send msg({0}) ok".format(msgID))
            else:
                print("send msg({0}) error".format(msgID))
            # Remember the body so it can be replayed later.
            self.recordReq[msgID] = content
        except Exception as e:
            Logger.WriteLog("send msg failed: {0}".format(str(e)))
            print("send msg failed, ", e)

    def saveSendHistory(self):
        """Persist the request history to disk as pretty-printed JSON."""
        try:
            with codecs.open(self.req_history_file, 'w+', encoding='GB2312') as f:
                jsonStr = json.dumps(
                    self.recordReq, indent=4, sort_keys=False, ensure_ascii=False)
                f.write(jsonStr + '\n')
        except Exception as e:
            print(e)

    def webVerifyLogin(self):
        """Ask the web backend to verify the login; store the user info/token.

        Returns True on success, False otherwise.
        """
        try:
            postdata = {
                'username': self.user.username,
                # NOTE(review): restored from a scrubbed placeholder -- confirm
                # the User field holding the password is named ``password``.
                'password': self.user.password,
                'type': 'game',
            }
            headers = {
                'Content-Type': 'application/json; charset=UTF-8',
                'tenant-id': 'd59de7b24a9f11eca01000163e144cbe,ff51a4b4f7f167fedb7e51f451270f99',
            }
            resp = requests.post(self.dev_login_url, json=postdata, headers=headers)
            if resp.status_code == 200:
                respDict = resp.json()
                result = respDict['result']
                self.user.userInfo = result['userInfo']
                self.user.token = result['token']
                self.user.inviteCode = result['InvitationCodeStatus']
                self.user.wallets = result['wallets']
            else:
                print("http post error, code=", resp.status_code)
                return False
            return True
        except Exception as e:
            print(e)
            return False

    def startRespShowThrd(self):
        """Start a background thread that forwards server responses to the UI."""
        self.isDisconn = False
        self.showRespInfoThrd = threading.Thread(target=self.startRespShow)
        self.showRespInfoThrd.start()

    def startRespShow(self):
        """Drain the session's response queue and emit each message to the UI."""
        while True:
            try:
                if self.isDisconn:
                    return
                # Poll with a 1-second timeout so the disconnect flag is
                # re-checked regularly; get() raises queue.Empty on timeout.
                msg = self.session.queue.get(timeout=1)
                if msg is not None:
                    print("recv msgid:{0}".format(msg[0]))
                    self.ShowMsgSignal.emit(str(msg[0]), msg[1])
                    # NOTE(review): the original contained disabled handling of
                    # msg 7000 that emitted ConnServerSignal to switch gateways.
            except Exception:
                # Timeout (queue.Empty) or transient error: keep polling.
                pass
src/go/Google_gcj_tools/lib/code_jam_login.py | veltzerdoron/GCJ | 0 | 12772718 | <filename>src/go/Google_gcj_tools/lib/code_jam_login.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module exposes one public function Login(), that given the password of
the Code Jam contestant should log him in and return a cookie."""
from datetime import datetime, timedelta
import BaseHTTPServer
import getpass
import httplib
import json
import sys
import time
import urllib2
from src.go.lib import constants
from src.go.lib import data_manager
from src.go.lib import error
from src.go.lib import google_login
from src.go.lib import http_interface
def _GetUserPassword(user, contest_data):
"""Get the user password from a possible input string, a value inside the
configuration file or directly from the user.
Args:
user: Name of the user whose password must be retrieved.
contest_data: Data of the actual contest in a Python dict.
Returns:
The password of the specified user. This password might come from the
configuration file or the standard input.
"""
# Retrieve the password from the contest data if it is there.
if 'password' in contest_data:
return contest_data['password']
# No password was specified in the command line or the configuration file,
# read it from the user.
print 'Cannot find password for user {0}.'.format(user)
return getpass.getpass()
def _MakeLogin(host, user, password):
  """Retrieve the authentication token and cookie from the code jam server,
  using the given user and password to authenticate.

  Args:
    host: Name of the host that runs the competition.
    user: User to authenticate in the Code Jam servers.
    password: Password of the user.

  Returns:
    A tuple with the authentication token and the login cookie.

  Raises:
    error.AuthenticationError: If the server answers with an authentication
      error, as specified in the GoogleLogin protocol.
    error.NetworkError: If there was a problem while communicating with the
      server.
  """
  try:
    # Get the authentication token and gae cookie using the GoogleLogin module.
    sys.stdout.write('Logging into "{0}" with user "{1}"...\n'.format(
        host, user))
    application_name = 'gcj_commandline-{0}'.format(constants.VERSION)
    auth_token, cookie = google_login.Login(
        host, 'HOSTED_OR_GOOGLE', user, password, 'ah', application_name, False)
    sys.stdout.write('Successfully logged into "{0}" with user "{1}".\n'.format(
        host, user))
    return auth_token, cookie
  except google_login.AuthenticationError as e:
    # Map each GoogleLogin failure reason to a human-readable message
    # (replaces the former 20-line if/elif chain).
    reason_messages = {
        'BadAuthentication': 'Invalid username or password.\n',
        'CaptchaRequired': (
            'Please go to https://www.google.com/accounts/DisplayUnlockCaptcha '
            'and verify you are a human. Then try again.\n'),
        'NotVerified': 'Account not verified.',
        'TermsNotAgreed': 'User has not agreed to TOS.',
        'AccountDeleted': 'The user account has been deleted.',
        'AccountDisabled': 'The user account has been disabled.',
        'ServiceDisabled': 'The user\'s access to the service has '
                           'been disabled.',
        'ServiceUnavailable': 'The service is not available, try again '
                              'later.',
    }
    message = reason_messages.get(
        e.reason, 'Unrecognized authentication error. Reason: %s' % e.reason)
    raise error.AuthenticationError(message)
  except urllib2.HTTPError as e:
    explanation = BaseHTTPServer.BaseHTTPRequestHandler.responses[e.code][0]
    raise error.NetworkError('HTTP error while logging into the Google Code '
                             'Jam server ({0}): {1}\n'.format(e.code,
                                                              explanation))
def _GetMiddlewareTokens(host, cookie):
  """Get needed middleware tokens for the specified host.

  Args:
    host: Host where the contest is running.
    cookie: Cookie that the user received when authenticating.

  Returns:
    A tuple two elements: a dictionary containing all the middleware tokens,
    and the tokens expiration date.

  Raises:
    error.NetworkError: If a network error occurs while communicating with the
      server.
    error.ServerError: If the server answers code distinct than 200 or the
      response is a malformed JSON.
  """
  # Send an HTTP request to get the problem list from the server.
  sys.stdout.write('Getting middleware tokens from "{0}"...\n'.format(host))
  request_referer = 'http://{0}/codejam'.format(host)
  # One token is requested per comma-separated action name.
  request_arguments = {
      'cmd': 'GetMiddlewareTokens',
      'actions': 'GetInitialValues,GetInputFile,GetUserStatus,SubmitAnswer',
  }
  request_headers = {
      'Referer': request_referer,
      'Cookie': cookie,
  }
  try:
    status, reason, response = http_interface.Get(
        host, '/codejam/middleware', request_arguments, request_headers)
  except httplib.HTTPException as e:
    raise error.NetworkError('HTTP exception while retrieving middleware '
                             'tokens from the Google Code Jam server: '
                             '{0}\n'.format(e))
  # Check if the status is not good.
  if status != 200 or reason != 'OK':
    raise error.ServerError('Error while communicating with the server, cannot '
                            'get middleware tokens. Check that the host, '
                            'username and contest id are valid.\n')
  # Extract token information from server response.
  try:
    tokens_info = json.loads(response)
    # 'tokens' maps action name -> token; 'expire' is the expiration date string.
    return tokens_info['tokens'], tokens_info['expire']
  except (KeyError, ValueError) as e:
    raise error.ServerError('Invalid response received from the server, cannot '
                            'initialize contest. Check that the contest id is '
                            'valid: {0}.\n'.format(e))
def _UTCToLocalDatetime(date_string, input_format, output_format=None):
"""Convert a date string to UTC into a possible different format.
Args:
date_string: String with the datetime to be converted.
input_format: Format used to parse date_string, as expected by strptime.
output_format: Format used to format the converted datetime. If None,
input_format is used.
Returns:
The date string converted to UTC and formatted with output_format (or
input_format if output_format is None).
"""
if output_format is None:
output_format = input_format
parsed_utc_datetime = datetime.strptime(date_string, input_format)
offset = time.altzone if time.daylight else time.timezone
local_datetime = parsed_utc_datetime - timedelta(seconds=offset)
return local_datetime.strftime(output_format)
def Login(password=None):
  """Renew contest cookie for the specified user in the host.

  Args:
    password: Password of the contestant. If None, then the password
      will be retrieved using the GetUserPassword() function.

  Returns:
    The new contest cookie for the contestant.

  Raises:
    error.ConfigurationError: If the configuration file is missing or
      incomplete.
  """
  # Read the current configuration file and extract the host and username.
  try:
    contest_data = data_manager.ReadData()
    host = contest_data['host']
    user = contest_data['user']
  except KeyError as e:
    raise error.ConfigurationError('No host or username was found in the user '
                                   'configuration file: {0}.\n'.format(e))
  # Retrieve the password from elsewhere, as the user didn't provide one.
  if password is None:
    password = _GetUserPassword(user, contest_data)
  # Log in into Google servers using ClientLogin and show the cookie expiration
  # date in localtime.
  cookie = _MakeLogin(host, user, password)[1]
  cookie_expiration_time = google_login.GetCookieExpirationTime(cookie)
  if cookie_expiration_time is not None:
    sys.stdout.write('Login cookie will expire at {0}.\n'.format(
        _UTCToLocalDatetime(cookie_expiration_time,
                            '%a, %d-%b-%Y %H:%M:%S %Z',
                            '%Y-%m-%d %H:%M:%S')))
  else:
    sys.stdout.write('Login cookie expiration time not found.\n')
  # Get new middleware tokens and show the expiration date in localtime.
  middleware_tokens, tokens_expiration_time = _GetMiddlewareTokens(host, cookie)
  sys.stdout.write('Middleware tokens will expire at {0}.\n'.format(
      _UTCToLocalDatetime(tokens_expiration_time, '%Y-%m-%d %H:%M:%S')))
  # Store cookie and middleware tokens in the current configuration file.
  contest_data['cookie'] = cookie
  contest_data['middleware_tokens'] = middleware_tokens
  contest_data['tokens_expiration_time'] = tokens_expiration_time
  # Finally, write the contest data to the current data file and return the
  # cookie.
  data_manager.WriteData(contest_data)
  return cookie
| 2.59375 | 3 |
tests/python/frontend/mxnet/model_zoo/squeezenet.py | XiaoSong9905/tvm | 4,640 | 12772719 | <reponame>XiaoSong9905/tvm
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Symbol of SqueezeNet
Reference:
Iandola, <NAME>., et al.
"Squeezenet: Alexnet-level accuracy with 50x fewer parameters and< 0.5 mb model size." (2016).
"""
import mxnet as mx
# Helpers
def _make_fire(net, squeeze_channels, expand1x1_channels, expand3x3_channels):
    """Fire module: a 1x1 squeeze conv feeding parallel 1x1 and 3x3 expand
    convs whose outputs are concatenated along the channel axis."""
    squeezed = _make_fire_conv(net, squeeze_channels, 1, 0)
    expand_left = _make_fire_conv(squeezed, expand1x1_channels, 1, 0)
    expand_right = _make_fire_conv(squeezed, expand3x3_channels, 3, 1)
    # NCHW layout assumed: dim=1 is the channel axis.
    return mx.sym.concat(expand_left, expand_right, dim=1)
def _make_fire_conv(net, channels, kernel_size, padding=0):
    """Square-kernel convolution followed by a ReLU activation."""
    conv = mx.sym.Convolution(
        net,
        num_filter=channels,
        kernel=(kernel_size, kernel_size),
        pad=(padding, padding),
    )
    return mx.sym.Activation(conv, act_type="relu")
# Net
def get_symbol(num_classes=1000, version="1.0", **kwargs):
    """Get symbol of SqueezeNet

    Parameters
    ----------
    num_classes: int
        The number of classification results
    version : str, optional
        "1.0" or "1.1" of SqueezeNet

    Returns
    -------
    mxnet.symbol.Symbol
        The softmax output symbol of the network.
    """
    assert version in [
        "1.0",
        "1.1",
    ], "Unsupported SqueezeNet version {version}:" "1.0 or 1.1 expected".format(version=version)
    net = mx.sym.Variable("data")
    if version == "1.0":
        # v1.0 stem: 7x7 stride-2 conv with 96 filters, then max pooling.
        net = mx.sym.Convolution(net, num_filter=96, kernel=(7, 7), stride=(2, 2), pad=(3, 3))
        net = mx.sym.Activation(net, act_type="relu")
        net = mx.sym.Pooling(data=net, kernel=(3, 3), pool_type="max", stride=(2, 2))
        # Fire modules with pooling after fire4 and fire8 (paper layout).
        net = _make_fire(net, 16, 64, 64)
        net = _make_fire(net, 16, 64, 64)
        net = _make_fire(net, 32, 128, 128)
        net = mx.sym.Pooling(data=net, kernel=(3, 3), pool_type="max", stride=(2, 2))
        net = _make_fire(net, 32, 128, 128)
        net = _make_fire(net, 48, 192, 192)
        net = _make_fire(net, 48, 192, 192)
        net = _make_fire(net, 64, 256, 256)
        net = mx.sym.Pooling(data=net, kernel=(3, 3), pool_type="max", stride=(2, 2))
        net = _make_fire(net, 64, 256, 256)
    else:
        # v1.1 stem: smaller 3x3 stride-2 conv with 64 filters (fewer params,
        # same accuracy); pooling is moved earlier, after every pair of fires.
        net = mx.sym.Convolution(net, num_filter=64, kernel=(3, 3), stride=(2, 2), pad=(1, 1))
        net = mx.sym.Activation(net, act_type="relu")
        net = mx.sym.Pooling(data=net, kernel=(3, 3), pool_type="max", stride=(2, 2))
        net = _make_fire(net, 16, 64, 64)
        net = _make_fire(net, 16, 64, 64)
        net = mx.sym.Pooling(data=net, kernel=(3, 3), pool_type="max", stride=(2, 2))
        net = _make_fire(net, 32, 128, 128)
        net = _make_fire(net, 32, 128, 128)
        net = mx.sym.Pooling(data=net, kernel=(3, 3), pool_type="max", stride=(2, 2))
        net = _make_fire(net, 48, 192, 192)
        net = _make_fire(net, 48, 192, 192)
        net = _make_fire(net, 64, 256, 256)
        net = _make_fire(net, 64, 256, 256)
    # Classifier head: dropout, 1x1 conv to num_classes maps, global average
    # pooling, then softmax (no fully-connected layers in SqueezeNet).
    net = mx.sym.Dropout(net, p=0.5)
    net = mx.sym.Convolution(net, num_filter=num_classes, kernel=(1, 1))
    net = mx.sym.Activation(net, act_type="relu")
    net = mx.sym.Pooling(data=net, global_pool=True, kernel=(13, 13), pool_type="avg")
    net = mx.sym.flatten(net)
    return mx.sym.softmax(net)
| 1.726563 | 2 |
kube/volume.py | Stability-AI/kube | 0 | 12772720 | # 1. kubectl create -k "github.com/kubernetes-sigs/aws-fsx-csi-driver/deploy/kubernetes/overlays/stable/?ref=master"
# 2. python get_security_group.py
# 3. kubectl apply -f specs/eks/fsx.yml
import os
import sys
import tempfile
import time
from typing import List
import boto3
from kube2.types import Volume
from kube2.utils import (
check_name,
get_current_cluster,
get_volumes,
humanize_date,
load_template,
make_table,
sh,
sh_capture,
)
from kube2.aws_utils import (
get_cluster_vpc_id,
get_clusters,
get_security_group_id,
get_subnet_id,
)
def enable_fsx():
    """Install the AWS FSx CSI driver into the currently selected cluster."""
    manifest = 'github.com/kubernetes-sigs/aws-fsx-csi-driver/deploy/kubernetes/overlays/stable/?ref=master'
    sh('kubectl create -k "%s"' % manifest)
def is_fsx_enabled():
    """Return True when the FSx CSI driver is already registered in the cluster.

    ``kubectl get`` prints a header line starting with ``NAME`` on success.
    """
    output = sh_capture('kubectl get csidrivers.storage.k8s.io fsx.csi.aws.com')
    return output.strip().startswith('NAME')
def create_and_configure_security_group(
    *,
    cluster_name: str,
    volume_name: str,
    vpc_id: str,
):
    """Ensure a security group for the FSx filesystem exists in *vpc_id*.

    The group is named ``<cluster>-<volume>-fsx`` and allows inbound Lustre
    traffic (TCP 988) from the 192.168.0.0/16 range. Returns the group id.
    """
    ec2 = boto3.client('ec2', region_name='us-east-1')
    group_name = f'{cluster_name}-{volume_name}-fsx'
    sg_id = get_security_group_id(vpc_id=vpc_id, group_name=group_name)
    if sg_id is not None:
        # nothing to do — reuse the existing group
        print('Security group already exists for cluster...')
        return sg_id
    print('Security group does not exist for cluster. Creating one...')
    created = ec2.create_security_group(
        GroupName=group_name,
        Description=f'SG for FSx {cluster_name}-{volume_name}',
        VpcId=vpc_id,
    )
    sg_id = created['GroupId']
    # open the Lustre port (988/tcp) to the private network
    ec2.authorize_security_group_ingress(
        GroupId=sg_id,
        IpPermissions=[
            {
                'IpProtocol': 'tcp',
                'FromPort': 988,
                'ToPort': 988,
                'IpRanges': [{'CidrIp': '192.168.0.0/16'}],
            },
        ],
    )
    return sg_id
def get_pvc_name(volume_name: str):
    """Kubernetes PersistentVolumeClaim name derived from a volume name."""
    return 'pvc-{}'.format(volume_name)
def get_sc_name(volume_name: str):
    """Kubernetes StorageClass name derived from a volume name."""
    return 'sc-{}'.format(volume_name)
class VolumeCLI(object):
    '''
    Create or destroy shared persistent volumes on FSx.
    '''

    def create(
        self,
        *,
        name: str,
        storage_size: str,
    ):
        '''
        Create a new FSx volume backed by S3.

        :param name: unique volume name (validated by ``check_name``).
        :param storage_size: capacity string passed into the FSx template.
        '''
        check_name(name)
        if name in [v.name for v in get_volumes()]:
            print(f'Error: Volume "{name}" already exists.')
            sys.exit(1)
        cluster_name = get_current_cluster()
        if cluster_name is None:
            print('No cluster selected. Switch to or create a cluster first.')
            sys.exit(1)
        # enable the FSx feature on this cluster
        if not is_fsx_enabled():
            enable_fsx()
        vpc_id = get_cluster_vpc_id(cluster_name)
        sg_id = create_and_configure_security_group(
            cluster_name=cluster_name,
            volume_name=name,
            vpc_id=vpc_id
        )
        pvc_name = get_pvc_name(name)
        sc_name = get_sc_name(name)
        subnet_id = get_subnet_id(vpc_id)
        assert subnet_id is not None  # a cluster VPC always has a subnet
        with tempfile.TemporaryDirectory() as tmpdir:
            script_fn = os.path.join(tmpdir, 'fsx.yml')
            # render the FSx StorageClass/PVC manifest from the template
            # (BUG FIX: the args dict previously listed 'storage_class_name'
            # twice; the duplicate key has been removed)
            script = load_template(
                fn='templates/fsx.yml',
                args={
                    'storage_class_name': sc_name,
                    's3_import_path': f's3://kube2-volumes/{name}',
                    's3_export_path': f's3://kube2-volumes/{name}/export',
                    'security_group_id': sg_id,
                    'persistent_volume_claim_name': pvc_name,
                    'storage_size': storage_size,
                    'subnet_id': subnet_id,
                }
            )
            with open(script_fn, 'w') as f:
                f.write(script)
            print('Creating volume...')
            sh(f'kubectl apply -f {script_fn}')
            print('Waiting for FSx filesystem to be created (check progress here: https://console.aws.amazon.com/fsx/home?region=us-east-1)...')
            # poll the PVC phase for up to ~2 minutes; fall through either way
            for _ in range(60*2):
                # BUG FIX: poll the PVC that was just created instead of the
                # hard-coded name "pvc-my-vol"
                s = sh_capture(f'''kubectl get pvc {pvc_name} -o 'jsonpath={{..status.phase}}' ''').strip()
                if s == 'Bound':
                    break
                time.sleep(1)
        # show the final status line for the PVC
        sh(f'kubectl describe pvc | tail -n 1')

    def delete(
        self,
        *,
        name: str,
    ):
        '''
        Delete an FSx volume (its PVC and StorageClass).
        '''
        # TODO: more checks here around what can can't be deleted:
        # - doesn't exist?
        # - is attached to pods?
        # - has some files? (maybe y/n checks)
        pvc_name = get_pvc_name(name)
        sc_name = get_sc_name(name)
        # best-effort: each object is deleted independently so a missing PVC
        # does not prevent removal of the StorageClass (and vice versa)
        try:
            sh(f'kubectl delete pvc {pvc_name}')
        except Exception as e:
            print(e)
        try:
            sh(f'kubectl delete sc {sc_name}')
        except Exception as e:
            print(e)

    def list(self):
        '''
        List all the volumes in the current cluster.
        '''
        volumes = get_volumes()
        if len(volumes) == 0:
            print('No volumes.')
        else:
            table = [['NAME', 'CAPACITY', 'USAGE', 'CREATED']]
            for v in volumes:
                table.append([
                    v.name,
                    v.capacity,
                    v.usage,
                    humanize_date(v.created),
                ])
            print(make_table(table))
| 1.9375 | 2 |
src/qimpy/electrons/__init__.py | shankar1729/qimpy | 3 | 12772721 | """Electronic sub-system"""
# List exported symbols for doc generation
__all__ = [
"Electrons",
"Kpoints",
"Kmesh",
"Kpath",
"Fillings",
"Basis",
"Wavefunction",
"Davidson",
"CheFSI",
"SCF",
"LCAO",
"xc",
]
from ._electrons import Electrons
from ._kpoints import Kpoints, Kmesh, Kpath
from ._fillings import Fillings
from ._basis import Basis
from ._wavefunction import Wavefunction
from ._davidson import Davidson
from ._chefsi import CheFSI
from ._scf import SCF
from ._lcao import LCAO
from . import xc
| 1.210938 | 1 |
tests/test_secondary_rvs.py | sblunt/orbitize | 60 | 12772722 | import numpy as np
import os
from astropy.time import Time
from pandas import DataFrame
from orbitize.kepler import calc_orbit
from orbitize import read_input, system, sampler
def test_secondary_rv_lnlike_calc():
    """
    Simulate primary-star RVs for a known orbit, feed them back to orbitize
    as data, and check that:

    * the log(likelihood) of the generating parameters equals the pure
      normalization term (i.e. the residuals are exactly zero), and
    * the primary and secondary RV curves returned by the system are related
      by the mass ratio -m1/m0.
    """
    # ground-truth orbital elements, in orbitize's standard ordering
    sma, ecc, inc = 10, 0, np.pi / 4
    aop, pan, tau = 0, 0, 0.3
    m0, m1, plx = 1, 0.1, 10
    true_params = np.array([sma, ecc, inc, aop, pan, tau, plx, m1, m0])

    epochs = Time(np.linspace(2005, 2025, int(1e3)), format='decimalyear').mjd
    _, _, rv_p = calc_orbit(
        epochs, sma, ecc, inc, aop, pan, tau, plx, m0 + m1, mass_for_Kamp=m0
    )

    # write the simulated RVs to a temporary orbitize input file
    sim_data = DataFrame(columns=['epoch', 'object', 'rv', 'rv_err'])
    sim_data.epoch = epochs
    sim_data.object = np.ones(len(epochs), dtype=int)
    sim_data.rv = rv_p
    sim_data.rv_err = np.ones(len(epochs)) * 0.01
    sim_data.to_csv('tmp.csv', index=False)

    # build a fit around the simulated data
    data_table = read_input.read_file('tmp.csv')
    rv_system = system.System(
        1, data_table, m0, plx, mass_err=0.1, plx_err=0.1, fit_secondary_mass=True
    )
    mcmc = sampler.MCMC(rv_system)
    computed_lnlike = mcmc._logl(true_params)

    # zero residuals -> lnlike reduces to the Gaussian normalization term
    assert computed_lnlike == np.sum(
        -np.log(np.sqrt(2 * np.pi * sim_data.rv_err.values**2))
    )

    os.system('rm tmp.csv')

    # the two bodies' RV curves must be scaled copies of each other
    _, _, rv = rv_system.compute_all_orbits(true_params)
    assert np.all(rv[:, 0] == -m1 / m0 * rv[:, 1])
if __name__ == '__main__':
    # allow running this test module directly, outside of pytest
    test_secondary_rv_lnlike_calc()
| 2.28125 | 2 |
evolvability.py | jal278/mazerobot-python | 5 | 12772723 | <gh_stars>1-10
import mazepy
#a function to map a robot's behavior into a grid of niches
def map_into_grid(robot, grid_sz):
    """Map a robot's final maze position into a (col, row) grid niche.

    The endpoint coordinates reported by mazepy are in [0, 1]; each axis is
    discretized into ``grid_sz`` cells (indices 0 .. grid_sz-1).
    """
    end_x = mazepy.feature_detector.endx(robot)
    end_y = mazepy.feature_detector.endy(robot)
    cell = (int(end_x * (grid_sz - 1)), int(end_y * (grid_sz - 1)))
    return cell
#function to calculate evolvability via PLoS paper metric
#do many one-step mutants from initial individual, see how many
#'unique' behaviors we get
def calc_evolvability(robot, grid_sz, mutations):
    """Evolvability metric: number of distinct behavior niches reached.

    Generates ``mutations`` one-step mutants of *robot*, evaluates each in
    the maze, and counts how many unique grid cells their endpoints cover.
    """
    reached = set()
    for _ in range(mutations):
        offspring = robot.copy()
        offspring.mutate()
        offspring.map()
        reached.add(map_into_grid(offspring, grid_sz))
    return len(reached)
if(__name__=='__main__'):
    # initialize maze simulator with the hard maze list (Python 2 script)
    mazepy.mazenav.initmaze("hard_maze_list.txt")
    mazepy.mazenav.random_seed()
    # create initial genome
    robot=mazepy.mazenav()
    # initialize it randomly and mutate it once for good measure
    robot.init_rand()
    robot.mutate()
    # run genome in the maze simulator
    robot.map()
    # calculate evolvability over a 30x30 grid with 200 one-step mutants
    print "evolvability:", calc_evolvability(robot,30,200)
| 3.0625 | 3 |
scripts/region_annotation_tools/util/blob_widget.py | Flipajs/FERDA | 1 | 12772724 | <gh_stars>1-10
import logging
from PyQt4 import QtCore
from PyQt4 import QtGui
import sys
from core.graph.region_chunk import RegionChunk
from core.project.project import Project
import numpy as np
from gui.gui_utils import cvimg2qtpixmap
from gui.segmentation.my_scene import MyScene
from gui.segmentation.my_view import MyView
from gui.segmentation.painter import mask2qimage
from utils.drawing.points import get_contour_without_holes
__author__ = 'simon'
def convex_hull(points):
    """Return the convex hull of 2-D *points* (monotone-chain sweep).

    For fewer than 3 points the input is returned unchanged (as given);
    otherwise an ``np.array`` of hull vertices is returned, interior and
    boundary-collinear points excluded.
    """
    if len(points) < 3: return points

    def cross(a, b, x):
        # z-component of the 2-D cross product of vectors a->x and a->b;
        # <= 0 means x does not make a strict left turn, so b is popped
        return (x[0] - a[0]) * (b[1] - a[1]) - (x[1] - a[1]) * (b[0] - a[0])

    # BUG FIX: the original used the Python-2-only cmp-style call
    # ``sorted(points, comp)`` (a TypeError on Python 3) with a comparator
    # that was inconsistent for equal points. An explicit lexicographic key
    # sort is equivalent and portable.
    pts = sorted(points, key=lambda p: (p[0], p[1]))
    hull = [pts[0], pts[1]]
    # forward pass builds one chain, the reversed pass closes the other
    for p in pts[2:] + list(reversed(pts)):
        while len(hull) > 1 and cross(hull[-2], hull[-1], p) <= 0:
            hull.pop()
        hull.append(p)
    hull.pop()  # the sweep re-appends the starting point; drop the duplicate
    return np.array(hull)
def wedge_product(A, B, X):
    """Scalar 2-D cross product of the vectors A->X and A->B."""
    ux, uy = X[0] - A[0], X[1] - A[1]
    vx, vy = B[0] - A[0], B[1] - A[1]
    return ux * vy - uy * vx
class BlobWidget(QtGui.QWidget):
    """Annotation widget for splitting merged-ant regions ("blobs").

    For each region drawn from ``tracklets``, the user flood-selects one ant
    at a time in the embedded :class:`ImgPainter`; the selected ants'
    contours are handed to ``save_callback`` when moving to the next region.
    """
    # nominal widget dimensions; not referenced inside this class
    width = 1000
    height = 1000
    def __init__(self, project, tracklets, examples_from_tracklet, save_callback, exit_callback, contains_callback, threshold=.1):
        """
        :param project: FERDA project (provides img_manager/gm/rm).
        :param tracklets: tracklets whose regions should be annotated.
        :param examples_from_tracklet: how many regions to sample per tracklet.
        :param save_callback: called as (region_id, frame, tracklet_id, ants).
        :param exit_callback: called when all regions are done or on quit.
        :param contains_callback: (region_id, frame, tracklet_id) -> bool,
            True when the region is already labeled and should be skipped.
        :param threshold: initial flood-fill color-similarity threshold.
        """
        QtGui.QWidget.__init__(self)
        self.save_callback = save_callback
        self.exit_callback = exit_callback
        self.contains_callback = contains_callback
        self.examples_from_tracklet = examples_from_tracklet
        self.project = project
        self.threshold = threshold
        self.img_viewer = ImgPainter(self.project.img_manager, threshold)
        self.region_generator = self.regions_gen(tracklets)
        self.init_gui()
        self.set_threshold(0.1)
        # current region, its tracklet id, and the ants selected so far
        self.r = None
        self.tr_id = None
        self.ants = []
    def next_ant(self):
        """Finalize the currently selected ant: store its contour and reset."""
        bm = (self.img_viewer.get_current_ant_bitmap())
        points = set()
        # np.nonzero yields (row, col); store as (x, y) image coordinates
        a, b = np.nonzero(bm)
        for (y, x) in zip(a, b):
            points.add((x,y))
        # NOTE(review): `hull` is computed but never used below — presumably
        # left over from an earlier contour representation; confirm.
        hull = convex_hull(list(points))
        contour = get_contour_without_holes(np.array(list(points)))
        self.ants.append(contour)
        self.img_viewer.reset_view()
    def next_region(self):
        """Save the finished region and advance to the next unlabeled one."""
        self.img_viewer.reset()
        self.show_current_selected_ants()
        self.save_region()
        self.last_id.setText("Last ID: {0}".format(self.r.id() if self.r is not None else '-'))
        try:
            self.r = next(self.region_generator)
        except StopIteration:
            # all regions processed — hand control back and close the widget
            self.exit_callback()
            self.close()
            return
        self.curr_id.setText("Current ID: {0}".format(self.r.id()))
        self.img_viewer.set_next(self.r)
        self.roi_tickbox.setChecked(True)
    def show_current_selected_ants(self):
        """Debug view: plot the contours collected for the current region."""
        if len(self.ants) == 0: return
        import matplotlib.pyplot as plt
        for c in self.ants:
            plt.plot(c[:, 0], c[:, 1])
        # match image coordinates (origin at top-left)
        plt.gca().invert_yaxis()
        plt.axis('equal')
        plt.show()
    def save_region(self):
        """Persist the collected ant contours via save_callback, then clear."""
        if self.r is not None and len(self.ants) > 0:
            self.save_callback(self.r.id(), self.r.frame_, self.tr_id, self.ants)
        self.ants = []
    def reset_region(self):
        """Discard all ants selected for the current region."""
        self.ants = []
        self.reset_ant()
    def reset_ant(self):
        """Discard the in-progress (not yet confirmed) ant selection."""
        self.img_viewer.reset_view()
    def get_results(self):
        # NOTE(review): `self.results` is never assigned anywhere in this
        # class — calling this raises AttributeError; looks like dead code.
        return self.results
    def toggle_roi(self):
        """Show/hide the ROI overlay according to the checkbox state."""
        if self.roi_tickbox.isChecked():
            self.img_viewer.show_roi()
        else:
            self.img_viewer.hide_roi()
    def toggle_mode(self):
        """Switch the painter between select (GREEN) and exclude (RED) mode."""
        if self.img_viewer.mode == 1:
            self.mode_button.setText("GREEN")
        else:
            self.mode_button.setText("RED")
        self.img_viewer.mode = 1 - self.img_viewer.mode
    def regions_gen(self, tracklets):
        """Yield up to ``examples_from_tracklet`` unlabeled regions per tracklet."""
        for tracklet in tracklets:
            self.tr_id = tracklet.id()
            rch = RegionChunk(tracklet, self.project.gm, self.project.rm)
            # evenly spaced sample indices; assumes Python 2 integer division
            # (under Python 3 `step` would be a float) — file targets PyQt4/Py2
            step = len(rch)/(self.examples_from_tracklet-1)
            if len(rch) < self.examples_from_tracklet:
                idcs = [0]
            else:
                idcs = [i*step for i in range(self.examples_from_tracklet-1)] + [len(rch) - 1]
            for i in idcs:
                region = rch[i]
                if self.contains_callback(region.id(), region.frame, tracklet.id()):
                    logging.info("Skipping region id {0} from tracklet {1} as it is already labeled".format(
                        region.id(), tracklet.id()
                    ))
                else:
                    yield region
    def set_threshold(self, value):
        """Slider callback: map 0-100 slider value to a squared 0-1 threshold."""
        value /= 100.0
        # square the normalized value for finer control at the low end
        value *= value
        self.threshold = value
        self.img_viewer.set_threshold(value)
    def init_gui(self):
        """Build the static layout: slider, checkboxes, buttons, labels, viewer."""
        self.showMaximized()
        self.layout = QtGui.QHBoxLayout()
        self.left_part = QtGui.QWidget()
        self.left_part.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        self.left_part.setLayout(QtGui.QVBoxLayout())
        self.left_part.layout().setAlignment(QtCore.Qt.AlignTop)
        # sensitivity slider (0-100, mapped in set_threshold)
        self.slider = QtGui.QSlider(QtCore.Qt.Horizontal, self)
        self.slider.setRange(0, 100)
        self.slider.setTickInterval(10)
        self.slider.setValue(self.threshold * 100)
        self.slider.setTickPosition(QtGui.QSlider.TicksBelow)
        self.slider.valueChanged[int].connect(self.set_threshold)
        self.roi_tickbox = QtGui.QCheckBox("Roi")
        self.roi_tickbox.clicked.connect(self.toggle_roi)
        self.roi_tickbox.toggled.connect(self.toggle_roi)
        self.buttons = QtGui.QWidget()
        self.buttons.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Expanding)
        self.buttons.setLayout(QtGui.QVBoxLayout())
        self.buttons.layout().setAlignment(QtCore.Qt.AlignBottom)
        self.next_region_button = QtGui.QPushButton('Next Region')
        self.next_region_button.clicked.connect(self.next_region)
        self.next_ant_button = QtGui.QPushButton('Next Ant')
        self.next_ant_button.clicked.connect(self.next_ant)
        self.reset_ant_button = QtGui.QPushButton('Reset Ant')
        self.reset_ant_button.clicked.connect(self.reset_ant)
        self.reset_region_button = QtGui.QPushButton('Reset Region')
        self.reset_region_button.clicked.connect(self.reset_region)
        self.mode_button = QtGui.QPushButton('RED')
        self.mode_button.clicked.connect(self.toggle_mode)
        self.show_selected_button = QtGui.QPushButton('Show selected')
        self.show_selected_button.clicked.connect(self.show_current_selected_ants)
        self.quit = QtGui.QPushButton('save and quit', self)
        self.connect(self.quit, QtCore.SIGNAL('clicked()'), self.exit_callback)
        self.buttons.layout().addWidget(self.mode_button)
        self.buttons.layout().addWidget(self.next_ant_button)
        self.buttons.layout().addWidget(self.reset_ant_button)
        self.buttons.layout().addWidget(self.show_selected_button)
        self.buttons.layout().addWidget(self.next_region_button)
        self.buttons.layout().addWidget(self.reset_region_button)
        self.buttons.layout().addWidget(self.quit)
        self.help = QtGui.QLabel("Scroll to change sensitivity")
        self.curr_id = QtGui.QLabel("")
        self.last_id = QtGui.QLabel("")
        self.left_part.layout().addWidget(self.slider)
        self.left_part.layout().addWidget(self.roi_tickbox)
        self.left_part.layout().addWidget(self.buttons)
        self.left_part.layout().addWidget(self.help)
        self.left_part.layout().addWidget(self.curr_id)
        self.layout.addWidget(self.left_part)
        self.layout.addWidget(self.img_viewer)
        self.setLayout(self.layout)
class ImgPainter(MyView):
    """Interactive scene view implementing flood-fill ant selection.

    The user seeds a scanline flood fill (GREEN mode) or marks pixels as
    excluded (RED mode); the current selection, exclusions, and seed points
    are rendered as translucent pixmap overlays above the region image.
    """
    # maximum possible Euclidean distance between two RGB colors
    # (white vs. black); used to normalize the similarity threshold
    WB_dist = np.math.sqrt(3 * np.math.pow(255, 2))
    img = None
    img_roi = None
    img_pixmap = None
    roi_pixmap = None
    # stacking order of the scene layers
    img_z_value = 0
    roi_z_value = 1
    bitmaps_z_value = 2
    # painting modes: GREEN selects, RED excludes
    GREEN = 1
    RED = 0
    def __init__(self, img_manager, threshold=.1):
        super(ImgPainter, self).__init__()
        self.setMouseTracking(False) #override parent
        self.img_manager = img_manager
        self.scene = MyScene()
        self.setScene(self.scene)
        self.threshold = threshold
        # last flood-fill seed (array coordinates), and all seeds so far
        self.last_x = None
        self.last_y = None
        self.x = []
        self.y = []
        # boolean masks over the image; placeholder arrays here — replaced
        # with properly shaped zeros in set_next()
        self.visited = np.array((0,0))
        self.selected = np.array((0,0))
        self.excluded = np.array((0,0))
        self.tmp = np.array((0,0))
        # overlay pixmaps, created lazily in draw()
        self.bitmask_pixmap = None
        self.exclude_pixmap = None
        self.points_pixmap = None
        self.mode = self.GREEN
        self.pen_size = 1
    def set_threshold(self, threshold):
        """Update the similarity threshold and re-run the last flood fill."""
        # TODO another bug : when user has some points selected, sets threshold and then clicks green point,
        # data are saved
        self.threshold = threshold
        self.update_last()
        self.draw()
    def update_last(self):
        """Re-run the flood fill from the most recent seed point only."""
        if self.last_x is not None and self.last_y is not None:
            self.visited.fill(False)
            self.tmp.fill(False)
            self.floodfill(self.img[self.last_x, self.last_y], self.last_x, self.last_y)
    def update_all(self):
        """Re-run the flood fill from every seed point (e.g. after exclusions)."""
        self.selected.fill(False)
        self.tmp.fill(False)
        for x, y in zip(self.x, self.y):
            self.visited.fill(False)
            self.floodfill(self.img[x, y], x, y)
    def save_results(self):
        """Merge the in-progress fill into the selection, minus exclusions."""
        self.selected = (self.selected | self.tmp) & (1 - self.excluded)
    def get_current_ant_bitmap(self):
        """Return the finalized boolean mask of the currently selected ant."""
        self.save_results()
        return self.selected
    def reset_view(self):
        """Clear all selection state and redraw the overlays."""
        self.reset()
        self.draw()
    def reset(self):
        """Clear seeds and all masks (shapes are preserved)."""
        self.x = []
        self.y = []
        self.last_x = None
        self.last_y = None
        self.visited.fill(False)
        self.selected.fill(False)
        self.excluded.fill(False)
        self.tmp.fill(False)
        # self.bitmask_pixmap = None
        # self.exclude_pixmap = None
        # self.points_pixmap = None
    def floodfill(self, color, x, y):
        """Scanline flood fill into self.tmp from seed (x, y) matching *color*."""
        stack = [self.find_line_segment(color, x, y)]
        while len(stack) > 0:
            segment = stack.pop()
            x = segment[0]
            # probe the rows above and below every column of the segment
            for y in range(segment[1], segment[2] + 1):
                if x - 1 >= 0:
                    if self.is_similar(color, x - 1, y):
                        stack.append(self.find_line_segment(color, x - 1, y))
                    else:
                        self.visited[x - 1, y] = True
                if x + 1 < self.img.shape[0]:
                    if self.is_similar(color, x + 1, y):
                        stack.append(self.find_line_segment(color, x + 1, y))
                    else:
                        self.visited[x + 1, y] = True
    def find_line_segment(self, color, x, y):
        """Expand left/right from (x, y) along row x; mark matches in self.tmp.

        Returns (x, y1, y2): the row and the inclusive column range found.
        """
        y1 = y2 = y
        self.visited[x, y] = True
        self.tmp[x, y] = True
        while y1 - 1 >= 0:
            self.visited[x, y1] = True
            if self.is_similar(color, x, y1 - 1):
                self.tmp[x, y1] = True
                y1 -= 1
            else:
                break
        while y2 + 1 < self.img.shape[1]:
            self.visited[x, y2] = True
            if self.is_similar(color, x, y2 + 1):
                self.tmp[x, y2] = True
                y2 += 1
            else:
                break
        return x, y1, y2
    def is_similar(self, color, x, y):
        """Pixel is a fill candidate: unvisited, not excluded, color-close."""
        return not self.visited[x, y] and not self.excluded[x, y] and self.are_colors_similar(color, self.img[x, y])
    def are_colors_similar(self, color1, color2):
        # euclidian distance, compared against threshold * max possible distance
        dist = 0
        for c1, c2 in zip(color1, color2):
            dist += np.math.pow(int(c1) - int(c2), 2)
        return self.threshold * self.WB_dist >= np.math.sqrt(dist)
    def add_point(self, point):
        """Register a clicked point as a fill seed (GREEN) or exclusion (RED)."""
        if type(point) == QtCore.QPointF:
            point = point.toPoint()
        # different canvas and array indexing
        x = point.x()
        y = point.y()
        x, y = y, x
        if self.mode == self.GREEN:
            if not self.excluded[x, y]:
                self.last_x = x
                self.last_y = y
                self.x.append(x)
                self.y.append(y)
                self.update_last()
        else:
            self.excluded[x, y] = True
            # exclusions can cut existing fills, so recompute from all seeds
            self.update_all()
        self.draw()
    def show_img(self):
        """Drop the image and overlays to the base z-level (hide ROI effect)."""
        self.img_pixmap.setZValue(self.img_z_value)
        self.exclude_pixmap.setZValue(self.img_z_value)
        self.points_pixmap.setZValue(self.img_z_value)
    def show_roi(self):
        """Raise the ROI pixmap above the plain image."""
        self.roi_pixmap.setZValue(self.roi_z_value)
    def hide_roi(self):
        """Sink the ROI pixmap below the image so it is not visible."""
        self.roi_pixmap.setZValue(-1)
    def set_next(self, region):
        """Load crops for *region* and allocate fresh masks matching their size."""
        self.scene.clear()
        self.exclude_pixmap = None
        self.bitmask_pixmap = None
        self.points_pixmap = None
        self.img = self.img_manager.get_crop(region.frame(), region,
                                             default_color=(255, 255, 255, 0))
        self.img_roi = self.img_manager.get_crop(region.frame(), region)
        self.visited = np.zeros(self.img.shape[:2], dtype=bool)
        self.selected = np.zeros(self.img.shape[:2], dtype=bool)
        self.excluded = np.zeros(self.img.shape[:2], dtype=bool)
        self.tmp = np.zeros(self.img.shape[:2], dtype=bool)
        self.img_pixmap = self.scene.addPixmap(cvimg2qtpixmap(self.img))
        self.roi_pixmap = self.scene.addPixmap(cvimg2qtpixmap(self.img_roi))
        self.fitInView(self.scene.sceneRect(), QtCore.Qt.KeepAspectRatio)
    def draw_bitmask(self, mask, r=0, g=0, b=0):
        """Render *mask* as a translucent single-color pixmap; return the item."""
        r = np.asarray(mask * r, dtype=np.uint8)
        g = np.asarray(mask * g, dtype=np.uint8)
        b = np.asarray(mask * b, dtype=np.uint8)
        a = np.full_like(mask, 80, dtype=np.uint8)
        rgb = np.dstack((r, g, b, a))
        qimg = mask2qimage(mask, rgb)
        pixmap = self.scene.addPixmap(QtGui.QPixmap.fromImage(qimg))
        return pixmap
    def draw_points(self, x, y):
        """Render the seed points as opaque blue pixels; return the pixmap item."""
        # NOTE(review): r and g deliberately alias the same zero array (they
        # are never written); only the blue channel is set below.
        r = g = np.zeros_like(self.selected, dtype=np.uint8)
        b = np.zeros_like(self.selected, dtype=np.uint8)
        mask = np.zeros_like(self.selected)
        # loop variables intentionally shadow the x/y parameters
        for x, y in zip(x, y):
            mask[x, y] = True
            b[x, y] = 255
        a = np.full_like(mask, 255, dtype=np.uint8)
        rgb = np.dstack((r, g, b, a))
        qimg = mask2qimage(mask, rgb)
        pixmap = self.scene.addPixmap(QtGui.QPixmap.fromImage(qimg))
        return pixmap
    def draw(self):
        """Rebuild the selection/exclusion/seed overlays above the image."""
        if self.bitmask_pixmap is not None:
            self.scene.removeItem(self.bitmask_pixmap)
        if self.exclude_pixmap is not None:
            self.scene.removeItem(self.exclude_pixmap)
        if self.points_pixmap is not None:
            self.scene.removeItem(self.points_pixmap)
        self.bitmask_pixmap = self.draw_bitmask((self.selected | self.tmp) & (1 - self.excluded), g=255)
        self.bitmask_pixmap.setZValue(self.bitmaps_z_value)
        self.exclude_pixmap = self.draw_bitmask(self.excluded, r=255)
        self.exclude_pixmap.setZValue(self.bitmaps_z_value)
        self.points_pixmap = self.draw_points(self.x, self.y)
        self.points_pixmap.setZValue(self.bitmaps_z_value)
    def mousePressEvent(self, e):
        """On click inside the scene, commit the previous fill and add a seed."""
        super(ImgPainter, self).mousePressEvent(e)
        point = self.mapToScene(e.pos())
        if self.scene.itemsBoundingRect().contains(point):
            if self.mode == self.GREEN:
                self.save_results()
            self.add_point(point)
    def mouseMoveEvent(self, e):
        """Dragging behaves like repeated clicks (mouse tracking is off)."""
        super(ImgPainter, self).mouseMoveEvent(e)
        point = self.mapToScene(e.pos())
        if self.scene.itemsBoundingRect().contains(point):
            if self.mode == self.GREEN:
                self.save_results()
            self.add_point(point)
    def mouseReleaseEvent(self, QMouseEvent):
        """Redraw the overlays once the interaction ends."""
        self.draw()
    def wheelEvent(self, event):
        """Mouse wheel adjusts the fill sensitivity (unless Ctrl is held)."""
        modifiers = QtGui.QApplication.keyboardModifiers()
        if modifiers != QtCore.Qt.ControlModifier:
            # work in sqrt-space so each notch moves the slider-equivalent by 0.01
            val = np.sqrt(self.threshold)
            if event.delta() > 0:
                val += 0.01
            else:
                val -= 0.01
            self.set_threshold(max(0, min(1, val * val)))
        # TODO disabled for the time being due to the bug
        # else:
        #     self.zoomAction(event, scale_factor=1.04)
if __name__ == "__main__":
    # manual test harness against a local FERDA project
    logging.basicConfig(level=logging.INFO)
    project = Project()
    project.load("/home/simon/FERDA/projects/Cam1_/cam1.fproj")
    chunks = project.gm.chunk_list()
    # indices of tracklets known to contain merged ants ("clusters")
    chunks_with_clusters = [6, 10, 12, 13, 17, 18, 26, 28, 29, 32, 37, 39, 40, 41, 43, 47, 51, 54, 57, 58, 60, 61, 65,
                            67, 69, 73, 75, 78, 81, 84, 87, 90, 93, 94, 96, 99, 102, 105]
    chunks_with_clusters = [chunks[x] for x in chunks_with_clusters]
    app = QtGui.QApplication(sys.argv)
    # NOTE(review): BlobWidget.__init__ requires examples_from_tracklet and
    # three callbacks in addition to these two arguments — this call would
    # raise TypeError as written; confirm intended usage.
    gt = BlobWidget(project, chunks_with_clusters)
    gt.show()
    app.exec_()
| 2.125 | 2 |
msspeak/msspeak.py | afkmamunbd/python-msspeak | 0 | 12772725 | <gh_stars>0
# -*- coding: utf-8 -*-
import requests
import datetime
import os
from voices import find_voice
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class AccessError(Exception):
    """Raised when the token endpoint rejects the request.

    Keeps the HTTP status code and surfaces the service's error message.
    """
    def __init__(self, response):
        self.status_code = response.status_code
        payload = response.json()
        super(AccessError, self).__init__(payload["message"])
class ArgumentOutOfRangeException(Exception):
    """Service-side argument error, with the exception-name prefix stripped."""
    def __init__(self, message):
        cleaned = message.replace('ArgumentOutOfRangeException: ', '')
        self.message = cleaned
        super(ArgumentOutOfRangeException, self).__init__(cleaned)
class TranslateApiException(Exception):
    """Translate API error, with the exception-name prefix stripped."""
    def __init__(self, message, *args):
        cleaned = message.replace('TranslateApiException: ', '')
        self.message = cleaned
        super(TranslateApiException, self).__init__(cleaned, *args)
class LanguageException(Exception):
    """Raised for invalid language/gender combinations; message coerced to str."""
    def __init__(self, message):
        text = str(message)
        self.message = text
        super(LanguageException, self).__init__(text)
class AccessToken(object):
    """requests auth helper: fetches and caches a Cognitive Services bearer
    token, refreshing it shortly before the service-side expiry."""

    access_url = "https://eastus.api.cognitive.microsoft.com/sts/v1.0/issueToken"
    # tokens are valid for 10 minutes on the service side; refresh after 9
    expire_delta = datetime.timedelta(minutes=9)

    def __init__(self, subscription_key, region='eastus'):
        if region != 'eastus':
            # substitute the requested region into the token endpoint
            self.access_url = self.access_url.replace('eastus', region)
        self.subscription_key = subscription_key
        self._token = None
        self._expdate = None

    def __call__(self, r):
        # requests auth hook: attach the (possibly refreshed) bearer token
        r.headers['Authorization'] = "Bearer " + self.token
        return r

    def request_token(self):
        """Fetch a fresh token; raise AccessError on a non-200 response."""
        headers = {'Ocp-Apim-Subscription-Key': self.subscription_key}
        resp = requests.post(self.access_url, headers=headers)
        if resp.status_code != 200:
            raise AccessError(resp)
        self._token = resp.text
        self._expdate = datetime.datetime.now() + self.expire_delta

    @property
    def expired(self):
        return datetime.datetime.now() > self._expdate

    @property
    def token(self):
        # lazily fetch, and re-fetch once the cached token has aged out
        if not self._token or self.expired:
            self.request_token()
        return self._token
class Speech(object):
    """
    Thin client for the Azure/Bing Speech text-to-speech REST service.

    Handles bearer-token authentication via :class:`AccessToken` and builds
    the SSML request body.
    """
    region = 'eastus'
    api_url = "https://eastus.tts.speech.microsoft.com/cognitiveservices/v1"

    def __init__(self, subscription_key, region='eastus'):
        """
        :param subscription_key: Cognitive Services subscription key.
        :param region: Azure region, substituted into the endpoint URL.
        """
        if region != 'eastus':
            self.api_url = self.api_url.replace('eastus', region)
        self.auth = AccessToken(subscription_key, region)

    def make_request(self, headers, data):
        """POST the SSML payload to the TTS endpoint; return the raw response."""
        resp = requests.post(self.api_url, auth=self.auth, headers=headers, data=data)
        return resp

    def speak(self, text, lang, gender, format):
        """
        Synthesize *text* and return the HTTP response (audio in ``.content``).

        :param text: text to be converted to speech.
        :param lang: BCP-47 locale; defaults to 'en-US' when falsy.
        :param gender: voice gender; defaults to 'Female' when falsy.
        :param format: value for the X-Microsoft-OutputFormat header;
            defaults to 'riff-8khz-8bit-mono-mulaw' when falsy.
        :raises LanguageException: when no voice matches lang/gender.
        """
        if not gender:
            gender = 'Female'
        else:
            gender = gender.capitalize()
        if not lang:
            lang = 'en-US'
        voice = find_voice(locale=lang, gender=gender, neural=True)
        if not voice:
            raise LanguageException("Invalid language/gender combination: %s, %s" % (lang, gender))
        if not format:
            format = 'riff-8khz-8bit-mono-mulaw'
        headers = {
            "Content-type": "application/ssml+xml; charset=utf-8",
            "X-Microsoft-OutputFormat": format,
            "User-Agent": "TTSForPython"
        }
        body = "<speak version='1.0' xml:lang='%s'><voice xml:lang='%s' xml:gender='%s' name='%s'>%s</voice></speak>" % (lang, lang, gender, voice['Name'], str(text))
        return self.make_request(headers, body)

    def speak_to_file(self, file, *args, **kwargs):
        """
        Synthesize speech (same arguments as :meth:`speak`) and write the
        audio bytes to *file* — either a filesystem path string or a
        file-like object exposing ``write``.

        :raises ValueError: when *file* is neither a path nor file-like.
        """
        resp = self.speak(*args, **kwargs)
        if isinstance(file, str):
            # BUG FIX: the handle returned by open() was discarded and
            # ``write`` was called on the *path string* itself, raising
            # AttributeError. Bind the handle and write to it instead.
            with open(file, 'wb') as f:
                f.write(resp.content)
        elif hasattr(file, 'write'):
            file.write(resp.content)
        else:
            raise ValueError('Expected filepath or a file-like object')
class MSSpeak(object):
    """High-level TTS helper that caches synthesized audio files on disk.

    Filenames are derived from a hash of (text, lang, gender, format) so a
    repeated request can be served from the cache directory.
    """
    # class-level default; set_cache() overrides it per instance
    cache = True

    def __init__(self, subscription_key, directory='', region='eastus'):
        """
        :param subscription_key: Cognitive Services subscription key.
        :param directory: output directory prefix (include trailing slash).
        :param region: Azure region passed through to :class:`Speech`.
        """
        self.speech = Speech(subscription_key, region=region)
        self.tts_engine = 'bing_speech'
        self.directory = directory
        self.filename = None

    def set_cache(self, value=True):
        """
        Enable/disable reuse of previously synthesized files: when enabled
        and the target file already exists, synthesis is skipped.
        """
        self.cache = value

    def speak(self, textstr, lang='en-US', gender='female', format='riff-16khz-16bit-mono-pcm'):
        """
        Synthesize *textstr* via the Speech service and return the generated
        filename (relative to ``self.directory``).

        NOTE(review): the filename always carries an ``.mp3`` suffix even
        for RIFF/WAV formats — kept for cache-key compatibility; confirm.
        """
        concatkey = '%s-%s-%s-%s' % (textstr, lang.lower(), gender.lower(), format)
        # NB: str hashing is salted per interpreter run on Python 3, so the
        # cache is only stable within a single process
        key = self.tts_engine + str(hash(concatkey))
        self.filename = '%s-%s.mp3' % (key, lang)
        fileloc = self.directory + self.filename
        # serve from cache when enabled and the file already exists
        if self.cache and os.path.isfile(fileloc):
            return self.filename
        with open(fileloc, 'wb') as f:
            self.speech.speak_to_file(f, textstr, lang, gender, format)
        # BUG FIX: removed the unreachable `return False` that followed this
        # return in the original
        return self.filename
if __name__ == "__main__":
    # demo: synthesize a sample sentence to /tmp (replace <KEY> with a real key)
    subscription_key = '<KEY>'
    # NOTE(review): this Speech instance is never used below — MSSpeak builds
    # its own; presumably left over from an earlier version of the demo.
    speech = Speech(subscription_key)
    # format = 'riff-16khz-16bit-mono-pcm'
    format = 'audio-16khz-64kbitrate-mono-mp3'
    lang = 'en-GB'
    gender = 'Female'
    tts_msspeak = MSSpeak(subscription_key, '/tmp/')
    output_filename = tts_msspeak.speak("<NAME> Picked a peck of pickled peppers. Complicated words like R.N.I.B., macular degeneration, diabetes and retinitis pigmentosa could also be pronounced.", lang, gender, format)
    print ("Recorded TTS to /tmp/%s" % output_filename)
| 2.1875 | 2 |
auxiclean/unittests/test_excel_manager.py | physumasso/auxiclean | 1 | 12772726 | <filename>auxiclean/unittests/test_excel_manager.py
from .test_selector import TestBase
from openpyxl import load_workbook
from auxiclean import Selector
from auxiclean.exceptions import ExcelError
from collections import OrderedDict
class TestExcelManager(TestBase):
    """Integration tests for writing the selection result back into the
    workbook as a "Distribution" sheet."""

    # two different courses
    courses = OrderedDict(
        {"Electro": {"code": "1441",
                     "disponibilities": 1,
                     "discipline": "générale"},
         "Astro": {"code": "2710",
                   "disponibilities": 1,
                   "discipline": "générale"}})

    # two candidates, each applying for a different course -> no conflict
    candidates = {
        "<NAME>": {"maximum": 2,
                   "scolarity": 2,
                   "courses given": ["1441", "2710", "2710",
                                     "2710", "2710", "1620"],
                   "nobels": 0,
                   "discipline": "générale",
                   "choices": ["1441", ],
                   "gpa": 2.6},
        "<NAME>": {"maximum": 2,
                   "scolarity": 3,
                   "courses given": ["1651", "3131"],
                   "nobels": 0,
                   "discipline": "générale",
                   "choices": ["2710", ],
                   "gpa": 3.0}}

    def test_writing_distribution(self):
        """Running the selector must append a 'Distribution' sheet that maps
        each course code to its selected candidate."""
        self.selector = Selector(self.data_path)
        workbook = load_workbook(self.data_path)
        self.assertIn("Distribution", workbook.sheetnames)
        sheet = workbook["Distribution"]
        # header row comes first
        self.assertEqual(sheet["A1"].value.lower(), "sigle")
        self.assertEqual(sheet["B1"].value.lower(), "distribution")
        # subsequent rows hold known course codes with their selections
        every_code = [course["code"] for course in self.courses.values()]
        self.assertIn(sheet["A2"].value, every_code)
        self.assertIn(sheet["A3"].value, every_code)
        # only two courses, so row 4 must stay empty
        self.assertIs(sheet["A4"].value, None)
        for row in sheet.iter_rows(min_col=1, max_col=2, min_row=2, max_row=3):
            if row[0].value == "1441":
                self.assertEqual(row[1].value, "<NAME>")
            else:
                self.assertEqual(row[1].value, "<NAME>")

    def test_distribution_sheet_already_exists(self):
        """A pre-existing 'Distribution' sheet must not be overwritten; the
        selector writes to 'Distribution1' instead."""
        workbook = load_workbook(self.data_path)
        sheet = workbook.create_sheet("Distribution")
        safe_string = "Check that this string is not erased."
        sheet["A1"].value = safe_string
        workbook.save(self.data_path)
        del workbook
        # run the selector against the modified workbook
        self.selector = Selector(self.data_path)
        workbook = load_workbook(self.data_path)
        self.assertIn("Distribution1", workbook.sheetnames)
        # the original sheet content survived untouched
        sheet = workbook["Distribution"]
        self.assertEqual(sheet["A1"].value, safe_string)
        del workbook
class TestExcelCandidateChoiceError(TestBase):
    """The selector must reject input where a candidate requests a course
    code that does not exist in the course list."""

    # two different courses
    courses = OrderedDict(
        {"Electro": {"code": "1441",
                     "disponibilities": 1,
                     "discipline": "générale"},
         "Astro": {"code": "2710",
                   "disponibilities": 1,
                   "discipline": "générale"}})

    # two candidates with non-conflicting choices, except that one choice
    # ("1234") is not among the declared courses
    candidates = {
        "<NAME>": {"maximum": 2,
                   "scolarity": 2,
                   "courses given": ["1441", "2710", "2710",
                                     "2710", "2710", "1620"],
                   "nobels": 0,
                   "discipline": "générale",
                   "choices": ["1441", "1234"],  # not in courses
                   "gpa": 2.6},
        "<NAME>": {"maximum": 2,
                   "scolarity": 3,
                   "courses given": ["1651", "3131"],
                   "nobels": 0,
                   "discipline": "générale",
                   "choices": ["2710", ],
                   "gpa": 3.0}}

    def test_raise_choice_error(self):
        """Constructing the Selector over bad data must raise ExcelError."""
        with self.assertRaises(ExcelError):
            self.selector = Selector(self.data_path)
| 2.734375 | 3 |
Chapter 02/PieGame.py | smartdong/PythonPractise | 0 | 12772727 | import math
import pygame
import sys
from pygame.locals import *
# Flat pygame script: draw quarter-circle "pie" pieces 1-4 as the matching
# number keys are pressed; the outline turns green when all four are lit.
pygame.init()
screen = pygame.display.set_mode((600,500))
pygame.display.set_caption("The Pie Game - Press 1,2,3,4")
myfont = pygame.font.Font(None, 60)
# drawing parameters: outline color, stroke width, circle center and radius
color = 200, 80, 60
width = 4
x = 300
y = 250
radius = 200
# bounding rect of the circle, as (left, top, width, height)
position = x-radius, y-radius, radius*2, radius*2
# which quadrants have been activated so far
piece1 = False
piece2 = False
piece3 = False
piece4 = False
# main event/render loop (runs until QUIT or Escape)
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            sys.exit()
        elif event.type == KEYUP:
            if event.key == pygame.K_ESCAPE:
                sys.exit()
            elif event.key == pygame.K_1:
                piece1 = True
            elif event.key == pygame.K_2:
                piece2 = True
            elif event.key == pygame.K_3:
                piece3 = True
            elif event.key == pygame.K_4:
                piece4 = True
    screen.fill((0,0,200))
    # draw the quadrant labels (1: top-right, 2: top-left, 3: bottom-left,
    # 4: bottom-right)
    textImg1 = myfont.render("1", True, color)
    screen.blit(textImg1, (x+radius/2-20, y-radius/2))
    textImg2 = myfont.render("2", True, color)
    screen.blit(textImg2, (x-radius/2, y-radius/2))
    textImg3 = myfont.render("3", True, color)
    screen.blit(textImg3, (x-radius/2, y+radius/2-20))
    textImg4 = myfont.render("4", True, color)
    screen.blit(textImg4, (x+radius/2-20, y+radius/2-20))
    # each activated piece is an arc plus its two bounding radii
    if piece1:
        start_angle = math.radians(0)
        end_angle = math.radians(90)
        pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
        pygame.draw.line(screen, color, (x,y), (x,y-radius), width)
        pygame.draw.line(screen, color, (x,y), (x+radius,y), width)
    if piece2:
        start_angle = math.radians(90)
        end_angle = math.radians(180)
        pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
        pygame.draw.line(screen, color, (x,y), (x,y-radius), width)
        pygame.draw.line(screen, color, (x,y), (x-radius,y), width)
    if piece3:
        start_angle = math.radians(180)
        end_angle = math.radians(270)
        pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
        pygame.draw.line(screen, color, (x,y), (x-radius,y), width)
        pygame.draw.line(screen, color, (x,y), (x,y+radius), width)
    if piece4:
        start_angle = math.radians(270)
        end_angle = math.radians(360)
        pygame.draw.arc(screen, color, position, start_angle, end_angle, width)
        pygame.draw.line(screen, color, (x,y), (x,y+radius), width)
        pygame.draw.line(screen, color, (x,y), (x+radius,y), width)
    # win condition: all four pieces pressed -> switch outline color to green
    if piece1 and piece2 and piece3 and piece4:
        color = 0,255,0
    pygame.display.update()
| 3.28125 | 3 |
tests/classification/test_hamming_distance.py | awaelchli/metrics | 1 | 12772728 | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import torch
from sklearn.metrics import hamming_loss as sk_hamming_loss
from tests.classification.inputs import _input_binary, _input_binary_prob
from tests.classification.inputs import _input_multiclass as _input_mcls
from tests.classification.inputs import _input_multiclass_prob as _input_mcls_prob
from tests.classification.inputs import _input_multidim_multiclass as _input_mdmc
from tests.classification.inputs import _input_multidim_multiclass_prob as _input_mdmc_prob
from tests.classification.inputs import _input_multilabel as _input_mlb
from tests.classification.inputs import _input_multilabel_multidim as _input_mlmd
from tests.classification.inputs import _input_multilabel_multidim_prob as _input_mlmd_prob
from tests.classification.inputs import _input_multilabel_prob as _input_mlb_prob
from tests.helpers.testers import THRESHOLD, MetricTester
from torchmetrics import HammingDistance
from torchmetrics.classification.checks import _input_format_classification
from torchmetrics.functional import hamming_distance
torch.manual_seed(42)
def _sk_hamming_loss(preds, target):
    """Reference hamming loss computed with scikit-learn on formatted inputs."""
    formatted_preds, formatted_target, _ = _input_format_classification(preds, target, threshold=THRESHOLD)
    np_preds = formatted_preds.numpy()
    np_target = formatted_target.numpy()
    # Flatten everything except the sample dimension before handing off to sklearn.
    np_preds = np_preds.reshape(np_preds.shape[0], -1)
    np_target = np_target.reshape(np_target.shape[0], -1)
    return sk_hamming_loss(y_true=np_target, y_pred=np_preds)
@pytest.mark.parametrize(
    "preds, target",
    [
        (_input_binary_prob.preds, _input_binary_prob.target),
        (_input_binary.preds, _input_binary.target),
        (_input_mlb_prob.preds, _input_mlb_prob.target),
        (_input_mlb.preds, _input_mlb.target),
        (_input_mcls_prob.preds, _input_mcls_prob.target),
        (_input_mcls.preds, _input_mcls.target),
        (_input_mdmc_prob.preds, _input_mdmc_prob.target),
        (_input_mdmc.preds, _input_mdmc.target),
        (_input_mlmd_prob.preds, _input_mlmd_prob.target),
        (_input_mlmd.preds, _input_mlmd.target),
    ],
)
class TestHammingDistance(MetricTester):
    """Checks HammingDistance against the sklearn reference for every input type."""

    @pytest.mark.parametrize("ddp", [True, False])
    @pytest.mark.parametrize("dist_sync_on_step", [False, True])
    def test_hamming_distance_class(self, ddp, dist_sync_on_step, preds, target):
        """Exercise the module (class-based) interface."""
        metric_kwargs = {"threshold": THRESHOLD}
        self.run_class_metric_test(
            ddp=ddp,
            preds=preds,
            target=target,
            metric_class=HammingDistance,
            sk_metric=_sk_hamming_loss,
            dist_sync_on_step=dist_sync_on_step,
            metric_args=metric_kwargs,
        )

    def test_hamming_distance_fn(self, preds, target):
        """Exercise the functional interface."""
        metric_kwargs = {"threshold": THRESHOLD}
        self.run_functional_metric_test(
            preds,
            target,
            metric_functional=hamming_distance,
            sk_metric=_sk_hamming_loss,
            metric_args=metric_kwargs,
        )
@pytest.mark.parametrize("threshold", [1.5])
def test_wrong_params(threshold):
    """An out-of-range threshold must raise ValueError in both interfaces."""
    preds = _input_mcls_prob.preds
    target = _input_mcls_prob.target

    with pytest.raises(ValueError):
        metric = HammingDistance(threshold=threshold)
        metric(preds, target)
        metric.compute()

    with pytest.raises(ValueError):
        hamming_distance(preds, target, threshold=threshold)
| 2.109375 | 2 |
General_Laplace.py | Blue-Giant/SubspaceDNN_tf1Class | 1 | 12772729 | import tensorflow as tf
import numpy as np
def get_infos2Laplace_1D(input_dim=1, out_dim=1, intervalL=0.0, intervalR=1.0, equa_name=None):
    """Return (rhs, true solution, left/right boundary) lambdas for a 1D Laplace test problem.

    Args:
        input_dim: input dimension (kept for interface symmetry; unused here).
        out_dim: output dimension (kept for interface symmetry; unused here).
        intervalL: left endpoint of the interval.
        intervalR: right endpoint of the interval.
        equa_name: equation identifier; only 'PDE1' is defined.

    Returns:
        Tuple (fside, utrue, uleft, uright) of TensorFlow lambdas for 'PDE1';
        implicitly None for any other equa_name.
    """
    # -uxx = f
    if equa_name == 'PDE1':
        # u=sin(pi*x), f=-pi*pi*sin(pi*x)
        # NOTE(review): for -u'' = f with u = sin(pi*x) the rhs should be
        # +pi^2*sin(pi*x); the sign below matches the comment above but not
        # the stated equation "-uxx = f" -- confirm the solver's sign convention.
        fside = lambda x: -(np.pi)*(np.pi)*tf.sin(np.pi*x)
        utrue = lambda x: tf.sin(np.pi*x)
        # Boundary lambdas evaluate the true solution at the endpoints; the
        # argument x is ignored (constant values).
        uleft = lambda x: tf.sin(np.pi*intervalL)
        uright = lambda x: tf.sin(np.pi*intervalR)
        return fside, utrue, uleft, uright
# PDE problem data: boundary conditions, initial condition, true solution, right-hand-side function
def get_infos2Laplace_2D(input_dim=1, out_dim=1, left_bottom=0.0, right_top=1.0, equa_name=None):
    """Return problem data for a 2D Laplace test case on a square domain.

    Args:
        input_dim: input dimension (kept for interface symmetry; unused here).
        out_dim: output dimension (kept for interface symmetry; unused here).
        left_bottom: coordinate of the left/bottom edge of the square domain.
        right_top: coordinate of the right/top edge of the square domain.
        equa_name: equation identifier, one of 'PDE1' ... 'PDE7'.

    Returns:
        Tuple (f_side, u_true, ux_left, ux_right, uy_bottom, uy_top) of
        TensorFlow lambdas, or implicitly None if equa_name is not recognized.
    """
    if equa_name == 'PDE1':
        # u=exp(-x)(x_y^3), f = -exp(-x)(x-2+y^3+6y)
        f_side = lambda x, y: -(tf.exp(-1.0*x)) * (x - 2 + tf.pow(y, 3) + 6 * y)
        u_true = lambda x, y: (tf.exp(-1.0*x))*(x + tf.pow(y, 3))
        ux_left = lambda x, y: tf.exp(-left_bottom) * (tf.pow(y, 3) + 1.0 * left_bottom)
        ux_right = lambda x, y: tf.exp(-right_top) * (tf.pow(y, 3) + 1.0 * right_top)
        uy_bottom = lambda x, y: tf.exp(-x) * (tf.pow(left_bottom, 3) + x)
        uy_top = lambda x, y: tf.exp(-x) * (tf.pow(right_top, 3) + x)
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
    elif equa_name == 'PDE2':
        # u=y^2*sin(pi*x)
        f_side = lambda x, y: (-1.0)*tf.sin(np.pi*x) * (2 - np.square(np.pi)*tf.square(y))
        u_true = lambda x, y: tf.square(y)*tf.sin(np.pi*x)
        ux_left = lambda x, y: tf.square(y) * tf.sin(np.pi * left_bottom)
        ux_right = lambda x, y: tf.square(y) * tf.sin(np.pi * right_top)
        uy_bottom = lambda x, y: tf.square(left_bottom) * tf.sin(np.pi * x)
        uy_top = lambda x, y: tf.square(right_top) * tf.sin(np.pi * x)
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
    elif equa_name == 'PDE3':
        # u=exp(x+y), f = -2*exp(x+y)
        f_side = lambda x, y: -2.0*(tf.exp(x)*tf.exp(y))
        u_true = lambda x, y: tf.exp(x)*tf.exp(y)
        ux_left = lambda x, y: tf.multiply(tf.exp(y), tf.exp(left_bottom))
        ux_right = lambda x, y: tf.multiply(tf.exp(y), tf.exp(right_top))
        uy_bottom = lambda x, y: tf.multiply(tf.exp(x), tf.exp(left_bottom))
        uy_top = lambda x, y: tf.multiply(tf.exp(x), tf.exp(right_top))
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
    elif equa_name == 'PDE4':
        # u=(1/4)*(x^2+y^2), f = -1
        f_side = lambda x, y: -1.0*tf.ones_like(x)
        u_true = lambda x, y: 0.25*(tf.pow(x, 2)+tf.pow(y, 2))
        ux_left = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(left_bottom, 2)
        ux_right = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(right_top, 2)
        uy_bottom = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(left_bottom, 2)
        uy_top = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(right_top, 2)
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
    elif equa_name == 'PDE5':
        # u=(1/4)*(x^2+y^2)+x+y, f = -1
        f_side = lambda x, y: -1.0*tf.ones_like(x)
        u_true = lambda x, y: 0.25*(tf.pow(x, 2)+tf.pow(y, 2)) + x + y
        ux_left = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(left_bottom, 2) + left_bottom + y
        ux_right = lambda x, y: 0.25 * tf.pow(y, 2) + 0.25 * tf.pow(right_top, 2) + right_top + y
        # Bug fix: the 0.25 factor on tf.pow(left_bottom, 2) was missing here,
        # which made uy_bottom inconsistent with u_true and the other three
        # boundary functions of this case.
        uy_bottom = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(left_bottom, 2) + left_bottom + x
        uy_top = lambda x, y: 0.25 * tf.pow(x, 2) + 0.25 * tf.pow(right_top, 2) + right_top + x
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
    elif equa_name == 'PDE6':
        # u=(1/2)*(x^2)*(y^2), f = -(x^2+y^2)
        f_side = lambda x, y: -1.0*(tf.pow(x, 2)+tf.pow(y, 2))
        u_true = lambda x, y: 0.5 * (tf.pow(x, 2) * tf.pow(y, 2))
        ux_left = lambda x, y: 0.5 * (tf.pow(left_bottom, 2) * tf.pow(y, 2))
        ux_right = lambda x, y: 0.5 * (tf.pow(right_top, 2) * tf.pow(y, 2))
        uy_bottom = lambda x, y: 0.5 * (tf.pow(x, 2) * tf.pow(left_bottom, 2))
        uy_top = lambda x, y: 0.5 * (tf.pow(x, 2) * tf.pow(right_top, 2))
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
    elif equa_name == 'PDE7':
        # u=(1/2)*(x^2)*(y^2)+x+y, f = -(x^2+y^2)
        f_side = lambda x, y: -1.0*(tf.pow(x, 2)+tf.pow(y, 2))
        u_true = lambda x, y: 0.5*(tf.pow(x, 2)*tf.pow(y, 2)) + x*tf.ones_like(x) + y*tf.ones_like(y)
        ux_left = lambda x, y: 0.5 * tf.multiply(tf.pow(left_bottom, 2), tf.pow(y, 2)) + left_bottom + y
        ux_right = lambda x, y: 0.5 * tf.multiply(tf.pow(right_top, 2), tf.pow(y, 2)) + right_top + y
        uy_bottom = lambda x, y: 0.5 * tf.multiply(tf.pow(x, 2), tf.pow(left_bottom, 2)) + x + left_bottom
        uy_top = lambda x, y: 0.5 * tf.multiply(tf.pow(x, 2), tf.pow(right_top, 2)) + x + right_top
        return f_side, u_true, ux_left, ux_right, uy_bottom, uy_top
# PDE problem data: boundary conditions, initial condition, true solution, right-hand-side function
def get_infos2Laplace_3D(input_dim=1, out_dim=1, intervalL=0.0, intervalR=1.0, equa_name=None):
    """Return (rhs, true solution, six face boundary) lambdas for a 3D Laplace test problem on a cube.

    Args:
        input_dim: input dimension (kept for interface symmetry; unused here).
        out_dim: output dimension (kept for interface symmetry; unused here).
        intervalL: lower coordinate of the cube in every dimension.
        intervalR: upper coordinate of the cube in every dimension.
        equa_name: equation identifier; only 'PDE1' is defined.

    Returns:
        Tuple (fside, utrue, u_00, u_01, u_10, u_11, u_20, u_21) where
        u_d0/u_d1 restrict the true solution to the lower/upper face of
        dimension d; implicitly None for any other equa_name.
    """
    if equa_name == 'PDE1':
        # -Laplace U = f
        # u=sin(pi*x)*sin(pi*y)*sin(pi*z), f=-pi*pi*sin(pi*x)*sin(pi*y)*sin(pi*z)
        # NOTE(review): fside depends only on x, while utrue is a product of
        # three sines. For -Laplace(u) = f the rhs should be
        # 3*pi^2*sin(pi*x)*sin(pi*y)*sin(pi*z); confirm the intended rhs and
        # sign convention before relying on this case.
        fside = lambda x, y, z: -(np.pi)*(np.pi)*tf.sin(np.pi*x)
        utrue = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*z)
        # Face restrictions of the true solution (one coordinate pinned).
        u_00 = lambda x, y, z: tf.sin(np.pi*intervalL)*tf.sin(np.pi*y)*tf.sin(np.pi*z)
        u_01 = lambda x, y, z: tf.sin(np.pi*intervalR)*tf.sin(np.pi*y)*tf.sin(np.pi*z)
        u_10 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*intervalL)*tf.sin(np.pi*z)
        u_11 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*intervalR)*tf.sin(np.pi*z)
        u_20 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*intervalL)
        u_21 = lambda x, y, z: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*intervalR)
        return fside, utrue, u_00, u_01, u_10, u_11, u_20, u_21
# PDE problem data: boundary conditions, initial condition, true solution, right-hand-side function
def get_infos2Laplace_5D(input_dim=1, out_dim=1, intervalL=0.0, intervalR=1.0, equa_name=None):
    """Return (rhs, true solution, ten face boundary) lambdas for a 5D Laplace test problem on a hypercube.

    Args:
        input_dim: input dimension (kept for interface symmetry; unused here).
        out_dim: output dimension (kept for interface symmetry; unused here).
        intervalL: lower coordinate of the hypercube in every dimension.
        intervalR: upper coordinate of the hypercube in every dimension.
        equa_name: equation identifier; only 'PDE1' is defined.

    Returns:
        Tuple (fside, utrue, u_00, u_01, ..., u_40, u_41) where u_d0/u_d1
        restrict the true solution to the lower/upper face of dimension d;
        implicitly None for any other equa_name.
    """
    if equa_name == 'PDE1':
        # u=sin(pi*x), f=-pi*pi*sin(pi*x)
        # NOTE(review): fside depends only on x, while utrue is a product of
        # five sines. For -Laplace(u) = f the rhs should be
        # 5*pi^2 * product of the five sines; confirm the intended rhs and
        # sign convention before relying on this case.
        fside = lambda x, y, z, s, t: -(np.pi)*(np.pi)*tf.sin(np.pi*x)
        utrue = lambda x, y, z, s, t: tf.sin(np.pi*x)*tf.sin(np.pi*y)*tf.sin(np.pi*z)*tf.sin(np.pi*s)*tf.sin(np.pi*t)
        # Face restrictions of the true solution (one coordinate pinned).
        u_00 = lambda x, y, z, s, t: tf.sin(np.pi*intervalL)*tf.sin(np.pi*y)*tf.sin(np.pi*z)*tf.sin(np.pi*s)*tf.sin(np.pi*t)
        u_01 = lambda x, y, z, s, t: tf.sin(np.pi*intervalR)*tf.sin(np.pi*y)*tf.sin(np.pi*z)*tf.sin(np.pi*s)*tf.sin(np.pi*t)
        u_10 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * intervalL) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_11 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * intervalR) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_20 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * intervalL) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_21 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * intervalR) * tf.sin(np.pi * s) * tf.sin(np.pi * t)
        u_30 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * intervalL) * tf.sin(np.pi * t)
        u_31 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * intervalR) * tf.sin(np.pi * t)
        u_40 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * intervalL)
        u_41 = lambda x, y, z, s, t: tf.sin(np.pi * x) * tf.sin(np.pi * y) * tf.sin(np.pi * z) * tf.sin(np.pi * s) * tf.sin(np.pi * intervalR)
        return fside, utrue, u_00, u_01, u_10, u_11, u_20, u_21, u_30, u_31, u_40, u_41
decorator_test/test.py | ehddn5252/dw_test | 0 | 12772730 | <filename>decorator_test/test.py
def decorating_fun(func):
    """Decorator that prints a message before and after calling the wrapped function.

    Generalized (backward-compatibly) to accept any call signature and to
    forward the wrapped function's return value to the caller; the original
    wrapper accepted no arguments and silently dropped the result.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's name/docstring
    def wrapping_function(*args, **kwargs):
        print("this is wrapping function and get func start")
        result = func(*args, **kwargs)
        print("func end")
        return result
    return wrapping_function
@decorating_fun
def decorated_func():
    """Toy function whose call is wrapped by decorating_fun's logging."""
    text = "i`m decoraed"
    print(text)

decorated_func()
Chapter06_code/Ch06_R06/some_model_ch06r06/models.py | PacktPublishing/Odoo-Development-Cookbook | 55 | 12772731 | <reponame>PacktPublishing/Odoo-Development-Cookbook
# coding: utf-8
from openerp import models, api, fields
class LibraryMember(models.Model):
    """Library member able to return all books they currently hold."""
    _name = 'library.member'

    @api.multi
    def return_all_books(self):
        """Create a pre-filled ``library.returns.wizard`` for this member.

        Replays the wizard's onchange for ``member_id`` so the created
        record carries the same derived values as one filled in manually.
        """
        # Bug fix: ensure_one was referenced without parentheses, making it
        # a no-op attribute access; the single-record guard never ran.
        self.ensure_one()
        wizard = self.env['library.returns.wizard']
        values = {'member_id': self.id}
        specs = wizard._onchange_spec()
        updates = wizard.onchange(values, ['member_id'], specs)
        values.update(updates.get('value', {}))
        wizard.create(values)
examples/Parabola/genParabolaData.py | DomiDre/modelexp | 0 | 12772732 | import modelexp
from modelexp.experiments import Generic
from modelexp.models.Generic import Parabola
import numpy as np
import random
# Build the experiment and attach a parabola model to it.
app = modelexp.App()
app.setExperiment(Generic)
modelRef = app.setModel(Parabola)

# Evaluate the parabola model on a fixed grid with the chosen parameters.
modelRef.defineDomain(np.linspace(-3, 3, 100))
for param_name, param_value in (('a', 1.3), ('x0', 0.3), ('c', -0.2)):
    modelRef.setParam(param_name, param_value)
modelRef.calcModel()

# 5% relative uncertainty; draw one Gaussian sample per model point.
sig_y = 0.05 * modelRef.y
randomized_y = np.array([random.gauss(y, 0.05 * y) for y in modelRef.y])

# Write x, noisy y, and uncertainty as tab-separated columns.
with open('parabolaData.xye', 'w') as f:
    for x_val, y_noisy, sigma in zip(modelRef.x, randomized_y, sig_y):
        f.write(f'{x_val}\t{y_noisy}\t{sigma}\n')
bandcamp_radio/bandcamp.py | richardmalone/bandcamp-radio | 1 | 12772733 | <reponame>richardmalone/bandcamp-radio<filename>bandcamp_radio/bandcamp.py
import requests
import json
from html.parser import HTMLParser
import time
class _FanIdParser(HTMLParser):
def error(self, message):
pass
fan_id = ""
def handle_starttag(self, tag, attrs):
for attr in attrs:
if attr[0] == "data-blob":
data_html = attr[1]
data = json.loads(data_html)
self.fan_id = data['fan_data']['fan_id']
def discover(genre="all", sub_genre="any", slice="best", page=0):
    """Query Bandcamp's public discover API and return the result items.

    Args:
        genre: top-level genre filter (default "all").
        sub_genre: sub-genre/tag filter (default "any").
        slice: sorting slice such as "best" or "new" (note: shadows the
            builtin ``slice``; kept for interface compatibility).
        page: zero-based result page.

    Returns:
        The list under the response's "items" key.
    """
    url = F"https://bandcamp.com/api/discover/3/get_web?g={genre}&t={sub_genre}&s={slice}&p={page}&f=all"
    request = requests.get(url)
    print("got", genre, sub_genre, slice)
    # Bug fix: the response body was previously json.loads'ed twice, with the
    # first (identical) result thrown away. Parse it once.
    return json.loads(request.content)['items']
def get_fan_id(name):
    """Fetch a fan's Bandcamp page and return the fan id embedded in its data-blob."""
    page = requests.get(F"https://bandcamp.com/{name}")
    blob_parser = _FanIdParser()
    blob_parser.feed(page.content.decode('utf-8'))
    return blob_parser.fan_id
def get_collection(fan_id, count=1000):
    """Return up to ``count`` items from a fan's collection via Bandcamp's API.

    Args:
        fan_id: the fan identifier (as returned by get_fan_id).
        count: maximum number of collection items to request.

    Returns:
        The list under the response's "items" key.
    """
    url = "https://bandcamp.com/api/fancollection/1/collection_items"
    token = get_token()
    # Build the payload with json.dumps so values are escaped correctly,
    # instead of hand-assembling a JSON string with an f-string. Values are
    # stringified to match the payload shape the original code sent.
    body = json.dumps({
        "fan_id": str(fan_id),
        "older_than_token": token,
        "count": str(count),
    })
    x = requests.post(url, data=body)
    return json.loads(x.text)['items']
def get_token():
    """Return a fresh pagination token: the current Unix time plus the '::FOO::' suffix."""
    timestamp = int(time.time())
    return f"{timestamp}::FOO::"
sample_table.py | stko/pyvotab | 0 | 12772734 | <filename>sample_table.py
from pprint import pprint
from pyvotab import Pyvotab, PyvoStyles
t1 = [
['Hans', 'Mueller', 'Hamburg', 'Postweg', 8],
['Klaus', 'Meier', 'Hamburg', 'Feldplatz', 5],
['Klaus', 'Meier', 'Berlin', 'Burgallee', 4],
['Klaus', 'Schulze', 'Berlin', 'Burgallee', 3],
]
t2 = [
['Hins', 'Mueller', 'Hamburg', 'Postweg', 8],
['Klaus', 'Meier', 'Hamburg', 'Feldplatz', 6],
['Klaus', 'Meier', 'Berlin', 'Burgallee', 4],
['Klaus', 'Schulze', 'Berlin', 'Burgallee', 3],
['Klaus', 'Schulze', 'Berlin', 'am Deich', 9],
['Hans', 'Mueller', 'Berlin', 'am Deich', 10],
]
t3 = [
['Hans', 'Mueller', 'Hamburg', 'Postweg', 8]
]
t4 = [
['Hins', 'Mueller', 'Hamburg', 'Postweg', 8]
]
t5 = [ # = t1 with column header
['Vorname', 'Nachname', 'Stadt', 'Straße', 'Hausnummer'],
['Hans', 'Mueller', 'Hamburg', 'Postweg', 8],
['Klaus', 'Meier', 'Hamburg', 'Feldplatz', 5],
['Klaus', 'Meier', 'Berlin', 'Burgallee', 4],
['Klaus', 'Schulze', 'Berlin', 'Burgallee', 3],
]
t6 = [ # = t2 with column header
['Vorname', 'Nachname', 'Stadt', 'Straße', 'Hausnummer'],
['Hins', 'Mueller', 'Hamburg', 'Postweg', 8],
['Klaus', 'Meier', 'Hamburg', 'Feldplatz', 5],
['Klaus', 'Meier', 'Berlin', 'Burgallee', 4],
['Klaus', 'Schulze', 'Berlin', 'Burgallee', 3],
['Klaus', 'Schulze', 'Berlin', 'am Deich', 9],
['Hans', 'Mueller', 'Berlin', 'am Deich', 10],
]
p1 = [ # Pivot test table
['Product', 'Day', 'Sold'],
['Soap', 'Mo', 8],
['Soap', 'Mo', 8],
['Water', 'Mo', 2],
['Steaks', 'Mo', 4],
['Soap', 'Tu', 1],
['Water', 'Tu', 1],
['Steaks', 'Tu', 7],
['Soap', 'We', 2],
['Water', 'We', 9],
['Steaks', 'We', 7],
['Soap', 'Th', 1],
['Water', 'Th', 5],
['Water', 'Tu', 2],
['Salt', 'Th', 6],
]
p2 = [ # Pivot test table
['Product', 'Day', 'Sold'],
['Soap', 'Mo', 5], # changed value here 8 -> 5
['Soap', 'Mo', 8],
['Water', 'Mo', 2],
['Steaks', 'Mo', 4],
['Soap', 'Tu', 1],
['Water', 'Tu', 1],
['Steaks', 'Tu', 9],# changed value here 7 -> 9
['Soap', 'We', 2],
['Water', 'We', 9],
['Steaks', 'We', 7],
['Soap', 'Th', 1],
['Water', 'Th', 5],
['Water', 'Tu', 2],
['Pepper', 'Th', 6], # changed Salt to Pepper
]
debug=False
pts= PyvoStyles('lightgrey','lightgreen','yellow','lightblue', 'aquamarine')
#pt = Pyvotab(pts, { 'page': 3, 'rows' : [ 3,4 ], 'cols' : [2, 1], 'val' : 5 , 'filter': None, 'pivot': 'plain'}, debug=debug)
#pt = Pyvotab(pts, { 'page': 3, 'rows' : [ 2 ], 'cols' : [3], 'val' : 5 ,'p_rows' : [ 1,2 ], 'p_cols' : [3], 'p_val' : 4 , 'filter': None, 'pivot': 'pivot'}, debug=debug)
#pt = Pyvotab(pts, { 'page': 'all', 'rows' : [ 1,2 ], 'cols' : [3], 'val' : 4 ,'p_rows' : [ 2 ], 'p_cols' : [3], 'p_val' : 5 , 'filter': None, 'pivot': 'pivot'}, debug=debug)
#pt.InsertTable( t5, False, "white")
#pt.InsertTable( t6, True, "white")
pt = Pyvotab(pts, { 'page': 'all', 'rows' : [ 3 ], 'cols' : [2], 'val' : 4 ,'p_rows' : [ 1 ], 'p_cols' : [2], 'p_val' : 3 , 'filter': None, 'pivot': 'cnt'}, debug=debug)
pt.InsertTable( p1, False, "white")
#pt.InsertTable( p2, True, "white")
'''
rowDepth = pt.headerrows()
colDepth = pt.headercols()
print("rowDepth", rowDepth)
print("colDepth", colDepth)
'''
for pyvot_sheet in pt.getPrintDict():
page_name=pyvot_sheet.name
pt_table=pyvot_sheet.table
print("tabellen_name",page_name,pyvot_sheet.style)
print('<table border="1">')
for row in range(pt_table.ySize):
print("<tr>")
for col in range(pt_table.xSize):
try:
cell_content=pt_table[col][row]
except:
print("<td/> ", end='')
continue
if cell_content: # if content is None, then it's an empty filler element, needed to handle multicell html cells correctly
print("<td ", end='')
print('style="background-color:', end='')
print(cell_content["style"]+'" ', end='')
if cell_content["xDir"]:
print('rowspan="',end='')
else:
print('colspan="',end='')
print(str(cell_content["size"])+'" ',end='')
print(">", end='')
if debug:
print(cell_content["value"]+" row:{0}/col:{1}".format(row,col) +"</td>", end='')
else:
print(cell_content["value"] +"</td>", end='')
print("</tr>")
print("</table>")
| 1.851563 | 2 |
league/migrations/0001_initial.py | klauck/mettliga | 1 | 12772735 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-09 08:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MettEater',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=25)),
('last_name', models.CharField(max_length=25)),
],
),
migrations.CreateModel(
name='Metting',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('eaters', models.ManyToManyField(to='league.MettEater')),
('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='organized_mettings', to='league.MettEater')),
],
),
]
| 1.65625 | 2 |
exprimo/profilers/flops_profiler.py | Lagostra/exprimo | 3 | 12772736 | from paleo.profilers.flops_profiler import FlopsProfiler as PaleoFlopsProfiler
from paleo.profilers.base import ProfilerOptions
class FlopsProfiler:
@staticmethod
def profile(layer_spec, device, backward=False, batch_size=None, comm_penalization=1, comp_penalization=1):
layer = layer_spec.operation
assert layer is not None, f'{layer_spec} has no operation'
if batch_size:
layer.batch_size = batch_size
profiler_options = ProfilerOptions()
direction = 'backward' if backward else 'forward'
profiler_options.direction = direction
profiler_options.use_cudnn_heuristics = False
profiler_options.include_bias_and_activation = False
profiler_options.ppp_comm = comm_penalization
profiler_options.ppp_comp = comp_penalization
profiler = PaleoFlopsProfiler(profiler_options, device)
time = profiler.profile(layer, cross_device_bandwidth=0)
return time.comp_time + time.comm_time
| 2.265625 | 2 |
pytorch2keras/linear_layers.py | ShuangLiu1992/pytorch2keras | 2 | 12772737 | import keras.layers
import numpy as np
import random
import string
import tensorflow as tf
from .common import random_string
def convert_gemm(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert Linear.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting Linear ...')
if names == 'short':
tf_name = 'FC' + random_string(6)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
bias_name = '{0}.bias'.format(w_name)
weights_name = '{0}.weight'.format(w_name)
W = weights[weights_name].numpy().transpose()
input_channels, output_channels = W.shape
keras_weights = [W]
has_bias = False
if bias_name in weights:
bias = weights[bias_name].numpy()
keras_weights = [W, bias]
has_bias = True
dense = keras.layers.Dense(
output_channels,
weights=keras_weights, use_bias=has_bias, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros',
)
layers[scope_name] = dense(layers[inputs[0]])
def convert_matmul(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert matmul layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting matmul ...')
if names == 'short':
tf_name = 'MMUL' + random_string(4)
elif names == 'keep':
tf_name = w_name
else:
tf_name = w_name + str(random.random())
if len(inputs) == 1:
weights_name = '{0}.weight'.format(w_name)
W = weights[weights_name].numpy().transpose()
input_channels, output_channels = W.shape
keras_weights = [W]
dense = keras.layers.Dense(
output_channels,
weights=keras_weights, use_bias=False, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros',
)
layers[scope_name] = dense(layers[inputs[0]])
elif len(inputs) == 2:
weights_name = '{0}.weight'.format(w_name)
W = weights[weights_name].numpy().transpose()
input_channels, output_channels = W.shape
keras_weights = [W]
dense = keras.layers.Dense(
output_channels,
weights=keras_weights, use_bias=False, name=tf_name, bias_initializer='zeros', kernel_initializer='zeros',
)
layers[scope_name] = dense(layers[inputs[0]])
else:
raise AssertionError('Cannot convert matmul layer')
| 2.59375 | 3 |
oxe-api/exception/cannot_assign_value_from_parent_category.py | CybersecurityLuxembourg/openxeco | 0 | 12772738 | <filename>oxe-api/exception/cannot_assign_value_from_parent_category.py
class CannotAssignValueFromParentCategory(Exception):
def __init__(self):
super().__init__("422 Cannot assign value from parent category")
| 2.140625 | 2 |
perforce/p4clean.py | nlfiedler/devscripts | 0 | 12772739 | <reponame>nlfiedler/devscripts
#!/usr/bin/env python3
"""Clean up shelved or pending changes.
Python script to clean up old shelved and/or pending Perforce changes owned
by the current user. Use -h to display usage information. This script
requires the P4Python library to be installed. See
http://www.perforce.com/product/components/apis
"""
import argparse
from datetime import datetime, timedelta
import sys
import P4
def delete_changes(p4, args, user, status='shelved'):
"""Delete the shelved or pending changes owned by the current user.
:param p4: Perforce API
:param args: command line arguments
:param user: Perforce user object
:param status: the change status to query for (e.g. 'shelved', 'pending')
"""
week_ago = datetime.now() - timedelta(7)
for change in p4.iterate_changes('-u', user['User'], '-s', status):
try:
date = datetime.strptime(change['Date'], '%Y/%m/%d %H:%M:%S')
except ValueError:
sys.stderr.write('failed to parse date {} in change {}'.format(
change['Date'], change['Change']))
if date <= week_ago or args.all:
if args.delete:
try:
if status == 'shelved':
p4.delete_shelve(change['Change'])
elif status == 'pending':
p4.delete_change(change['Change'])
print("Deleted change {}".format(change['Change']))
except P4.P4Exception as e:
print("Error removing {}: {}".format(change['Change'], e))
else:
print("p4 shelve -dc {}".format(change['Change']))
else:
print("Ignoring recent change {}".format(change['Change']))
def main():
"""Parse command line arguments and do the work."""
desc = '''Removes old shelved or pending changes owned by the current user.
By default, nothing is removed unless -y is passed (a la obliterate).
Any changes made in the last week will be retained, unless -a is given.
'''
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-a", "--all", action="store_true",
help="remove all shelved changes, even recent ones")
parser.add_argument("-p", "--pending", action="store_true",
help="select for pending changes (vs. shelved)")
parser.add_argument("-y", "--delete", action="store_true",
help="perform the deletion")
parser.add_argument("-c", "--client", metavar="CLIENT",
help="perform the deletion")
args = parser.parse_args()
try:
p4 = P4.P4()
if args.client:
p4.client = args.client
p4.connect()
user = p4.fetch_user()
if user:
if args.pending:
delete_changes(p4, args, user, 'pending')
else:
delete_changes(p4, args, user)
if not args.delete:
print("This was report mode. Use -y to remove changes.")
else:
sys.stderr.write("Cannot retrieve current Perforce user\n")
except P4.P4Exception as e:
sys.stderr.write("error: p4 action failed: {}\n".format(e))
if __name__ == '__main__':
main()
| 2.796875 | 3 |
Program's_Contributed_By_Contributors/AI-Summer-Course/py-master/Basics/Hindi/12_read_write_file/12_read_write_file.py | SDGraph/Hacktoberfest2k21 | 0 | 12772740 | # read file
f=open("funny.txt","r")
for line in f:
print(line)
f.close()
# readlines()
f=open("funny.txt","r")
lines = f.readlines()
print(lines)
# write file
f=open("love.txt","w")
f.write("I love python")
f.close()
# same file when you write i love javascript the previous line goes away
f=open("love.txt","w")
f.write("I love javascript")
f.close()
# You can use append mode to stop having previous lines overwritten
f=open("love.txt","a")
f.write("I love javascript")
f.close()
# show a picture of file open modes (12:12 in old video)
# writelines
f=open("love.txt","w")
f.writelines(["I love C++\n","I love scala"])
f.close()
# with statement
with open("funny.txt","r") as f:
for line in f:
print(line)
# https://www.cricketworldcup.com/teams/india/players/107
player_scores = {}
with open("scores.csv","r") as f:
for line in f:
tokens = line.split(',')
player = tokens[0]
score = int(tokens[1])
if player in player_scores:
player_scores[player].append(score)
else:
player_scores[player] = [score]
print(player_scores)
for player, score_list in player_scores.items():
min_score=min(score_list)
max_score=max(score_list)
avg_score=sum(score_list)/len(score_list)
print(f"{player}==>Min:{min_score}, Max:{max_score}, Avg:{avg_score}")
| 3.78125 | 4 |
django-web-parser/keywords/exceptions.py | sterenczak-marek/django-web-parser | 0 | 12772741 | <reponame>sterenczak-marek/django-web-parser
class NoKeywordsException(Exception):
"""Website does not contains any keywords in <meta> tag"""
pass
class BadURLException(Exception):
"""Website does not exists in a given URL"""
pass
| 2.296875 | 2 |
Src/Lego/PluginBase/PluginBase.py | capaximperii/PyPlugin | 0 | 12772742 | """
The base for all plugins to derive from. It also implements an auto registering pattern so that
the plugins do not have to explicitly register.
"""
import abc
from marshmallow_jsonschema import JSONSchema
from Lego.Datatypes import InputParams
from .decorators import check_chart_configuration
from .decorators import check_input_configuration
from .decorators import check_modes_of_operation
from .decorators import run_async
# Plugin implementation
class PluginBase(metaclass=abc.ABCMeta):
"""
The base class for all plugins that want to register with this application.
"""
plugin_registry = {}
def __init__(self, name, group):
"""
Constructor to initialize basic fields.
"""
self.name = name
self.group = group
self.input_params = InputParams()
def __new__(cls, name, group, *args, **kwargs):
"""
Factory method for base/subtype creation. Simply creates an
(new-style class) object instance and sets a base property.
"""
del args
del kwargs
instance = object.__new__(cls)
# Call base class constructors by default to avoid doing them in each plugin.
super(cls, instance).__init__(name, group)
typedef = cls.__dict__
for attr in typedef:
func = typedef[attr]
if hasattr(func, "__dont_decorate__"):
pass
elif callable(func) and func.__name__ == 'get_input_configuration':
setattr(cls, attr, check_input_configuration(func))
elif callable(func) and func.__name__ == 'get_chart_configuration':
setattr(cls, attr, check_chart_configuration(func))
elif callable(func) and func.__name__ == 'get_modes_of_operation':
setattr(cls, attr, check_modes_of_operation(func))
elif callable(func) and func.__name__ == 'run':
setattr(cls, attr, run_async(func))
if group not in cls.plugin_registry.keys():
cls.plugin_registry[group] = []
cls.plugin_registry[group].append(instance)
return instance
@classmethod
def get_plugins(cls):
"""
Gets the list of all plugins registered.
"""
return cls.plugin_registry
@classmethod
def get_plugins_group(cls, group):
"""
Gets plugins registered under a single group name.
"""
if not group in cls.plugin_registry.keys():
return None
return cls.plugin_registry[group]
def get_plugin_name(self):
"""
returns the plugin name.
"""
return self.name
def get_plugin_group(self):
"""
returns plugin family name.
"""
return self.group
def get_input_configuration(self):
"""
Get name and type json value for input parameters required by this plugin to operate.
"""
json_schema = JSONSchema()
schema_blue_print = self.input_params.generate_schema(self.name + 'InputParams')
schema_desc = schema_blue_print()
return json_schema.dump(schema_desc).data
@abc.abstractmethod
def get_chart_configuration(self):
"""
Get name and type json value for chart display for this plugin.
"""
return None
@abc.abstractmethod
def get_modes_of_operation(self):
"""
Get supported modes of operation online or offline
"""
return ['online', 'offline']
@abc.abstractmethod
def run(self):
"""
Run method to call for the plugin processing.
"""
print("Running abstract method")
return
| 2.90625 | 3 |
get_unused_macros.py | 1ethanhansen/unused-c-finder | 0 | 12772743 | <filename>get_unused_macros.py
#!/usr/bin/env python
# Author: <NAME> <<EMAIL>>
# For finding unused macros in c code
#
# Assumptions:
# cscope and ctags installed
# cscope has been used to generate cscope.out for your project
# cscope.out exists in the current directory
#
# Usage:
# enter the full path to the file or files you want to check
# eg: /home/ethan/git/linux-rcu/kernel/rcu
# OR: /home/ethan/git/linux-rcu/kernel/rcu/rcu.h
# if you give a dir, it will search all the .c and .h files there
# Import modules for CLI calling
import subprocess
import sys
# Maps "last/three/path/parts" -> list of macro names that appear unused there.
file_var_dict = {}

# Exit the program if no file to parse given
if (len(sys.argv) == 1):
    sys.exit("ERROR: no file loc given")
file_name = sys.argv[1]

# find .c and .h files in the directory given, and for each file exec
# ctags looking for macro names and for each macro only print
# (or in this case return) the macro name and the file location
variables_files_byte = subprocess.check_output(
    ["find {} -type f -name '*.[ch]' -exec ctags -x --c-kinds=d {{}} ';' \
    | awk '{{print $1 $4}}'".format(file_name)], shell=True)

variables_decoded = variables_files_byte.decode()
variables_files_list = variables_decoded.split("\n")

# Deduplicate the "<macro><path>" entries. Bug fix: the previous
# list(set(...))[1:] dropped one *arbitrary* entry (set order is not
# deterministic); the empty entry from the trailing newline is already
# filtered by the length check below.
for item in set(variables_files_list):
    # Split the string combo of macro name and path it came from
    # into the macro and the last three parts of the path
    split_up_loc = item.split("/")
    var_name = split_up_loc[0]
    final_file_name = "/".join(split_up_loc[-3:])

    # Some sanity checks for empty and reserved vars
    if len(var_name) == 0:
        continue
    if var_name[0] == "_":
        continue

    try:
        # Use cscope to find the C symbol in code
        out = subprocess.check_output(
            ["cscope -d -f cscope.out -R -L0 {}".format(var_name)], shell=True)
    except subprocess.CalledProcessError:
        # cscope failed for this symbol; skip it (was a bare except before,
        # which also swallowed KeyboardInterrupt/SystemExit).
        continue

    # Make the output a usable list of vars & the file they came from
    out_decoded = out.decode()
    out_list = out_decoded.split("\n")

    # If all we found was declarations and assignments
    if len(out_list) < 3:
        # Check to make sure we can append to an entry that exists
        if final_file_name in file_var_dict:
            file_var_dict[final_file_name].append(var_name)
        else:  # If doesn't exist, create new file:var_list pair
            file_var_dict[final_file_name] = [var_name]

# Open the output file and write as markdown
with open("unneeded-macros.md", "w+") as unneeded_file:
    for source_file, var_list in file_var_dict.items():
        unneeded_file.write("# {}\n".format(source_file))
        for var in var_list:
            unneeded_file.write("* {}\n".format(var))
| 2.546875 | 3 |
tests/test_app.py | simba999/dawdle | 0 | 12772744 | <filename>tests/test_app.py
from tests.test_base import TestBase
class TestApp(TestBase):
    """Application-wide behaviour tests."""

    def test_404(self):
        """Requesting an unknown URL must yield the standard 404 page."""
        resp = self.client.get('/this/page/does/not/exist')
        assert resp.status_code == 404
        assert b'Not Found' in resp.data
| 2.734375 | 3 |
monitor_xfconf_changes/xfconf.py | jamescherti/monitor-xfconf-changes | 3 | 12772745 | #!/usr/bin/env python
#
# Copyright (c) 2021 <NAME>
# URL: https://github.com/jamescherti/monitor-xfconf-changes/
#
# Distributed under terms of the MIT license.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# --
# pylint: disable=invalid-name
#
"""This command-line tool will help you to configure XFCE 4 programmatically.
It will display the xfconf-query commands of all the Xfconf settings that
are bring modified by xfce4-settings-manager (or by any other software that
modifies Xfconf like Thunar, Catfish, Ristretto...).
You can then add the xfconf-query commands to a Shell script that you can use
to configure XFCE 4 programmatically.
"""
import os
import signal
from typing import Set, Union, Any
from pathlib import Path
import psutil
from lxml import etree as ETree
__author__ = "<NAME>"
__license__ = "MIT"
class XfconfError(Exception):
    """Exception raised by the class Xfconf() or its children.

    Currently raised when a per-channel XML file is malformed or contains an
    unsupported property type.
    """
class XfconfItem:
    """One Xfconf property: its channel, path, type and value."""

    def __init__(self,
                 channel: str,
                 property_path: str,
                 property_type: str,
                 property_value: Union[str, list]):
        """Store the property fields verbatim."""
        self.channel = channel
        self.property_path = property_path
        self.property_type = property_type
        self.property_value = property_value

    def __repr__(self) -> str:
        """Render as 'channel/property/path : type = value'."""
        return (f"{self.channel}{self.property_path} : "
                f"{self.property_type} = {self.property_value}")
class Xfconf:
    """Snapshot of the current user's Xfconf settings.

    Parses the per-channel XML files in
    ~/.config/xfce4/xfconf/xfce-perchannel-xml and exposes every property as
    an XfconfItem.  repr() renders the whole snapshot as a series of
    equivalent 'xfconf-query' commands.
    """

    @staticmethod
    def escape_command(command: str) -> str:
        """Return *command* single-quoted for safe inclusion in a shell line."""
        return "'{}'".format(command.replace("'", "'\\''"))

    @staticmethod
    def reload_xfconfd():
        """Send SIGHUP to every running 'xfconfd' process to make it reload."""
        for proc in psutil.process_iter():
            try:
                if proc.name() == "xfconfd":
                    # reload the process
                    os.kill(proc.pid, signal.SIGHUP)
            except psutil.Error:
                # The process vanished or is not accessible: skip it.
                pass

    def __init__(self):
        """Load all Xfconf settings from the per-channel XML files."""
        self.xfconf_items: set = set()
        dir_xfconf = Path("~/.config/xfce4/xfconf/xfce-perchannel-xml")
        for xml_file in dir_xfconf.expanduser().glob("*.xml"):
            self._parse_xfconf_perchannel_xml(str(xml_file))

    def diff(self) -> Set[str]:
        """Return the xfconf-query lines that changed since this snapshot.

        Reloads xfconfd, takes a fresh snapshot, and replaces this
        instance's items with the new ones (so successive calls report
        incremental changes).
        """
        Xfconf.reload_xfconfd()
        new_xfce_config = Xfconf()
        before = set(str(self).splitlines())
        after = set(str(new_xfce_config).splitlines())
        self.xfconf_items = new_xfce_config.xfconf_items
        return after - before

    def __iter__(self):
        """Iterate through the loaded XfconfItem objects."""
        yield from self.xfconf_items

    def __repr__(self) -> str:
        """Render every item as an equivalent 'xfconf-query' command."""
        commands = []
        for item in self:
            cmd = "{} --create -c {} -p {}" \
                .format("xfconf-query",
                        self.escape_command(item.channel),
                        self.escape_command(item.property_path))
            if item.property_type == "array":
                # Arrays are expressed as one --type/--set pair per element.
                for array_item_type, array_item_value in item.property_value:
                    cmd = "{} --type {} --set {}".format(
                        cmd,
                        self.escape_command(array_item_type),
                        self.escape_command(array_item_value)
                    )
            else:
                cmd = "{} --type {} --set {}".format(
                    cmd,
                    self.escape_command(item.property_type),
                    self.escape_command(str(item.property_value))
                )
            commands.append(cmd)
        return "{}\n".format("\n".join(commands))

    def _parse_xfconf_perchannel_xml(self,
                                     xml_file: str,
                                     root: Any = None,
                                     channel_name: str = "",
                                     property_path: str = ""):
        """Recursively parse one per-channel XML file into XfconfItem objects.

        Raises XfconfError on an unexpected root element or an unsupported
        property type.
        """
        if root is None:
            tree = ETree.parse(xml_file)
            root = tree.getroot()
            channel_name = root.attrib.get("name")

            if root.attrib.get("version") != "1.0" \
                    or root.tag.lower() != "channel":
                err_str = ("invalid XML file: '{}'").format(xml_file)
                raise XfconfError(err_str)

        # FIX: iterate the element directly instead of the deprecated
        # Element.getchildren() (removed from stdlib ElementTree, deprecated
        # in lxml); iteration yields the same child elements.
        for elem in root:
            # .title().lower() was redundant -- .lower() alone is equivalent.
            if elem.tag.lower() != "property":
                continue

            property_type = elem.attrib.get("type").strip().lower()
            cur_property_path = "{}/{}".format(property_path,
                                               elem.attrib.get("name").strip())

            if property_type not in ["empty", "uint", "int", "string", "bool",
                                     "array", "double"]:
                err_str = ("the type '{}' of '{}{}' is not supported. "
                           "XML file: '{}'") \
                    .format(property_type, channel_name, cur_property_path,
                            xml_file)
                raise XfconfError(err_str)

            # 'empty' = contains sub items; recurse into them.
            if property_type == "empty":
                self._parse_xfconf_perchannel_xml(
                    xml_file=xml_file,
                    root=elem,
                    channel_name=channel_name,
                    property_path=cur_property_path
                )
                continue

            # Build the property value: arrays collect (type, value) pairs.
            if property_type == "array":
                property_value = []
                for elem_property_value in elem:
                    array_item_type = elem_property_value.attrib.get("type")
                    array_item_value = elem_property_value.attrib.get("value")
                    if array_item_value is None:
                        array_item_value = ""
                    property_value.append((array_item_type, array_item_value))
            else:
                property_value = elem.attrib.get("value")

            self.xfconf_items.add(
                XfconfItem(
                    channel=channel_name,
                    property_path=cur_property_path,
                    property_type=property_type,
                    property_value=property_value
                )
            )
| 1.367188 | 1 |
state_manager/event_processors/aiogram.py | Bloodielie/state_manager | 4 | 12772746 | <gh_stars>1-10
from typing import Any, Optional

from aiogram import Dispatcher
from aiogram.dispatcher.middlewares import BaseMiddleware
from aiogram.types import CallbackQuery, Message
from aiogram.types.base import TelegramObject
from pyject import DependencyNotFound

from state_manager import BaseStorage, BaseStateManager
from state_manager.event_processors.base import BaseEventProcessor
from state_manager.models.state_managers.aiogram import AiogramStateManager
from state_manager.storage_settings import StorageSettings
from state_manager.storages import redis
from state_manager.storages.state_storage import StateStorage
from state_manager.types.aiogram import aiogram_context
from state_manager.types.generals import BaseContainer
from state_manager.utils.search import get_state_handler_and_run
from state_manager.utils.utils import get_state_name
class AiogramEventProcessor(BaseEventProcessor, BaseMiddleware):
    """Aiogram middleware that routes unhandled updates to state handlers.

    For every message / callback query / edited message that was not already
    consumed elsewhere, the middleware resolves the user's current state name
    from the data storage and runs the matching handler from the
    StateStorage.
    """

    def __init__(
        self,
        state_storage: StateStorage,
        container: BaseContainer,
        state_data_storage: Optional[BaseStorage] = None,
        default_state_name: Optional[str] = None,
    ) -> None:
        self._state_storage = state_storage
        # Default to a Redis-backed storage built from default settings.
        self._state_data_storage = state_data_storage or redis.RedisStorage(StorageSettings())
        self._default_state_name = default_state_name or "home"
        self._container = container
        super().__init__()

    @staticmethod
    def _already_routed(data: dict) -> bool:
        """Return True when this update was already handled elsewhere.

        NOTE(review): 'state'/'raw_state' presumably come from aiogram's own
        FSM and 'command' from a command filter -- confirm against aiogram.
        """
        return bool(data.get("state") or data.get("raw_state") or data.get("command"))

    async def on_post_process_message(self, message: Message, _, data: dict) -> None:
        if self._already_routed(data):
            return
        handler_result = await self.post_process_handlers(message, "message", data)
        # A string returned by the handler is sent back to the user.
        if handler_result is not None and isinstance(handler_result, str):
            await message.answer(handler_result)

    async def on_post_process_callback_query(self, callback_query: CallbackQuery, _, data: dict) -> None:
        if self._already_routed(data):
            return
        await self.post_process_handlers(callback_query, "callback_query", data)

    async def on_post_process_edited_message(self, callback_query: CallbackQuery, _, data: dict) -> None:
        if self._already_routed(data):
            return
        await self.post_process_handlers(callback_query, "edited_message", data)

    async def post_process_handlers(
        self, ctx: aiogram_context, event_type: str, data: Optional[dict] = None
    ) -> Optional[Any]:
        """Run the handler registered for the user's current state.

        BUG FIX: this method was annotated '-> None' although its result is
        returned and consumed by on_post_process_message.
        """
        self._container.add_context(TelegramObject, ctx)
        if data is not None:
            for value in data.values():
                self._container.add_context(value.__class__, value)
        try:
            implementation_ = self._container.get(BaseStorage)
            state_manager = AiogramStateManager(storage=implementation_, context=ctx)
            self._container.add_context(BaseStateManager, state_manager)
        except DependencyNotFound:
            # No storage registered: handlers simply won't receive a
            # BaseStateManager dependency.
            pass
        state_name = await self._get_user_state_name(ctx)
        return await get_state_handler_and_run(self._state_storage, self._container, state_name, event_type)

    @classmethod
    def install(
        cls,
        dispatcher: Dispatcher,
        state_storage: StateStorage,
        container: BaseContainer,
        state_data_storage: Optional[BaseStorage] = None,
        default_state_name: Optional[str] = None,
    ) -> None:
        """Create the middleware and register it on *dispatcher*."""
        dispatcher.middleware.setup(cls(state_storage, container, state_data_storage, default_state_name))

    async def _get_user_state_name(self, ctx: aiogram_context) -> str:
        """Resolve the stored state name for the update's author."""
        user_id = ctx.from_user.id
        return await get_state_name(user_id, self._state_data_storage, self._default_state_name)
| 2.0625 | 2 |
gizmo_bringup/nodes/arm_driver.py | peterheim1/gizmo | 0 | 12772747 | <filename>gizmo_bringup/nodes/arm_driver.py<gh_stars>0
#!/usr/bin/env python
'''
Created March, 2017
@author: <NAME>
arm_driver.py - gateway to Arduino based arm controller
Copyright (c) 2011 <NAME>. All right reserved.
Borrowed heavily from M<NAME>uson's ArbotiX base_controller.py code.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Vanadium Labs LLC nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL VANADIUM LABS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import rospy
import tf
import math
from math import sin, cos, pi, radians, degrees
import sys
import time
from std_msgs.msg import String
from std_msgs.msg import Float64, Float32
from dynamixel_msgs.msg import JointState as JointStateDY
from sensor_msgs.msg import JointState
from SerialDataGateway import SerialDataGateway
class Arm_Driver(object):
    '''
    Gateway to an Arduino based arm controller over a serial port.

    Feedback lines of the form "jN<TAB><degrees>" received from the Arduino
    are republished as dynamixel JointState messages; commands received on
    the */command topics (radians) are converted to degrees and written to
    the serial port as single-letter commands.
    '''

    # Serial feedback tag -> (joint name, publisher attribute, scale, offset).
    # The published position is: scale * radians(reported_degrees) + offset.
    _JOINT_FEEDBACK = {
        'j1': ('left_lift_joint', '_P2_JointPublisher', 1.0, -1.05),
        'j2': ('left_rotate_joint', '_P3_JointPublisher', -1.0, 1.57),
        'j3': ('right_lift_joint', '_P6_JointPublisher', 1.0, -1.05),
        'j4': ('right_rotate_joint', '_P5_JointPublisher', -1.0, 1.57),
        'j5': ('left_gripper_joint', '_P7_JointPublisher', -1.0, 1.57),
        'j6': ('right_gripper_joint', '_P8_JointPublisher', -1.0, 1.57),
        # BUG FIX: 'j7' lines previously raised AttributeError because the
        # dispatcher called _Broadcast_left_arm_shoulder_roll_Joint (capital
        # 'J') while the method was defined with a lowercase 'j'.
        'j7': ('left_arm_shoulder_roll_joint', '_P1_JointPublisher', 1.0, 0.0),
    }

    def _HandleReceivedLine(self, line):
        """Handle one raw feedback line from the Arduino."""
        self._Counter = self._Counter + 1
        # Echo all incoming traffic on the debug topic.
        self._SerialPublisher.publish(String(str(self._Counter) + ", in: " + line))
        if (len(line) > 0):
            lineParts = line.split('\t')
            feedback = self._JOINT_FEEDBACK.get(lineParts[0])
            if feedback is not None:
                self._BroadcastJointState(lineParts, *feedback)

    def _BroadcastJointState(self, lineParts, joint_name, publisher_attr, scale, offset):
        """Publish a JointState for *joint_name* from a parsed feedback line.

        Replaces the seven near-identical _Broadcast_* methods; the per-joint
        transform comes from _JOINT_FEEDBACK.
        """
        if len(lineParts) < 2:
            # Malformed line without an angle value.  BUG FIX: this guard
            # previously used 'pass' (a no-op) and relied on the except
            # clause below catching the IndexError.
            return
        try:
            position = scale * radians(float(lineParts[1])) + offset
            joint_state = JointStateDY()
            joint_state.name = joint_name
            joint_state.current_pos = position
            joint_state.header.stamp = rospy.Time.now()
            getattr(self, publisher_attr).publish(joint_state)
        except Exception:
            # Was a bare 'except:'; Exception keeps Ctrl-C/SystemExit working.
            rospy.logwarn("Unexpected error:" + joint_name + " " + str(sys.exc_info()[0]))

    def _WriteSerial(self, message):
        """Echo *message* on the debug topic and write it to the serial port."""
        self._SerialPublisher.publish(String(str(self._Counter) + ", out: " + message))
        self._SerialDataGateway.Write(message)

    def __init__(self):
        '''
        Set up the ROS node, the command subscribers, the state publishers
        and the serial gateway (port/baud come from private parameters).
        '''
        self.rate = rospy.get_param("~rate", 100.0)
        self.fake = rospy.get_param("~sim", False)
        self._Counter = 0
        rospy.init_node('gizmos')
        port = rospy.get_param("~port", "/dev/ttyACM0")
        baudRate = int(rospy.get_param("~baudRate", 115200))
        rospy.logwarn("Starting arm controller with serial port: " + port + ", baud rate: " + str(baudRate))
        # subscriptions: radian commands per joint
        rospy.Subscriber('right_lift_joint/command', Float64, self._HandleJoint_2_Command)
        rospy.Subscriber('right_rotate_joint/command', Float64, self._HandleJoint_3_Command)
        rospy.Subscriber('left_lift_joint/command', Float64, self._HandleJoint_6_Command)
        rospy.Subscriber('left_rotate_joint/command', Float64, self._HandleJoint_7_Command)
        rospy.Subscriber('left_gripper_joint/command', Float64, self._HandleJoint_8_Command)
        rospy.Subscriber('right_gripper_joint/command', Float64, self._HandleJoint_9_Command)
        rospy.Subscriber('left_arm_shoulder_roll_joint/command', Float64, self._HandleJoint_1_Command)
        # publishers: raw serial debug plus one state topic per joint
        self._SerialPublisher = rospy.Publisher('arm_serial', String, queue_size=5)
        self._P2_JointPublisher = rospy.Publisher('left_lift_joint/state', JointStateDY, queue_size=5)
        self._P3_JointPublisher = rospy.Publisher("left_rotate_joint/state", JointStateDY, queue_size=5)
        self._P5_JointPublisher = rospy.Publisher("right_rotate_joint/state", JointStateDY, queue_size=5)
        self._P6_JointPublisher = rospy.Publisher("right_lift_joint/state", JointStateDY, queue_size=5)
        self._P7_JointPublisher = rospy.Publisher("left_gripper_finger_joint/state", JointStateDY, queue_size=5)
        self._P8_JointPublisher = rospy.Publisher("right_gripper_finger_joint/state", JointStateDY, queue_size=5)
        self._P1_JointPublisher = rospy.Publisher("left_arm_shoulder_roll_joint/state", JointStateDY, queue_size=5)
        self._SerialDataGateway = SerialDataGateway(port, baudRate, self._HandleReceivedLine)

    def Start(self):
        """Open the serial connection and tell the controller to start."""
        rospy.loginfo("Starting start function")
        self._SerialDataGateway.Start()
        self._WriteSerial('s \r')

    def Stop(self):
        """Tell the controller to stop, then close the serial connection."""
        rospy.loginfo("Stopping")
        self._WriteSerial('r \r')
        # Give the controller time to act on the stop command.  BUG FIX: this
        # was the unqualified name 'sleep' (NameError) -- only the 'time'
        # module is imported at the top of the file.
        time.sleep(5)
        self._SerialDataGateway.Stop()

    def _SendServoCommand(self, code, joint_name, Command):
        """Convert a radian command to servo degrees and send '<code> <deg> \\r'.

        The Arduino expects 0-180 degrees, so the radian request is offset
        by +90 degrees.
        """
        v1 = int(degrees(Command.data)) + 90
        message = '%s %d \r' % (code, v1)
        rospy.logwarn("Sending " + joint_name + " command: " + message)
        self._WriteSerial(message)

    def _HandleJoint_1_Command(self, Command):
        """left_arm_shoulder_roll_joint command (radians)."""
        self._SendServoCommand('g', 'left_arm_shoulder_roll_joint', Command)

    def _HandleJoint_2_Command(self, Command):
        """right_lift_joint command (radians)."""
        self._SendServoCommand('b', 'right_lift_joint', Command)

    def _HandleJoint_3_Command(self, Command):
        """right_rotate_joint command (radians)."""
        self._SendServoCommand('a', 'right_rotate_joint', Command)

    def _HandleJoint_6_Command(self, Command):
        """left_lift_joint command (radians)."""
        self._SendServoCommand('d', 'left_lift_joint', Command)

    def _HandleJoint_7_Command(self, Command):
        """left_rotate_joint command (radians)."""
        self._SendServoCommand('c', 'left_rotate_joint', Command)

    def _HandleJoint_8_Command(self, Command):
        """left_gripper_joint command (radians)."""
        self._SendServoCommand('e', 'left_gripper_joint', Command)

    def _HandleJoint_9_Command(self, Command):
        """right_gripper_joint command (radians)."""
        self._SendServoCommand('f', 'right_gripper_joint', Command)
if __name__ == '__main__':
    # Run the driver until ROS shuts down; make sure the serial link is
    # closed cleanly on interrupt.
    driver = Arm_Driver()
    try:
        driver.Start()
        rospy.spin()
    except rospy.ROSInterruptException:
        driver.Stop()
| 1.734375 | 2 |
FromUser.py | emportent/ToxWitter | 1 | 12772748 |
# ==========================================================================================================================================================
#import the libraries
# ==========================================================================================================================================================
import tweepy
import re
import matplotlib.pyplot as plt
from tweepy import OAuthHandler
from textblob import TextBlob
import numpy as np
# ==========================================================================================================================================================
#initialize the keys
# ==========================================================================================================================================================
# NOTE(review): API credentials are hardcoded in the source (values redacted
# here).  They should be loaded from environment variables or a config file
# that is kept out of version control.
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxx'
consumer_secret = '<KEY>'
access_token = '<KEY>'
access_secret = '<KEY>'
# ==========================================================================================================================================================
#initialize the tokens
# ==========================================================================================================================================================
# Build an authenticated tweepy API client shared by the functions below.
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
api = tweepy.API(auth,timeout=10)
# ==========================================================================================================================================================
#function to clean the tweets: standard procedure
# ==========================================================================================================================================================
def clean_tweet(tweet):
    """Strip @mentions, URLs and punctuation from *tweet*, collapsing runs
    of whitespace into single spaces."""
    pattern = "(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)"
    stripped = re.sub(pattern, " ", tweet)
    return " ".join(stripped.split())
#function to classify the sentiment of a single tweet
def get_tweet_sentiment(tweet):
    """Classify *tweet* as 'positive', 'negative' or 'neutral' using the
    TextBlob polarity of its cleaned text."""
    polarity = TextBlob(clean_tweet(tweet)).sentiment.polarity
    if polarity > 0:
        return 'positive'
    if polarity < 0:
        return 'negative'
    return 'neutral'
#function to get the tweets
def get_tweets(query, count = 10):
    """Fetch up to *count* tweets matching *query* and classify each one.

    Returns a list of {'text', 'sentiment'} dicts.  Retweeted tweets are
    de-duplicated.  On an API error the error is printed and the tweets
    collected so far (possibly an empty list) are returned.
    """
    tweets = []
    try:
        fetched_tweets = api.search(q = query, count = count)
        for tweet in fetched_tweets:
            parsed_tweet = {}
            parsed_tweet['text'] = tweet.text
            parsed_tweet['sentiment'] = get_tweet_sentiment(tweet.text)
            if tweet.retweet_count > 0:
                # Retweets appear multiple times in the results; keep one.
                if parsed_tweet not in tweets:
                    tweets.append(parsed_tweet)
            else:
                tweets.append(parsed_tweet)
    except tweepy.TweepError as e:
        # BUG FIX: this path previously fell through and returned None,
        # which made callers crash on len(None).
        print("Error : " + str(e))
    return tweets
# ==========================================================================================================================================================
#function to get the tweets and plot the graph
# ==========================================================================================================================================================
def main(queryname):
    """Fetch tweets for *queryname*, print a sentiment breakdown with sample
    tweets, and show a bar chart of the percentages."""
    tweets = get_tweets(queryname, count = 200)
    # Robustness fix: with no tweets (API error or empty result) the
    # percentage maths below raised TypeError/ZeroDivisionError.
    if not tweets:
        print("No tweets found for query: {}".format(queryname))
        return
    ptweets = [tweet for tweet in tweets if tweet['sentiment'] == 'positive']
    print("Positive tweets percentage: {} %".format(100*len(ptweets)/len(tweets)))
    ntweets = [tweet for tweet in tweets if tweet['sentiment'] == 'negative']
    print("Negative tweets percentage: {} %".format(100*len(ntweets)/len(tweets)))
    # Neutral is whatever is neither positive nor negative.
    print("Neutral tweets percentage: {} % ".format(100*(len(tweets) - len(ntweets) - len(ptweets))/len(tweets)))
    print("\n\nPositive tweets:")
    for tweet in ptweets[:10]:
        print(tweet['text'])
    print("\n\nNegative tweets:")
    for tweet in ntweets[:10]:
        print(tweet['text'])
    # Bar chart of the three percentages.
    objects = ['Positive','Negative','Neutral']
    y_pos = np.arange(len(objects))
    performance = [100*len(ptweets)/len(tweets),100*len(ntweets)/len(tweets),100*(len(tweets) - len(ntweets) - len(ptweets))/len(tweets)]
    plt.bar(y_pos, performance, align='center', alpha=0.5)
    plt.xticks(y_pos, objects)
    plt.ylabel('Percentage')
    plt.title('Sentiment')
    plt.show()
# ==========================================================================================================================================================
# USAGE
# ==========================================================================================================================================================
# Guard the demo invocation so that importing this module does not trigger a
# network call and a blocking plot window (previously ran at import time).
if __name__ == '__main__':
    main("PyTorch")
| 1.851563 | 2 |
calc.py | rixwhite/calculator | 0 | 12772749 | import pygame, sys
from pygame.locals import *
from math import cos, sin, sqrt, tan, pi
# Initialize pygame
pygame.init()
pygame.display.set_caption('Calculator')
clock = pygame.time.Clock()
SURF = pygame.display.set_mode((450, 550))          # main window surface
font = pygame.font.SysFont(None, 30)                # button label font
calc = pygame.font.SysFont('ocraextended', 25)      # display/readout font
FPS = 60
# Colour palette (RGB).
WHITE = (255, 255, 255)
BLUE = (0, 0, 120)
BLACK = (0, 0, 0)
GREEN = (36, 204, 68)
mouse_pos = (0, 0)        # last known mouse position (updated on MOUSEMOTION)
equation = ''             # the expression string currently being built
y = 0                     # NOTE(review): appears unused below
btn_width = 45            # buttons are btn_width x btn_width squares
screen = pygame.Rect(50, 50, 300, 50)               # the calculator display area
# 1x1 rect that tracks the cursor; used for button hit-testing below.
mouse = pygame.draw.rect(SURF, WHITE, Rect(mouse_pos, (1, 1)))
# String aliases used as dictionary keys for each button's attributes.
text, pos, rect, face, text_rect = 'text', 'pos', 'rect', 'face', 'text_rect'
# Button layout: label and top-left position; rect/face/text_rect are filled
# in by the preparation loop below.
buttons = {
    'btn_clear': {text: 'C', pos: (100, 150)},
    'btn_bksp': {text: '<x', pos: (150, 150)},
    'btn_left': {text: '(', pos: (200, 150)},
    'btn_right': {text: ')', pos: (250, 150)},
    'btn_7': {text: '7', pos: (100, 200)},
    'btn_8': {text: '8', pos: (150, 200)},
    'btn_9': {text: '9', pos: (200, 200)},
    'btn_divide': {text: '/', pos: (250, 200)},
    'btn_4': {text: '4', pos: (100, 250)},
    'btn_5': {text: '5', pos: (150, 250)},
    'btn_6': {text: '6', pos: (200, 250)},
    'btn_multiply': {text: '*', pos: (250, 250)},
    'btn_1': {text: '1', pos: (100, 300)},
    'btn_2': {text: '2', pos: (150, 300)},
    'btn_3': {text: '3', pos: (200, 300)},
    'btn_minus': {text: '-', pos: (250, 300)},
    'btn_decimal': {text: '.', pos: (100, 350)},
    'btn_0': {text: '0', pos: (150, 350)},
    'btn_equals': {text: '=', pos: (200, 350)},
    'btn_plus': {text: '+', pos: (250, 350)},
    'btn_cos': {text: 'cos(', pos: (100, 400)},
    'btn_tan': {text: 'tan(', pos: (150, 400)},
    'btn_sin': {text: 'sin(', pos: (200, 400)},
    'btn_sqrt': {text: 'sqrt(', pos: (250, 400)},
    'btn_pi': {text: 'pi', pos: (100, 450)},
    'btn_modulo': {text: '%', pos: (150, 450)}
}
# Characters accepted directly from the keyboard.
keys = "1234567890."
# Pre-compute each button's clickable rect and rendered label surface.
for button in buttons:
    b_pos = buttons[button][pos]
    b_text = buttons[button][text]
    # Create a rectangle object and store it in the dict
    rect_params = list(b_pos)
    rect_params.extend((btn_width, btn_width))
    b_rectangle = pygame.Rect(rect_params)
    buttons[button][rect] = b_rectangle
    # Create a "face" and store it in the dict
    if b_text[-1] == '(' and b_text[0] != '(':
        b_text = b_text[:-1] # trim the trailing paren off the math functions
    b_face = font.render(b_text, True, WHITE)
    b_text_rect = b_face.get_rect()
    b_text_rect.center = b_rectangle.center # center the text in the rect
    buttons[button][face] = b_face
    buttons[button][text_rect] = b_text_rect
# Main event loop: handle mouse clicks on buttons and keyboard input, then
# redraw the whole UI every frame.
# SECURITY NOTE(review): the expression is evaluated with eval().  Input is
# limited to the on-screen buttons and the 'keys' whitelist, but eval() is
# still risky if this input path is ever widened -- consider a proper
# expression parser (e.g. ast.literal_eval is NOT sufficient for math, but a
# small tokenizer would be).
while True:
    try:
        for event in pygame.event.get():
            if event.type == pygame.MOUSEMOTION:
                mouse_pos = event.pos
            if event.type == pygame.MOUSEBUTTONDOWN:
                # Find which (if any) button the 1x1 mouse rect is over.
                for button in buttons:
                    if mouse.colliderect(buttons[button][rect]):
                        current_button = buttons[button][text]
                        if current_button == '=' and equation == '':
                            equation = ''
                        elif current_button == '=':
                            # Evaluate the built-up expression (see note above).
                            equation = f"{eval(equation)}"
                        elif current_button == 'C':
                            equation = ''
                        elif current_button == '<x':
                            # Backspace: drop the last character.
                            equation = equation[:-1]
                        elif current_button == 'pi':
                            equation += str(pi)
                        else:
                            equation += buttons[button][text]
            if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                # Keyboard input: operators need shift-combos on US layouts.
                mods = pygame.key.get_mods()
                if event.key == K_c:
                    equation = ''
                elif event.key == K_EQUALS and mods & pygame.KMOD_SHIFT:
                    equation += '+'
                elif event.key == K_MINUS:
                    equation += '-'
                elif event.key == K_SLASH:
                    equation += '/'
                elif event.key == K_8 and mods & pygame.KMOD_SHIFT:
                    equation += '*'
                elif event.key == K_5 and mods & pygame.KMOD_SHIFT:
                    equation += '%'
                elif event.key == K_9 and mods & pygame.KMOD_SHIFT:
                    equation += '('
                elif event.key == K_0 and mods & pygame.KMOD_SHIFT:
                    equation += ')'
                elif event.key == K_EQUALS or event.key == K_RETURN:
                    equation = f"{eval(equation)}"
                elif event.key == K_BACKSPACE:
                    equation = equation[:-1]
                elif pygame.key.name(event.key) in keys:
                    # Digits and the decimal point are appended verbatim.
                    equation += pygame.key.name(event.key)
        # --- drawing phase: clear, then draw display, buttons and equation ---
        SURF.fill(WHITE)
        mouse = pygame.draw.rect(SURF, WHITE, Rect(mouse_pos, (1, 1)))
        pygame.draw.rect(SURF, BLACK, screen, 0)
        for button in buttons:
            pygame.draw.rect(SURF, BLUE, buttons[button][rect], 0)
            SURF.blit(buttons[button][face], buttons[button][text_rect])
        # Right-align the current expression inside the display rect.
        equation_text = calc.render(equation, True, GREEN)
        equation_rect = equation_text.get_rect()
        equation_rect.centery = screen.centery
        equation_rect.right = screen.right - 5
        SURF.blit(equation_text, equation_rect)
        clock.tick(FPS)
        pygame.display.update()
    # Any failure while evaluating the expression shows 'ERROR' instead of
    # crashing the loop.
    except SyntaxError:
        equation = 'ERROR'
    except NameError:
        equation = 'ERROR'
    except ZeroDivisionError:
        equation = 'ERROR'
| 2.921875 | 3 |
reserved/forms.py | Strangemother/coffee | 0 | 12772750 | from django import forms
from django.forms import ModelForm
from models import Venue, Location
from django.forms.models import inlineformset_factory
class VenueForm(ModelForm):
    """Model form for Venue with free-text address and lat/lng inputs."""

    full_address = forms.CharField(
        label='Full Address',
        help_text='Type a new address')
    latlng = forms.CharField(
        label='Lat/Lng',
        help_text='Lat Long of the address')

    class Meta:
        model = Venue

    def clean(self):
        """Run the standard ModelForm validation and return the cleaned data."""
        super(VenueForm, self).clean()
        return self.cleaned_data
| 2.40625 | 2 |