repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
KyleKing/recipes | tests/configuration.py | Python | mit | 758 | 0 | """Global variables for testing."""
from pathlib import Path
from calcipy.file_helpers import delete_dir, ensure_dir
from calcipy.log_helpers import activate_debug_logging
from recipes import __pkg_name__
activate_debug_logging(pkg_names=[__pkg_name__], clear_log=True)
TEST_DIR = Path(__file__).resolve().parent
"""Path to the `test` directory that contains this file and all other tests."""
TEST_DATA_DIR = TEST_DIR / 'data'
"""Path to subdirectory with test data within t | he Test Directory."""
TEST_TMP_CACHE = TEST_DIR / '_tmp_cache'
"""Path to the temporary cache folder in the Test directory."""
def clear_test_cache() -> None:
"""Remove the test cache directory if present."""
delete_dir(TEST_ | TMP_CACHE)
ensure_dir(TEST_TMP_CACHE)
|
rootAir/rootAir | finance/type_launch.py | Python | gpl-2.0 | 1,196 | 0.002508 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from datetime import datetime
from django.contrib.contenttypes.models import *
from .exceptions import IncorrectCellLabel
from utils.util import *
from django.db.models import Sum, Max
from django.contrib import messages
from django.db import transaction
from django.conf import setting | s
import os
class TypeLaunch(models.Model):
# id = models.IntegerField(primary_key=True) # AutoField?
type_name = models.CharField(max_length=100, unique=True)
cost_fixo = models.BooleanField(default=False, db_index=True)
investment = models.BooleanField(default=False, db_index=True)
value_fixed = models | .DecimalField(max_digits=8, decimal_places=2)
synchronized = models.CharField(max_length=1, choices=STATUS_CHOICES)
def __str__(self):
return self.type_name
class Meta:
managed = False
db_table = 'finance_typelaunch'
def save(self, *args, **kwargs):
if settings.DATABASE_LOCAL:
self.synchronized = 'L' #local
else:
self.synchronized = 'H' #heroku
super(self.__class__, self).save(*args, **kwargs) |
hsanjuan/dccpi | setup.py | Python | gpl-3.0 | 2,524 | 0 | """
Copyright (C) 2016 Hector Sanjuan
This file is part of "dccpi".
"dccpi" is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
"dccpi" is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with "dccpi". If not, see <http://www.gnu.org/licenses/>.
"""
from setuptools import setup, Extension
setup(
name="dccpi",
packages=["dccpi"],
version="1.3.1",
description="A Python NMRA DCC protocol implementation for RaspberryPi",
author="Hector Sanjuan",
author_email="code@hector.link",
url="https://github.com/hsanjuan/dccpi",
download_url="",
license="GNU General Public License v3 (GPLv3)",
keywords=["dcc", "nrma", "pi", "raspberry", "modelling",
"train", "decoder", "command", "station", "loco",
"locomotive"],
install_requires=[
'bitstring',
],
ext_modules=[
Extension('dcc_rpi_encoder_c',
sources=['extensions/dcc_rpi_encoder_c.c'],
libraries=['wiringPi'])
],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Development Status :: 4 - Beta",
"Environment :: Plugins",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Topic :: System :: Networking",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description="""\
==================================================================
NRMA Digital Command Control (DCC) | implementation for Raspberry Pi
==================================================================
This module implements the DCC pr | otocol for controlling model trains using a
Raspberry Pi.
It is able to output direction and speed DCC-encoded packets on one of the
RPi GPIO pins. It needs WiringPi libraries to work.
Please visit the github page for more information:
https://github.com/hsanjuan/dccpi.
"""
)
|
SimenB/thefuck | thefuck/output_readers/shell_logger.py | Python | mit | 1,616 | 0 | import json
import os
import socket
try:
from shutil import get_terminal_size
except ImportError:
from backports.shutil_get_terminal_size import get_terminal_size
import pyte
from .. import const, logs
def _get_socket_path():
return os.environ.get(const.SHELL_LOGGER_SOCKET_ENV)
def is_available():
"""Returns `True` if shell logger socket available.
:rtype: book
"""
path = _get_socket_path()
if not path:
return False
return os.path.exists(path)
def _get_last_n(n):
with socket.socket(socket.AF_UNIX) as client:
client.connect(_get_socket_path())
request = json.dumps({
"type": "list",
"count": n,
}) + '\n'
client.sendall(request.encode('utf-8'))
response = client.makefile().readline()
| return json.loads(response)['commands']
def _get_output_lines(output):
lines = output.split('\n')
screen = pyte.Screen(get_terminal_size().columns, len(lines))
stream = pyte.Stream(screen)
stream.feed('\n'.join(lines))
return screen.display
def get_output(script):
"""Gets command output from shell logger."""
with logs.debug_time(u'Read output from external shell log | ger'):
commands = _get_last_n(const.SHELL_LOGGER_LIMIT)
for command in commands:
if command['command'] == script:
lines = _get_output_lines(command['output'])
output = '\n'.join(lines).strip()
return output
else:
logs.warn("Output isn't available in shell logger")
return None
|
Outernet-Project/librarian-ondd | librarian_ondd/routes.py | Python | gpl-3.0 | 2,899 | 0 | import logging
from bottle import request
from bottle_utils.ajax import roca_view
from bottle_utils.i18n import lazy_gettext as _, i18n_url
from librarian_core.contrib.templates.renderer import template, view
from .forms import ONDDForm
from .consts import get_form_data_for_preset
@view('ondd/_status')
def get_signal_status():
ondd_client = request.app.supervisor.exts.ondd
snr_min = request.app.config.get('ondd.snr_min', 0.2)
snr_max = request.app.config.get('ondd.snr_max', 0.9)
return dict(status=ondd_client.get_status(), SNR_MIN=snr_min,
SNR_MAX=snr_max)
@roca_view('ondd/settings', 'ondd/_settings_form', template_func=template)
def show_settings_form():
return dict(form=ONDDForm())
@roca_view('ondd/settings', 'ondd/_settings_form', template_func=template)
def set_settings():
preset = request.params.get('preset')
if not preset:
return dict(
form=ONDDForm(request.forms),
# Translators, message shown when user does not select a satellite
# preset nor 'Custom' option to enter custom data.
message=_("Please select a satellite or select 'Custom'"))
try:
preset = int(preset)
except (TypeError, ValueError):
preset = 0
form_data = get_form_data_for_preset(preset, request.forms)
form_data.update({'lnb': request.params.get('lnb')})
form = ONDDForm(form_data)
if not form.is_valid():
return dict(form=form)
logging.info('ONDD: tuner settings updated')
request.app.supervisor.exts.setup.append({'ondd': form.processed_data})
return dict(form=form,
message=_('Transponder configuration saved.'),
redirect_url=i18n_url('dashboard:main'))
@view('ondd/_file_list')
def show_file_list():
ondd_client = request.app.supervisor.exts.ondd
return dict(files=ondd_client.get_transfers())
@view('ondd/_cache_status')
def show_cache_status():
cache | _max = request.app.c | onfig['ondd.cache_quota']
default = {'total': cache_max,
'free': cache_max,
'used': 0,
'alert': False}
cache_status = request.app.supervisor.exts.cache.get('ondd.cache')
return dict(cache_status=cache_status or default)
def routes(config):
skip_plugins = config['app.skip_plugins']
return (
('ondd:status', get_signal_status,
'GET', '/ondd/status/', dict(unlocked=True, skip=skip_plugins)),
('ondd:files', show_file_list,
'GET', '/ondd/files/', dict(unlocked=True, skip=skip_plugins)),
('ondd:cache_status', show_cache_status,
'GET', '/ondd/cache/', dict(unlocked=True, skip=skip_plugins)),
('ondd:settings', show_settings_form,
'GET', '/ondd/settings/', dict(unlocked=True)),
('ondd:settings', set_settings,
'POST', '/ondd/settings/', dict(unlocked=True)),
)
|
Jelloeater/mineOSplayerStats | report_generator.py | Python | gpl-2.0 | 9,875 | 0.002025 | #!/usr/bin/env python2.7
"""A python project for managing Minecraft servers hosted on MineOS (http://minecraft.codeemo.com)
"""
from datetime import datetime
import getpass
import json
import smtplib
import sys
import os
import logging
import argparse
from time import sleep
import db_controller
import keyring
from keyring.errors import PasswordDeleteError
sys.path.append('/usr/games/minecraft') # Strange path issue, only appears when run from local console, not IDE
__author__ = "Jesse S"
__license__ = "GNU GPL v2.0"
__version__ = "1.2"
__email__ = "jelloeater@gmail.com"
LOG_FILENAME = "serverMonitor.log"
def main():
""" Take arguments and direct program """
parser = argparse.ArgumentParser(description="A MineOS Player Stats Database Report Generator"
" (http://github.com/jelloeater/mineOSplayerStats)",
version=__version__,
epilog="Please specify mode")
report_group = parser.add_argument_group('Modes')
report_group.add_argument("-g",
"--generate_report",
help="Generate Weekly Report",
action="store_true")
report_group.add_argument("-s",
"--report_scheduler",
help="Automatically Generate Weekly Report",
action="store_true")
email_group = parser.add_argument_group('E-mail Config')
email_group.add_argument("-e",
"--configure_email_settings",
help="Configure email alerts",
action="store_true")
email_group.add_argument("-r",
"--remove_email_password_store",
help="Removes password stored in system keyring",
action="store_true")
db_group = parser.add_argument_group('Database Config')
db_group.add_argument("-b",
"--configure_db_settings",
help="Configure database settings",
action="store_true")
db_group.add_argument("-p",
"--remove_db_password_store",
help="Removes password stored in system keyring",
action="store_true")
parser.add_argument("-d",
"--delay",
action="store",
type=int,
default=60,
help="Wait x second between checks (ex. 60)")
parser.add_argument("--debug",
action="store_true",
help="Debug Mode Logging")
args = parser.parse_args()
if args.debug:
logging.basicConfig(format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.DEBUG)
logging.debug(sys.path)
logging.debug(args)
logging.debug('Debug Mode Enabled')
else:
logging.basicConfig(filename=LOG_FILENAME,
format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.WARNING)
mode = modes(sleep_delay=args.delay)
# Create new mode object for flow, I'll buy that :)
if len(sys.argv) == 1: # Displays help and lists servers (to help first time users)
parser.print_help()
sys.exit(1)
if args.remove_db_password_store:
db_controller.db_helper().clear_password_store()
if args.configure_db_settings:
db_controller.db_helper().configure()
if args.remove_email_password_store:
gmail().clear_password_store()
if args.configure_email_settings:
gmail().configure()
# Magic starts here
if args.generate_report:
db_controller.db_helper().test_db_setup()
gmail().test_login()
mode.generate_rep | ort()
if args.report_scheduler:
db | _controller.db_helper().test_db_setup()
gmail().test_login()
mode.report_scheduler()
class modes(object): # Uses new style classes
def __init__(self, sleep_delay):
self.sleep_delay = sleep_delay
def sleep(self):
try:
sleep(self.sleep_delay)
except KeyboardInterrupt:
print("Bye Bye.")
sys.exit(0)
def report_scheduler(self):
# TODO Interval should be in days or hours, NOT seconds
self.generate_report()
self.sleep()
@staticmethod
def generate_report(number_of_days=7):
conn, cur = db_controller.db_access().open_connection()
query = '''SELECT * FROM player_activity WHERE "Time_Stamp" >= (now() - '{0} day'::INTERVAL);'''
cur.execute(query.format(number_of_days))
data = cur.fetchall()
db_controller.db_access.close_connection(conn, cur)
logging.debug('DB dump')
logging.debug(data)
# Generate list of server names from query
server_names = set([x[4] for x in data])
server_data = []
# Individual Servers
for i in server_names:
minutes_used = len([x for x in data if x[4] == i])
server_data.append((i, minutes_used))
# Total Usage for period
minutes_used = 0
for i in server_data:
minutes_used += i[1]
msg = ['During the last ' + str(number_of_days) + ' days: \n\n'] # Email Message Body
for i in server_data:
msg.append(i[0])
msg.append(' has used ')
msg.append(str(i[1]))
msg.append(' minute(s). \n')
msg.append('\nA total of ' + str(minutes_used) + ' minute(s) were used.')
msg.append('\n\nReport Generated @ ' + str(datetime.now()))
subj = "Minecraft Server Usage Report"
gmail().send(subject=subj, text=''.join(msg))
# Create gmail obj
class gmailSettings():
""" Container class for load/save """
USERNAME = ""
# Password should be stored with keyring
SEND_ALERT_TO = [] # Must be a list
class SettingsHelper(gmailSettings):
SETTINGS_FILE_PATH = "email_settings.json"
KEYRING_APP_ID = 'mineOSPlayerStats_gmail'
@classmethod
def loadSettings(cls):
if os.path.isfile(cls.SETTINGS_FILE_PATH):
try:
with open(cls.SETTINGS_FILE_PATH) as fh:
gmailSettings.__dict__ = json.loads(fh.read())
except ValueError:
logging.error("Settings file has been corrupted, reverting to defaults")
os.remove(cls.SETTINGS_FILE_PATH)
logging.debug("Settings Loaded")
@classmethod
def saveSettings(cls):
with open(cls.SETTINGS_FILE_PATH, "w") as fh:
fh.write(json.dumps(gmailSettings.__dict__, sort_keys=True, indent=0))
logging.debug("Settings Saved")
class gmail(object, SettingsHelper):
""" Lets users send email messages """
# TODO Maybe implement other mail providers
def __init__(self):
self.loadSettings()
self.PASSWORD = keyring.get_password(self.KEYRING_APP_ID, self.USERNAME) # Loads password from secure storage
def test_login(self):
try:
server = smtplib.SMTP("smtp.gmail.com", 587) # or port 465 doesn't seem to work!
server.ehlo()
server.starttls()
server.login(self.USERNAME, self.PASSWORD)
server.close()
except smtplib.SMTPAuthenticationError:
print("Username password mismatch")
sys.exit(1)
def send(self, subject, text):
message = "\From: {0}\nTo: {1}\nSubject: {2}\n\n{3}".format(self.USERNAME,
", ".join(self.SEND_ALERT_TO),
subject,
text)
logging.info("Sending email")
server = smtplib.SMTP("smtp.gmail.com", 587) # or po |
ocozalp/Algorithms | search/uninformed_search.py | Python | apache-2.0 | 1,797 | 0.002226 | def bfs(node, target, comparator=lambda x, y: x == y):
queue = [node]
visited_nodes = []
while len(queue) != 0:
current_node = queue.pop(0)
if current_node not in visited_nodes:
if comparator(current_node.value, target):
return current_node
queue.extend(current_node)
visited_nodes.append(current_node)
return None
def dfs(node, target, comparator=lambda x, y: x == y):
queue = [node]
visited_nodes = []
while len(queue) != 0:
current_node = queue.pop()
if current_node not in visited_nodes:
if comparator(current_node.value, target):
return current_node
queue.extend(current_node)
visited_nodes.append(current_node)
return None
def dls(node, target, limit, comparator=lambda x, y: x == y):
queue = [(node, 0)]
visited_nodes = []
max_level = 0
while len(queue) != 0:
current_node, current_node_level = queue.pop()
max_level = max(max_level, current_node_level)
if current_node_level <= limit and current_node not in visited_nodes:
if comparator(current_node.value, target):
return current_node, current_node_level
if current_node_level < limit:
queue.extend([(child, c | urrent_node_level + 1) for child in current_node])
visited_nodes.append(current_node)
return None, max_level
def iterative_deepening_search(node, target, comparator=lambda x, y: x == y):
level = 0
found_level = 0
while level == found_level:
level += 1
result, found_level = dls(node, | target, level, comparator)
if result is not None:
return result
return None |
Ultimaker/Cura | plugins/FirmwareUpdateChecker/FirmwareUpdateChecker.py | Python | lgpl-3.0 | 3,835 | 0.007562 | # Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from PyQt5.QtCore import QUrl
from PyQt5.QtGui import QDesktopServices
from typing import Set
from UM.Extension import Extension
from UM.Applicatio | n import Application
from UM.Logger import Logger
from UM.i18n import i18nCatalog
from UM.Settings.ContainerRegistry import ContainerRegistry
from .FirmwareUpdateCheckerJob import FirmwareUpdateCheckerJob
from .FirmwareUpdateCheckerMessage import FirmwareUpdateCheckerMessage
i18n_catalog = i18nCatalog("cura")
class FirmwareUpdateChecker(Extension):
"""This Extension checks for new versions of the firmware based on the latest checked versi | on number.
The plugin is currently only usable for applications maintained by Ultimaker. But it should be relatively easy
to change it to work for other applications.
"""
def __init__(self) -> None:
super().__init__()
# Listen to a Signal that indicates a change in the list of printers, just if the user has enabled the
# "check for updates" option
Application.getInstance().getPreferences().addPreference("info/automatic_update_check", True)
if Application.getInstance().getPreferences().getValue("info/automatic_update_check"):
ContainerRegistry.getInstance().containerAdded.connect(self._onContainerAdded)
self._check_job = None
self._checked_printer_names = set() # type: Set[str]
def _onActionTriggered(self, message, action):
"""Callback for the message that is spawned when there is a new version."""
if action == FirmwareUpdateCheckerMessage.STR_ACTION_DOWNLOAD:
machine_id = message.getMachineId()
download_url = message.getDownloadUrl()
if download_url is not None:
if QDesktopServices.openUrl(QUrl(download_url)):
Logger.log("i", "Redirected browser to {0} to show newly available firmware.".format(download_url))
else:
Logger.log("e", "Can't reach URL: {0}".format(download_url))
else:
Logger.log("e", "Can't find URL for {0}".format(machine_id))
def _onContainerAdded(self, container):
# Only take care when a new GlobalStack was added
from cura.Settings.GlobalStack import GlobalStack # otherwise circular imports
if isinstance(container, GlobalStack):
self.checkFirmwareVersion(container, True)
def _onJobFinished(self, *args, **kwargs):
self._check_job = None
def checkFirmwareVersion(self, container = None, silent = False):
"""Connect with software.ultimaker.com, load latest.version and check version info.
If the version info is different from the current version, spawn a message to
allow the user to download it.
:param silent: type(boolean) Suppresses messages other than "new version found" messages.
This is used when checking for a new firmware version at startup.
"""
container_name = container.definition.getName()
if container_name in self._checked_printer_names:
return
self._checked_printer_names.add(container_name)
metadata = container.definition.getMetaData().get("firmware_update_info")
if metadata is None:
Logger.log("i", "No machine with name {0} in list of firmware to check.".format(container_name))
return
self._check_job = FirmwareUpdateCheckerJob(silent = silent,
machine_name = container_name, metadata = metadata,
callback = self._onActionTriggered)
self._check_job.start()
self._check_job.finished.connect(self._onJobFinished)
|
iAmMrinal0/django_moviealert | moviealert/settings/base.py | Python | mit | 4,071 | 0 | """
Django settings for moviealert project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.core.exceptions import ImproperlyConfigured
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
def get_env_variable(var_name):
try:
return os.environ[var_name]
except KeyError:
error_msg = "Set env variable: {0}".format(var_name)
raise ImproperlyConfigured(error_msg)
SECRET_KEY = get_env_variable("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'moviealert',
)
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'moviealert.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'moviealert.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': get_env_variable("DATABASE_NAME"),
'USER': get_env_variable("DATABASE_USER"),
'PASSWORD': get_env_variable("DATABASE_PASSWORD"),
'HOST': '',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE | = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS | , JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
STATIC_ROOT = '/static/'
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
INSTALLED_APPS += (
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
)
SITE_ID = 1
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "none"
SOCIALACCOUNT_QUERY_EMAIL = True
LOGIN_REDIRECT_URL = "/"
ALLOWED_DATE_FORMAT = (
'%d-%m-%Y', '%d/%m/%Y',
'%d/%m/%y')
# CELERY STUFF
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Kolkata'
# MAIL STUFF
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = get_env_variable("GMAIL_ID")
EMAIL_HOST_PASSWORD = get_env_variable("GMAIL_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
|
ryfx/modrana | modules/pyrender/OsmTileData.py | Python | gpl-3.0 | 3,691 | 0.003522 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Download OSM data covering the area of a slippy-map tile
#
# Features:
# * Recursive (downloads are all at z15, and merged if necessary to get
# a larger area)
# * Cached (all downloads stored in cache/z/x/y/data.osm)
#
# DON'T RUN THIS ON LARGE AREAS WITHOUT ASKING THE OPERATOR OF THE
# API SERVER. Currently it's limited to downloading a z-13 area or smaller
#----------------------------------------------------------------------------
# Copyright 2008, Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from core.tilen | ames import *
from urllib import *
from OsmMerge import OsmMerge
import os
def GetOsmTileData(z, x, y, AllowSplit=False):
"""Download OSM data for the region covering a slippy-map tile"""
if x < 0 or y < 0 or z < 0 or z > 25:
| print("Disallowed %d,%d at %d" % (x, y, z))
return
DownloadLevel = 15 # All primary downloads are done at a particular zoom level
MergeLevels = 2 # How many layers 'below' the download level to go
directory = 'cache/%d/%d' % (z, x)
filename = '%s/%d.osm' % (directory, y)
if not os.path.exists(directory):
os.makedirs(directory)
if z == DownloadLevel:
# Download the data
(S, W, N, E) = tileEdges(x, y, z)
# Which API to use
if 1:
URL = 'http://%s/api/0.5/map?bbox=%f,%f,%f,%f' % ('api.openstreetmap.org', W, S, E, N)
else:
URL = 'http://%s/api/0.5/*[bbox=%f,%f,%f,%f]' % ('www.informationfreeway.org', W, S, E, N)
if not os.path.exists(filename): # TODO: allow expiry of old data
print("Downloading %s\n from %s" % (filename, URL))
try:
urlretrieve(URL, filename)
print("Done")
except:
print("Error downloading " + filename)
# unlink(filename)
return
else:
print("Using cached %s" % filename)
return filename
elif z < DownloadLevel - MergeLevels:
print("Zoom %d not allowed" % z)
return
elif z < DownloadLevel:
# merge smaller tiles
filenames = []
for i in (0, 1):
for j in (0, 1):
lx = x * 2 + i
ly = y * 2 + j
lz = z + 1
print("Downloading subtile %d,%d at %d" % (x, y, z))
# download (or otherwise obtain) each subtile
filenames.append(GetOsmTileData(lz, lx, ly, AllowSplit))
# merge them together
print("Merging tiles together")
OsmMerge(filename, filenames)
return filename
else:
# use larger tile
while z > DownloadLevel:
z -= 1
x = int(x / 2)
y = int(y / 2)
return GetOsmTileData(z, x, y)
if __name__ == "__main__":
"""test mode"""
GetOsmTileData(14, 7788, 6360, True)
|
Prakhash/security-tools | external/django-DefectDojo-1.2.1/dojo/templatetags/event_tags.py | Python | apache-2.0 | 2,640 | 0.001136 | import re
from django import template
from django import forms
register = template.Library()
def _process_field_attributes(field, attr, process):
# split attribute name and value from 'attr:value' string
params = attr.split(':', 1)
attribute = params[0]
value = params[1] if len(params) == 2 else ''
# decorate field.as_widget method with updated attributes
old_as_widget = field.as_widget
def as_widget(self, widget=None, attrs=None, only_initial=False):
attrs = attrs or {}
process(widget or self.field.widget, attrs, attribute, value)
return old_as_widget(widget, attrs, only_initial)
bound_method = type(old_as_widget)
try:
field.as_widget = bound_method(as_widget, field, field.__class__)
except TypeError: # python 3
field.as_widget = bound_method(as_widget, field)
return field
@register.filter
def subtract(value, arg):
return value - arg
@register.filter
def modulo(num, val):
return num % val
@register.filter
def addcss(field, attr):
def process(widget, attrs, attribute, value):
if attrs.get(attribute):
attrs[attribute] += ' ' + value
elif widget.attrs.get(attribute):
attrs[attribute] = widget.attrs[attribute] + ' ' + value
else:
attrs[attribute] = value
return _process_field_attributes(field, attr, process)
@register.filter
def is_checkbox(field):
return isinstance(field.field.widget, forms.CheckboxInput)
@register.filter
def is_multiple_checkbox(field):
return isinstance(field.fiel | d.widget, forms.CheckboxSelectMultiple)
@register.filter
def is_radio(field):
return isinstance(field.field.widget, forms.RadioSelect)
@r | egister.filter
def is_file(field):
return isinstance(field.field.widget, forms.FileInput) or \
isinstance(field, forms.ClearableFileInput)
@register.filter
def sum_dict(d):
total = 0
for key, value in d.items():
total += value
return total
@register.filter
def nice_title(title):
pat = re.compile(r'Finding [0-9][0-9][0-9]:*')
s = pat.split(title, 2)
try:
ret = s[1]
return ret
except:
return title
@register.filter
def pad_zeroes(num):
return str(num).zfill(3)
@register.filter
def hash(h, key):
return h[key]
@register.filter
def getZero(h, key):
return h[key][0]
@register.filter
def getOne(h, key):
return h[key][1]
@register.filter
def getTwo(h, key):
return h[key][2]
@register.filter
def getThree(h, key):
return h[key][3]
@register.filter
def getFour(h, key):
return h[key][4]
|
dmccloskey/SBaaS_isotopomer | SBaaS_isotopomer/stage01_isotopomer_peakData_io.py | Python | mit | 1,934 | 0.021717 | # System
import json
from .stage01_isotopomer_peakData_query import stage01_isotopomer_peakData_query
from SBaaS_base.sbaas_template_io import sbaas_template_io
# Resources
from io_utilities.base_importData | import base_importData
from io_utilities.base_exportData import base_exportData
class stage01_isotopomer_peakData_io(stage01_isotopomer_peakData_query,sbaas_template_io):
def import_peakData_add(self, filename, experiment_id, samplename, precursor_formula, met_id,
mass_units_I='Da',intensity_units_I='cps', scan_type_I='EPI', header_I=True,
add_data_I=True):
'''table ad | ds'''
data = base_importData();
try:
data.read_tab_fieldnames(filename,['Mass/Charge','Intensity'],header_I);
#data.read_tab_fieldnames(filename,['mass','intensity','intensity_percent'],header_I);
data.format_data();
if add_data_I:
self.add_peakData(data.data, experiment_id, samplename, precursor_formula, met_id,
mass_units_I,intensity_units_I, scan_type_I);
data.clear_data();
except IOError as e:
print(e);
def import_peakList_add(self, filename, experiment_id, samplename, precursor_formula, met_id,
mass_units_I='Da',intensity_units_I='cps',
centroid_mass_units_I='Da', peak_start_units_I='Da',
peak_stop_units_I='Da', resolution_I=None, scan_type_I='EPI'):
'''table adds'''
data = base_importData();
data.read_tab_fieldnames(filename,['mass','centroid_mass','intensity','peak_start','peak_end','width','intensity_percent']);
data.format_data();
self.add_peakList(data.data, experiment_id, samplename, met_id,
mass_units_I,intensity_units_I, scan_type_I);
data.clear_data(); |
daweim0/Just-some-image-features | lib/networks/net_labeled_concat_features_shallower.py | Python | mit | 12,807 | 0.004607 | import tensorflow as tf
from networks.network import Network
from fcn.config import cfg
zero_out_module = tf.load_op_library('lib/triplet_flow_loss/triplet_flow_loss.so')
class custom_network(Network):
def __init__(self):
self.inputs = cfg.INPUT
# self.input_format = input_format
self.num_output_dimensions = 2 # formerly num_classes
self.num_units = cfg.TRAIN.NUM_UNITS
self.scale = 1 / cfg.TRAIN.SCALES_BASE[0]
self.vertex_reg = cfg.TRAIN.VERTEX_REG
self.data_left = tf.placeholder(tf.float32, shape=[None, None, None, 3])
self.data_right = tf.placeholder(tf.float32, shape=[None, None, None, 3])
self.gt_flow = tf.placeholder(tf.float32, shape=[None, None, None, self.num_output_dimensions])
self.occluded = tf.placeholder(tf.int32, shape=[None, None, None, 1])
self.labels_left = tf.placeholder(tf.int32, shape=[None, None, None, None])
self.labels_right = tf.placeholder(tf.int32, shape=[None, None, None, None])
self.keep_prob = tf.placeholder(tf.float32)
self.queue_size = 20
# define a queue
self.q = tf.FIFOQueue(self.queue_size, [tf.float32, tf.float32, tf.float32, tf.int32, tf.int32, tf.int32, tf.float32])
self.enqueue_op = self.q.enqueue([self.data_left, self.data_right, self.gt_flow, self.occluded, self.labels_left, self.labels_right, self.keep_prob])
data_left, data_right, gt_flow, occluded, left_labels, right_labels, self.keep_prob_queue = self.q.dequeue()
self.layers = dict({'data_left': data_left, 'data_right': data_right, 'gt_flow': gt_flow, 'occluded': occluded,
'left_labels': left_labels, "right_labels": right_labels})
self.close_queue_op = self.q.close(cancel_pending_enqueues=True)
self.queue_size_op = self.q.size('queue_size')
self.trainable = cfg.TRAIN.TRAINABLE
if cfg.NET_CONF.CONV1_SKIP_LINK:
self.skip_1_mult = tf.constant(1.0, tf.float32)
else:
self.skip_1_mult = tf.constant(0.0, tf.float32)
if cfg.NET_CONF.CONV2_SKIP_LINK:
self.skip_2_mult = tf.constant(1.0, tf.float32)
else:
self.skip_2_mult = tf.constant(0.0, tf.float32)
if cfg.NET_CONF.CONV3_SKIP_LINK:
self.skip_4_mult = tf.constant(1.0, tf.float32)
else:
self.skip_4_mult = tf.constant(0.0, tf.float32)
self.setup()
def setup(self):
trainable = self.trainable
reuse = True
with tf.device("/cpu:0"):
# scaled versions of ground truth
(self.feed('gt_flow')
.avg_pool(2, 2, 2, 2, name='flow_pool1')
.div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_2x')
.avg_pool(2, 2, 2, 2, name='flow_pool2')
.div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_4x')
.avg_pool(2, 2, 2, 2, name='flow_pool3')
.div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_8x')
.avg_pool(2, 2, 2, 2, name='flow_pool4')
.div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_16x'))
(self.feed('occluded').cast(tf.float32)
.avg_pool(2, 2, 2, 2, name='occluded_2x_avg')
.avg_pool(2, 2, 2, 2, name='occluded_4x_avg')
.avg_pool(2, 2, 2, 2, name='occluded_8x_avg')
.avg_pool(2, 2, 2, 2, name='occluded_16x_avg'))
self.feed('occluded_2x_avg').round().cast(tf.int32, name="occluded_2x")
self.feed('occluded_4x_avg').round().cast(tf.int32, name="occluded_4x")
self.feed('occluded_8x_avg').round().cast(tf.int32, name="occluded_8x")
self.feed('occluded_16x_avg').round().cast(tf.int32, name="occluded_16x")
(self.feed('left_labels').cast(tf.float32)
.avg_pool(2, 2, 2, 2, name='left_labels_2x_avg')
.avg_pool(2, 2, 2, 2, name='left_labels_4x_avg')
.avg_pool(2, 2, 2, 2, name='left_labels_8x_avg')
.avg_pool(2, 2, 2, 2, name='left_labels_16x_avg'))
self.feed('left_labels_2x_avg').round().cast(tf.int32, name="left_labels_2x")
self.feed('left_labels_4x_avg').round().cast(tf.int32, name="left_labels_4x")
self.feed('left_labels_8x_avg').round().cast(tf.int32, name="left_labels_8x")
self.feed('left_labels_16x_avg').round().cast(tf.int32, name="left_labels_16x")
(self.feed('right_labels').cast(tf.float32)
.avg_pool(2, 2, 2, 2, name='right_labels_2x_avg')
.avg_pool(2, 2, 2, 2, name='right_labels_4x_avg')
.avg_pool(2, 2, 2, 2, name='right_labels_8x_avg')
.avg_pool(2, 2, 2, 2, name='right_labels_16x_avg'))
self.feed('right_labels_2x_avg').round().cast(tf.int32, name="right_labels_2x")
self.feed('right_labels_4x_avg').round().cast(tf.int32, name="right_labels_4x")
self.feed('right_labels_8x_avg').round().cast(tf.int32, name="right_labels_8x")
self.feed('right_labels_16x_avg').round().cast(tf.int32, name="right_labels_16x")
# left tower
(self.feed('data_left')
.add_immediate(tf.constant(0.0, tf.float32), name='data_left_tap')
.conv(3, 3, 64, 1, 1, name='conv1_1', c_i=3, trainable=trainable)
.conv(3, 3, 64, 1, 1, name='conv1_2', c_i=64, trainable=trainable)
.add_immediate(tf.constant(0.0, tf.float32), name='conv1_l')
.max_pool(2, 2, 2, 2, name='pool1')
.conv(3, 3, 128, 1, | 1, name='conv2_1', c_i=64, trainable=trainable)
.conv(3, 3, 128, 1, 1, name='conv2_2', c_i=128, trainable=trainable)
.add_immediate(tf.constant(0.0, tf.float32), name='conv2_l')
.max_pool(2, 2, 2, 2, name='pool2')
.conv(3, 3, 256, 1, 1, name='conv3_1', | c_i=128, trainable=trainable)
.conv(3, 3, 256, 1, 1, name='conv3_2', c_i=256, trainable=trainable)
.conv(3, 3, 256, 1, 1, name='conv3_3', c_i=256, trainable=trainable)
.add_immediate(tf.constant(0.0, tf.float32), name='conv3_l')
.max_pool(2, 2, 2, 2, name='pool3')
.conv(3, 3, 512, 1, 1, name='conv4_1', c_i=256, trainable=trainable)
.conv(3, 3, 512, 1, 1, name='conv4_2', c_i=512, trainable=trainable)
.conv(3, 3, 512, 1, 1, name='conv4_3', c_i=512, trainable=trainable)
.add_immediate(tf.constant(0.0, tf.float32), name='conv4_3_l'))
# 8x scaling input
(self.feed('conv4_3_l')
.conv(1, 1, 256, 1, 1, name='8x_skip_cov_1', c_i=512, elu=True)
.conv(3, 3, 512, 1, 1, name='8x_skip_cov_2', c_i=256, elu=True)
.conv(1, 1, 128, 1, 1, name='8x_skip_cov_3', c_i=512, elu=True)
.conv(3, 3, 64, 1, 1, name='8x_skip_cov_4', c_i=128, elu=True)
.add_immediate(tf.constant(0.0, tf.float32), name='features_8x_l'))
# 4x scaling input
(self.feed('conv3_l')
.conv(3, 3, 96, 1, 1, name='4x_skip_conv_1', elu=True, c_i=256)
# .conv(1, 1, 96, 1, 1, name='4x_skip_conv_2', elu=True, c_i=96)
# .conv(3, 3, 64, 1, 1, name='4x_skip_conv_3', elu=True, c_i=96)
.conv(1, 1, 96, 1, 1, name='4x_skip_conv_4', elu=True, c_i=96)
.conv(3, 3, 32, 1, 1, name='4x_skip_conv_5', elu=True, c_i=96)
.add_immediate(tf.constant(0.0, tf.float32), name='features_4x_l'))
# 2x scaling input
(self.feed('conv2_l')
.conv(3, 3, 96, 1, 1, name='2x_skip_conv_1', elu=True, c_i=128)
.conv(1, 1, 64, 1, 1, name='2x_skip_conv_2', elu=True, c_i=96)
.conv(3, 3, 16, 1, 1, name='2x_skip_conv_3', c_i=64, elu=True)
.add_immediate(tf.constant(0.0, tf.float32), name='features_2x_l'))
# 1x scaling input
(self.feed('conv1_l')
.conv(3, 3, 32, 1, 1, name='1x_skip_conv_1', elu=True, c_i=64)
.conv(3, 3, 8, 1, 1, name='1x_skip_conv_2', c_i=32, elu=True)
.add_immediate(tf.constant(0.0, tf.float32), name='features_1x_l'))
# right tower
(self.feed('data_right')
.add_immediate(tf.constant(0.0, tf.float32), name='data_right_tap')
|
johnaparker/MiePy | miepy/__init__.py | Python | mit | 343 | 0.005831 | """
MiePy
=======
Python module to calculate scattering coefficients of a plane wave incident on a sphere or core-shell structure using Mie theory
"""
#main submodules
from . import scattering
from . import materials
from . import mie_sphere
from . import array_io
from .mie_sphere import sphere
from .materials import material, load_material
|
gnboorse/videowatcher | setup.py | Python | gpl-3.0 | 356 | 0.042135 | #!/usr/bin/en | v python3
from setuptools import setup
#setup the package as the default application
setup(
name = 'vid | eowatcher',
packages = ['videowatcher'],
version = '0.1.5',
author = 'Gabriel Boorse',
description = 'Shared video playback package',
include_package_data = True,
install_requires = [
'flask',
],
)
|
jamesliu/mxnet | tests/python/unittest/test_gluon_trainer.py | Python | apache-2.0 | 7,882 | 0.002664 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import unittest
import numpy as np
from mxnet import gluon
from mxnet.gluon import nn
from mxnet.test_utils import assert_almost_equal
from common import setup_module, with_seed, assertRaises
from copy import deepcopy
from nose.tools import raises, assert_raises
@with_seed()
@raises(RuntimeError)
def test_multi_trainer():
    """A row_sparse Parameter may be bound to at most one Trainer at a time;
    creating a second Trainer for it must raise RuntimeError."""
    x = gluon.Parameter('x', shape=(10,), stype='row_sparse')
    x.initialize()
    # test set trainer
    trainer0 = gluon.Trainer([x], 'sgd')
    assert x._trainer is trainer0
    # test unset trainer
    x._set_trainer(None)
    assert x._trainer is None
    x._set_trainer(trainer0)
    # multiple trainers for a sparse Parameter is not allowed -> RuntimeError
    trainer1 = gluon.Trainer([x], 'sgd')
@with_seed()
def test_trainer():
def dict_equ(a, b):
assert set(a) == set(b)
for k in a:
assert (a[k].asnumpy() == b[k].asnumpy()).all()
x = gluon.Parameter('x', shape=(10,))
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer = gluon.Trainer([x], 'sgd', {'learning_rate': 1.0, 'momentum': 0.5})
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert (x.data(mx.cpu(1)).asnumpy() == -2).all()
x.lr_mult = 0.5
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert (x.data(mx.cpu(1)).asnumpy() == -4).all()
trainer.save_states('test_trainer.states')
states = deepcopy(trainer._kvstore._updater.states) if trainer._update_on_kvstore \
else deepcopy(trainer._updaters[0].states)
trainer.load_states('test_trainer.states')
if trainer._update_on_kvstore:
dict_equ(trainer._kvstore._updater.states, states)
assert trainer._optimizer == trainer._kvstore._updater.optimizer
else:
for updater in trainer._updaters:
dict_equ(updater.states, states)
assert trainer._optimizer == trainer._updaters[0].optimizer
assert_raises(AssertionError, trainer.update, 1)
assert_raises(AssertionError, trainer.allreduce_grads)
x = gluon.Parameter('x', shape=(10,))
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer2 = gluon.Trainer([x], 'sgd', {'learning_rate': 1.0, 'momentum': 0.5},
update_on_kvstore=False)
with mx.autograd.record():
for i, w in enumerate(x.list_data()):
y = i*w
y.backward()
assert (x.grad(mx.cpu(0)).asnumpy() != x.grad(mx.cpu(1)).asnumpy()).all()
trainer2.allreduce_grads()
assert (x.grad(mx.cpu(0)).asnumpy() == x.grad(mx.cpu(1)).asnumpy()).all()
trainer2.update(1)
assert (x.data(mx.cpu(1)).asnumpy() == -1).all(), x.data(mx.cpu(1)).asnumpy()
@with_seed()
def test_trainer_save_load():
x = gluon.Parameter('x', shape=(10,), lr_mult=1.0)
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer = gluon.Trainer([x], 'sgd', {'learning_rate': 0.1})
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert trainer._kvstore._updater.optimizer._get_lr(0) == 0.1
trainer.save_states('test_trainer_save_load.states')
trainer.load_states('test_trainer_save_load.states')
x.lr_mult = 2.0
# check if parameter dict is correctly associated with optimizer after load_state
assert trainer._kvstore._updater.optimizer._get_lr(0) == 0.2
@with_seed()
def test_trainer_multi_layer_init():
class Net(gluon.Block):
def __init__(self, **kwargs):
super(Net, self).__init__(**kwargs)
with self.name_scope():
# sparse param
self.embed_weight = self.params.get('embed_weight', stype='row_sparse',
shape=(4,3), grad_stype='row_sparse')
# dense param from a hybrid block
self.dense0 = nn.Dense(2)
def forward(self, x):
embed_weight = self.embed_weight.row_sparse_data(x)
embed = mx.nd.Embedding(data=x, weight=embed_weight,
input_dim=4, output_dim=3, sparse_grad=True)
return self.dense0(embed)
def check_init(ctxes):
net = Net(prefix='net_')
net.initialize(mx.init.One(), ctx=ctxes)
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 1})
data = mx.nd.array([[0,2], [1,2]])
xs = gluon.utils.split_and_load(data, ctxes)
ys = []
with mx.autograd.record():
for x in xs:
y = net(x)
ys.append(y)
for y in ys:
y.backward()
trainer.step(1)
# all parameters should be initialized
assert not trainer._params_to_init
all_rows = mx.nd.arange(0, 4, ctx=mx.cpu(1))
# check the updated weights
weight = net.embed_weight.row_sparse_data(all_rows).asnumpy()
assert (weight[0] == -1).all()
assert (weight[1] == -1).all()
assert (weight[2] == -3).all()
assert (weight[3] == 1).all()
check_init([mx.cpu(1), mx.cpu(2)])
check_init([mx.cpu(1)])
@with_seed()
def test_trainer_save_load():
x = gluon.Parameter('x', shape=(10,), lr_mult=1.0)
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer = gluon.Trainer([x], 'sgd', {'learning_rate': 0.1})
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert trainer._kvstore._updater.optimizer._get_lr(0) == 0.1
trainer.save_states('test_trainer_save_load.states')
trainer.load_states('test_trainer_save_load.states')
x.lr_mult = 2.0
# check if parameter dict is correctly associated with optimizer after load_state
assert trainer._kvstore._updater.optimizer._get_lr(0) == 0.2
@with_seed()
def test_trainer_reset_kv():
def check_trainer_reset_kv(kv):
params = gluon.ParameterDict()
x = params.get('x', shape=(10,), lr_mult=1.0)
params.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer = gluon.Trainer(params, 'sgd', {'learning_rate': 0.1}, kvstore=kv)
params.save('test_trainer_reset_kv.params')
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
assert trainer._kvstore.type == kv
# load would reset kvstore
mx.nd.waitall()
params.load('test_trainer_reset_kv.params')
assert trainer._kvstore is None
assert trainer._kv_initialized is False
with mx.autograd.record():
for w in x.list_data():
y = w + 1
y.backward()
trainer.step(1)
# the updated parameter should be based on the loaded checkpoint
assert (x.data(mx.cpu()) == -0.2).asnumpy().all()
kvs = ['local', 'device']
for kv in kvs:
check_trainer_reset_kv(kv)
|
0compute/makeenv | doc/conf.py | Python | mit | 8,696 | 0.0069 | # -*- coding: utf-8 -*-
#
# makeenv documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 27 21:24:26 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
# readthedocs does not build from inside the makeenv environment so we have to
# hack it around a bit here
if "READTHEDOCS" in os.environ:
import sys
import tempfile
# put us on the sys.path
MAKEENV_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, MAKEENV_ROOT)
# to silence a warning from the makefile
os.environ.setdefault("PIP_DOWNLOAD_CACHE", tempfile.mkdtemp())
# build the module doc
import subprocess
subprocess.check_output(("make", "-C", MAKEENV_ROOT, "sphinx-module-rst"),
stderr=subprocess.STDOUT)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinxcontrib.programoutput']
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'makeenv'
copyright = u'2012, Arthur Noel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.1-dev"
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['.build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'makeenvdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'makeenv.tex', u'makeenv Documentation',
u'Arthur Noel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'makeenv', u'makeenv Documentation',
[u'Arthur Noel'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'makeenv', u'makeenv Docu |
repotvsupertuga/tvsupertuga.repository | script.module.streamtvsupertuga/lib/resources/lib/modules/views.py | Python | gpl-2.0 | 1,864 | 0.008584 | # -*- coding: UTF-8 -*-
try: from sqlite3 import dbapi2 as database
except: from pysqlite2 import dbapi2 as database
from resources.lib.modules import control
def addView(content):
    """Persist the currently active view id for (current skin, *content*) in
    the views database, then show a confirmation dialog.

    Best-effort: any failure (missing DB path, DB error, dialog error) is
    silently ignored so navigation is never interrupted.
    """
    try:
        skin = control.skin
        record = (skin, content, str(control.getCurrentViewId()))
        control.makeFile(control.dataPath)
        dbcon = database.connect(control.viewsFile)
        try:
            dbcur = dbcon.cursor()
            dbcur.execute("CREATE TABLE IF NOT EXISTS views ("
                          "skin TEXT, "
                          "view_type TEXT, "
                          "view_id TEXT, "
                          "UNIQUE(skin, view_type)"
                          ");")
            # parameterized queries: no quoting/injection issues with odd
            # skin or content strings
            dbcur.execute("DELETE FROM views WHERE skin = ? AND view_type = ?",
                          (record[0], record[1]))
            dbcur.execute("INSERT INTO views VALUES (?, ?, ?)", record)
            dbcon.commit()
        finally:
            # always release the DB handle, even when a statement fails
            dbcon.close()
        viewName = control.infoLabel('Container.Viewmode')
        skinName = control.addon(skin).getAddonInfo('name')
        skinIcon = control.addon(skin).getAddonInfo('icon')
        control.infoDialog(viewName, heading=skinName, sound=True, icon=skinIcon)
    except Exception:
        # deliberately best-effort; do not surface errors to the user
        return
def setView(content, viewDict=None):
    """Poll (up to 200 x 100ms) until the container shows *content*, then apply
    the stored view mode for the current skin.  If no stored view exists (or
    any error occurs), fall back to ``viewDict[skin]``; if that also fails,
    give up silently.
    """
    for _ in range(0, 200):
        if control.condVisibility('Container.Content(%s)' % content):
            try:
                skin = control.skin
                dbcon = database.connect(control.viewsFile)
                try:
                    dbcur = dbcon.cursor()
                    # parameterized query instead of string interpolation
                    dbcur.execute("SELECT * FROM views WHERE skin = ? AND view_type = ?",
                                  (skin, content))
                    row = dbcur.fetchone()
                finally:
                    dbcon.close()
                # row is None when nothing stored -> TypeError -> fallback below
                view = row[2]
                if view is None: raise Exception()
                return control.execute('Container.SetViewMode(%s)' % str(view))
            except Exception:
                try:
                    return control.execute('Container.SetViewMode(%s)' % str(viewDict[skin]))
                except Exception:
                    return
        control.sleep(100)
|
kevinseelbach/generic_utils | src/generic_utils/config/__init__.py | Python | bsd-3-clause | 10,446 | 0.003159 | """
Generic configuration interface and module which allows for exposing environment/application configuration through a
generic uniformly available interface
"""
# future/compat
from builtins import str
from past.builtins import basestring
# stdlib
import ast
import importlib
import inspect
import os
from generic_utils import NOTSET
from generic_utils import loggingtools
from generic_utils.base_utils import ImmutableDelay
from generic_utils.base_utils import ImmutableMixin
from generic_utils.classtools import get_classfqn
from ..typetools import is_iterable
from ..typetools import parse_bool
log = loggingtools.getLogger()
def get_config_value(property_name, default=None, secure=False, val_type=None):
"""
Returns the value for the provided configuration property if it is defined through the available configuration
systems otherwise `default` is returned.
:param property_name: The name of the configuration property to retrieve.
:param default: The default value to return if the configuration property is not available
:param secure: Whether or not the property is considered a secure property and thus the value should be protected
when logging or any other type of reporting.
:param val_typ | e: The data type to cast the value to. This can be a single value or an iterable of types to attempt
to cast the value to in the order they are provided. If this is not provi | ded and a value is
provided for `default` then the type of that value will be used.
:return: The value of the configuration property `property_name`.
"""
# pylint: disable=too-many-branches
if property_name in os.environ:
val = os.environ[property_name]
location = "ENVIRONMENT"
else:
val = default
location = "DEFAULT"
if val_type is None and default is not None:
target_type = type(default)
log.debug("Assuming target type to be %s", target_type)
val_type = [target_type]
if not is_iterable(val_type):
val_type = [val_type]
# pylint: disable=too-many-nested-blocks
for cast_option in val_type:
try:
if not isinstance(val, cast_option):
if cast_option is dict:
val = ast.literal_eval(val)
elif cast_option is tuple or cast_option is list:
if isinstance(val, basestring):
val = val.strip()
if val:
if isinstance(val, basestring):
val = val.split(",")
val = [item.strip() for item in val]
else:
val = []
if cast_option is tuple:
val = tuple(val) # pylint: disable=redefined-variable-type
elif cast_option is bool and isinstance(val, basestring):
val = parse_bool(val)
else:
val = cast_option(val)
if isinstance(val, cast_option):
break
except (TypeError, ValueError):
pass
log.debug("Could not cast value '%s' to type %s", "*********" if secure else val, cast_option)
log.info("loaded '%s' from %s; value = '%s' and is type %s",
property_name, location, "*********" if secure else val, type(val))
return val
class ConfigKeyError(KeyError):
    """Raised when a requested key does not exist within a given Config."""

    # class-level defaults so the attributes exist even on a bare instance
    missing_key = None
    config = None
    message = "Key {missing_key} does not exist within config {config_name}"

    def __init__(self, config_obj, missing_key):
        self.missing_key = missing_key
        self.config = config_obj
        formatted = self.message.format(missing_key=missing_key,
                                        config_name=config_obj.name)
        super(ConfigKeyError, self).__init__(formatted)
class Config(object):
"""Generic interface for retrieving configuration from
"""
_config_dict = {}
_config_providers = []
#: The name assigned to this config object for logging and other display purposes
_name = None
def __init__(self, name=None, initial_config=None, *providers):
self._name = name
self._config_dict = dict(initial_config) if initial_config else {}
self._config_providers = []
for provider in providers:
self.add_provider(provider)
super(Config, self).__init__()
def add_provider(self, provider):
"""Adds a config provider to the current config object to provide additional sources of config information
:param provider: The provider to add to the current config
:type provider: Config
"""
log.info("Config provider %s added to Config %s", provider.name, self.name)
self._config_providers.append(provider)
def get_conf_value(self, key, default_value=NOTSET, value_type_func=None):
"""Retrieves the requested config `key` from the configuration
:param key: The configuration value to retrieve
:type key: str
:param default_value: The default value to return if the provided key doesn't exist. If this is not set
and the requested key does not exist then a `ConfigKeyError` is raised
:type default_value: value_type
:param value_type_func: Function which will convert the underlying raw config value to the expected type for the
requested config value.
:type value_type_func: func
:return: The request config value, or KeyError if it does not exist
:raises: ConfigKeyError
"""
# Currently do not support nested keys (e.g. "a.b.c")
try:
return_val = self._get_raw_value(key)
except ConfigKeyError:
if default_value is not NOTSET:
log.debug("Config value for key %s not found. Using default value", key)
return_val = default_value
else:
raise ConfigKeyError(self, key)
if value_type_func:
return_val = value_type_func(return_val)
return return_val
@property
def name(self):
"""
:return: The name for this config which is suitable for display/identification
:rtype: str
"""
return self._name if self._name else get_classfqn(self)
@property
def is_readonly(self): # pylint: disable=no-self-use
"""
:return: Whether or not this configuration is readonly or not
:rtype: bool
"""
return False
def __getitem__(self, item):
return self.get_conf_value(item)
def __setitem__(self, key, value):
setattr(self, key, value)
def __setattr__(self, key, value):
try:
super(Config, self).__setattr__(key, value)
except AttributeError:
self._config_dict[key] = value
def __getattr__(self, item):
# First try and see if the attribute is explicitly defined on the class, otherwise pull from config
try:
return super(Config, self).__getattribute__(item)
except AttributeError:
pass
try:
return self.get_conf_value(item)
except KeyError:
raise ConfigKeyError(self, item)
def __contains__(self, item):
if item in self._config_dict:
return True
for provider in self._config_providers:
if item in provider:
return True
return False
def _get_raw_value(self, key):
"""Return the raw config value for `key`. If `key` does not exist than `ConfigKeyError` is raised
:param key: The config key to get the raw value for
:type key: str
:return: The requested raw config value
"""
return_val = NOTSET
try:
return_val = self._config_dict[key]
log.debug("Found config value %s in current provider", key)
except KeyError:
for provider in self._config_providers:
try:
retu |
thomasWajs/cartridge-external-payment | cartridge_external_payment/providers/be2bill.py | Python | bsd-2-clause | 1,363 | 0.002201 | # -*- coding: utf-8 -*-
from decimal import Decimal
from be2bill_sdk import Be2BillForm
from cartridge_external_payment.providers.base import PaymentProvider
class Be2BillProvider(PaymentProvider):
    """Be2Bill implementation of the external payment provider interface."""

    def get_start_payment_form(self, request, order):
        """Build the Be2Bill form that starts a payment for *order*.

        The cart id is passed as EXTRADATA so it comes back in the payment
        notification (see :meth:`get_cart_id`).
        """
        # Be2Bill expects the amount in cents, as an integral value
        total = Decimal(order.total * 100).quantize(Decimal('0'))
        fullname = order.billing_detail_first_name + ' ' + \
            order.billing_detail_last_name
        client_ident = "{} ({})".format(fullname, order.billing_detail_email)
        return Be2BillForm(operation_type="payment",
                           client_ident=client_ident,
                           description="X",
                           order_id=order.id,
                           amount=total,
                           client_email=order.billing_detail_email,
                           card_full_name=fullname,
                           # Save cart id for notification
                           extra_data=request.cart.id)

    def get_order_id(self, notification_request):
        """Return the order id carried by the Be2Bill notification, or None."""
        return notification_request.GET.get('ORDERID', None)

    def get_transaction_id(self, notification_request):
        """Return the Be2Bill transaction id from the notification, or None."""
        return notification_request.GET.get('TRANSACTIONID', None)

    def get_cart_id(self, notification_request):
        """Return the cart id sent as EXTRADATA when the payment started.

        BUG FIX: this previously used ``raise`` instead of ``return``, so
        every notification lookup blew up (a GET value is not an exception)
        instead of returning the cart id like the sibling accessors do.
        """
        return notification_request.GET.get('EXTRADATA', None)
|
abhinavsingh/proxy.py | proxy/dashboard/dashboard.py | Python | bsd-3-clause | 1,872 | 0.000536 | # -*- coding: utf-8 -*-
"""
proxy.py
~~~~~~~~
⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
    :copyright: (c) 2013-present by Abhinav Singh and contributors.
    :license: BSD, see LICENSE for more details.
"""
import os
import logging
from typing import List, Tuple
from ..http.parser import HttpParser
from ..http.server import HttpWebServerBasePlugin, httpProtocolTypes
from ..http.responses import permanentRedirectResponse
logger = logging.getLogger(__name__)
class ProxyDashboard(HttpWebServerBasePlugin):
    """Serves the proxy.py dashboard single-page app."""

    # Paths that permanently redirect to the canonical /dashboard/ URL.
    REDIRECT_ROUTES = [
        (httpProtocolTypes.HTTP, r'/dashboard$'),
        (httpProtocolTypes.HTTPS, r'/dashboard$'),
        (httpProtocolTypes.HTTP, r'/dashboard/proxy.html$'),
        (httpProtocolTypes.HTTPS, r'/dashboard/proxy.html$'),
    ]

    # Canonical dashboard index route.
    INDEX_ROUTES = [
        (httpProtocolTypes.HTTP, r'/dashboard/$'),
        (httpProtocolTypes.HTTPS, r'/dashboard/$'),
    ]

    def routes(self) -> List[Tuple[int, str]]:
        """Advertise every route handled by this plugin."""
        return ProxyDashboard.REDIRECT_ROUTES + \
            ProxyDashboard.INDEX_ROUTES

    def handle_request(self, request: HttpParser) -> None:
        """Serve the SPA for the index route; redirect its aliases to it."""
        if request.path == b'/dashboard/':
            index_file = os.path.join(
                self.flags.static_server_dir,
                'dashboard', 'proxy.html',
            )
            response = self.serve_static_file(
                index_file,
                self.flags.min_compression_length,
            )
            self.client.queue(response)
            return
        if request.path in (
                b'/dashboard',
                b'/dashboard/proxy.html',
        ):
            self.client.queue(permanentRedirectResponse(b'/dashboard/'))
adamcik/mopidy-echonest | tests/test_extension.py | Python | apache-2.0 | 542 | 0 | from __future__ import unicode_literals
import unittest
from mopidy_echonest import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
    """Smoke tests for the Mopidy-Echonest extension entry point."""

    def test_get_default_config(self):
        """The shipped default config has an enabled [echonest] section."""
        ext = Extension()
        config = ext.get_default_config()
        self.assertIn('[echonest]', config)
        # FIX: restore the string literal that was corrupted in the source.
        self.assertIn('enabled = true', config)

    def test_get_config_schema(self):
        """The config schema exposes the expected keys."""
        ext = Extension()
        schema = ext.get_config_schema()
        # FIX: restore the corrupted 'apikey' literal.
        self.assertIn('apikey', schema)

    # TODO Write more tests
|
culturagovbr/sistema-nacional-cultura | adesao/managers.py | Python | agpl-3.0 | 768 | 0.001309 | from django.db import models
from django.core.exceptions import EmptyResultSet
class SistemaManager(models.Manager):
    """Manager used for interactions with the Culture Systems (Sistemas de Cultura)."""

    def get_queryset(self):
        queryset = super().get_queryset()
        # One row per federated entity (Postgres DISTINCT ON these fields).
        queryset = queryset.distinct('ente_federado__nome', 'ente_federado')
        # Re-filters by the distinct ids so select_related() can be applied
        # on top of the DISTINCT ON result — NOTE(review): this issues a
        # subquery over the same queryset; confirm the indirection is still
        # required.
        return queryset.filter(id__in=queryset).select_related()
class HistoricoManager(models.Manager):
    """Manager responsible for handling the history of a federated entity.

    (FIX: the base-class token ``models.Manager`` was corrupted in the
    source and has been restored.)
    """

    def ente(self, cod_ibge=None):
        """Return the history records of a federated entity.

        :param cod_ibge: IBGE code identifying the entity.
        :raises EmptyResultSet: when no code is given.
        """
        if not cod_ibge:
            raise EmptyResultSet
        return self.filter(ente_federado__cod_ibge=cod_ibge)
|
Sabayon/anaconda | pyanaconda/installclasses/corecd.py | Python | gpl-2.0 | 1,357 | 0.000737 | #
# corecd.py
#
# Copyright (C) 2014 Fabio Erculiani
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pyanaconda.installclass import BaseInstallClass
from pyanaconda.i18n import N_
from pyanaconda.sabayon import Entropy
class InstallClass(BaseInstallClass):
    # Identifier and user-visible name of this install class.
    id = "sabayon_corecd"
    name = N_("Sabayon Core")
    # NOTE(review): presumably orders this class relative to other install
    # classes during selection — confirm against BaseInstallClass.
    sortPriority = 9998
    # gettext translation domain for localizable strings.
    _l10n_domain = "anaconda"
    # Subdirectory of the EFI system partition used by this distribution.
    efi_dir = "sabayon"
    dmrc = None

    def configure(self, anaconda):
        """Apply base configuration plus the default partitioning scheme."""
        BaseInstallClass.configure(self, anaconda)
        BaseInstallClass.setDefaultPartitioning(self, anaconda.storage)

    def getBackend(self):
        """Return the live-CD copy backend used to install the payload."""
        from pyanaconda.sabayon.livecd import LiveCDCopyBackend
        return LiveCDCopyBackend

    def __init__(self):
        BaseInstallClass.__init__(self)
TheAlgorithms/Python | data_structures/linked_list/print_reverse.py | Python | mit | 1,768 | 0.000566 | from __future__ import annotations
class Node:
    """A singly linked list node holding ``data`` and a ``next`` pointer."""

    def __init__(self, data=None):
        self.data = data
        self.next = None

    def __repr__(self):
        """Returns a visual representation of the node and all its following nodes."""
        string_rep = []
        temp = self
        while temp:
            string_rep.append(f"{temp.data}")
            temp = temp.next
        return "->".join(string_rep)


def make_linked_list(elements_list: list):
    """Creates a Linked List from the elements of the given sequence
    (list/tuple) and returns the head of the Linked List.

    >>> make_linked_list([])
    Traceback (most recent call last):
    ...
    ValueError: The Elements List is empty
    >>> make_linked_list([7])
    7
    >>> make_linked_list(['abc'])
    abc
    >>> make_linked_list([7, 25])
    7->25
    """
    if not elements_list:
        # A specific exception type (still an Exception subclass, so any
        # existing `except Exception` callers keep working).
        raise ValueError("The Elements List is empty")
    current = head = Node(elements_list[0])
    # Append one node per remaining element, tracking the tail as we go.
    for element in elements_list[1:]:
        current.next = Node(element)
        current = current.next
    return head


def print_reverse(head_node: Node) -> None:
    """Prints the elements of the given Linked List in reverse order

    >>> print_reverse([])
    >>> linked_list = make_linked_list([69, 88, 73])
    >>> print_reverse(linked_list)
    73
    88
    69
    """
    # Recurse to the tail first, then print on the way back up.
    if head_node is not None and isinstance(head_node, Node):
        print_reverse(head_node.next)
        print(head_node.data)


def main():
    """Run the doctests, then demo the helpers on a sample list."""
    from doctest import testmod

    testmod()
    linked_list = make_linked_list([14, 52, 14, 12, 43])
    print("Linked List:")
    print(linked_list)
    print("Elements in Reverse:")
    print_reverse(linked_list)


if __name__ == "__main__":
    main()
|
mbr/flatland0 | tests/schema/test_constrained.py | Python | mit | 2,931 | 0.000341 | import six
from flatland import (
Constrained,
Enum,
Integer,
)
def test_constrained_no_default_validity():
    """By default nothing validates: value stays None, raw input is kept."""
    element = Constrained(u'anything')
    assert element.u == u'anything'
    assert element.value is None
def test_constrained_instance_override():
    """A per-instance valid_value hook decides what set() accepts."""
    def allow_only(*ok_values):
        def checker(element, value):
            assert isinstance(element, Constrained)
            return value in ok_values
        return checker

    field = Constrained(valid_value=allow_only(u'a'))
    assert field.set(u'a')
    assert field.value == u'a'
    assert field.u == u'a'
    assert not field.set(u'b')
    assert field.value is None
    assert field.u == u'b'

    field = Constrained(child_type=Integer, valid_value=allow_only(1, 2))
    for raw, expected in ((u'1', 1), (u'2', 2)):
        assert field.set(raw)
        assert field.value == expected
        assert field.u == raw
    for invalid in u'3', u'x':
        assert not field.set(invalid)
        assert field.value is None
        assert field.u == invalid
def test_constrained_instance_contrived():
    """Fringe types that adapt to None can be used in the bounds check.

    (FIX: restores a line whose indentation was corrupted in the source,
    and narrows a bare ``except:`` to ``except Exception:``.)
    """
    class CustomInteger(Integer):
        def adapt(self, value):
            try:
                return Integer.adapt(self, value)
            except Exception:
                return None

    el = Constrained(child_type=CustomInteger,
                     valid_value=lambda e, v: v in (1, None))
    assert el.set(u'1')
    for out_of_bounds in u'2', u'3':
        assert not el.set(out_of_bounds)
    for invalid in u'x', u'':
        assert el.set(invalid)
        assert el.value is None
        assert el.u == u''
def test_default_enum():
    """Enum accepts its declared values and rejects everything else.

    (FIX: restores the corrupted ``Enum.valued(*good_values)`` call.)
    """
    good_values = (u'a', u'b', u'c')
    for good_val in good_values:
        for schema in (Enum.using(valid_values=good_values),
                       Enum.valued(*good_values)):
            el = schema()
            assert el.set(good_val)
            assert el.value == good_val
            assert el.u == good_val
            assert el.validate()
            assert not el.errors

    schema = Enum.valued(*good_values)
    el = schema()
    assert not el.set(u'd')
    assert el.value is None
    assert el.u == u'd'
    # present but not converted
    assert el.validate()

    el = schema()
    assert not el.set(None)
    assert el.value is None
    assert el.u == u''
    # not present
    assert not el.validate()
def test_typed_enum():
    """An Enum with an Integer child type converts and validates values."""
    good_values = range(1, 4)
    schema = Enum.using(valid_values=good_values, child_type=Integer)

    for value in good_values:
        raw = six.text_type(value)
        el = schema()
        assert el.set(raw)
        assert el.value == value
        assert el.u == raw
        assert not el.errors

    # Non-numeric and out-of-range inputs are both rejected.
    for bad in (u'x', u'5'):
        el = schema()
        assert not el.set(bad)
        assert el.value is None
        assert el.u == bad
|
repology/repology | repology-update.py | Python | gpl-3.0 | 16,579 | 0.004102 | #!/usr/bin/env python3
#
# Copyright (C) 2016-2019 Dmitry Marakasov <amdmi3@amdmi3.ru>
#
# This file is part of repology
#
# repology is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# repology is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with repology. If not, see <http://www.gnu.org/licenses/>.
import argparse
import sys
from timeit import default_timer as timer
from typing import Any, Callable, Iterable, TypeVar
from repology.config import config
from repology.database import Database
from repology.dblogger import LogRunManager
from repology.logger import FileLogger, Logger, StderrLogger
from repology.querymgr import QueryManager
from repology.repomgr import RepositoryManager
from repology.repoproc import RepositoryProcessor
from repology.transformer import PackageTransformer
from repology.transformer.ruleset import Ruleset
from repology.update import UpdateProcess
from repology.yamlloader import ParsedConfigCache, YamlConfig
T = TypeVar('T')


def cached_method(method: Callable[..., T]) -> Callable[..., T]:
    """Memoize *method*'s result on the instance (per method, not per args).

    The first call stores the result as ``self._<name>_state``; subsequent
    calls return the stored value without re-invoking *method*.
    """
    # Sentinel distinct from any real result, so methods that legitimately
    # return None (e.g. get_parsed_config_cache) are cached too.  The
    # original `res is None` check re-ran such methods on every call.
    _missing = object()

    def wrapper(self: 'Environment', *args: Any, **kwargs: Any) -> Any:
        name = '_' + method.__name__ + '_state'
        res = getattr(self, name, _missing)
        if res is _missing:
            res = method(self, *args, **kwargs)
            setattr(self, name, res)
        return res

    return wrapper
class Environment:
    """Lazy accessors for the subsystems used by repology-update.

    Every ``get_*`` accessor is wrapped in ``@cached_method``, so each
    subsystem is constructed at most once per Environment instance.

    (FIX: restores the ``options`` annotation and ``__init__`` name, both
    corrupted in the source.)
    """

    options: argparse.Namespace

    def __init__(self, options: argparse.Namespace) -> None:
        self.options = options

    @cached_method
    def get_query_manager(self) -> QueryManager:
        return QueryManager(self.options.sql_dir)

    @cached_method
    def get_main_database_connection(self) -> Database:
        return Database(self.options.dsn, self.get_query_manager(), readonly=False, application_name='repology-update')

    @cached_method
    def get_parsed_config_cache(self) -> ParsedConfigCache | None:
        return ParsedConfigCache(self.options.config_cache) if self.options.config_cache else None

    @cached_method
    def get_logging_database_connection(self) -> Database:
        # autocommit so log records survive even if the main transaction rolls back
        return Database(self.options.dsn, self.get_query_manager(), readonly=False, autocommit=True, application_name='repology-update-logging')

    @cached_method
    def get_repos_config(self) -> YamlConfig:
        return YamlConfig.from_path(self.options.repos_dir, self.get_parsed_config_cache())

    @cached_method
    def get_repo_manager(self) -> RepositoryManager:
        return RepositoryManager(self.get_repos_config())

    @cached_method
    def get_repo_processor(self) -> RepositoryProcessor:
        return RepositoryProcessor(self.get_repo_manager(), self.options.statedir, self.options.parseddir, safety_checks=self.options.enable_safety_checks)

    @cached_method
    def get_rules_config(self) -> YamlConfig:
        return YamlConfig.from_path(self.options.rules_dir, self.get_parsed_config_cache())

    @cached_method
    def get_ruleset(self) -> Ruleset:
        return Ruleset(self.get_rules_config())

    @cached_method
    def get_enabled_repo_names(self) -> list[str]:
        return self.get_repo_manager().get_names(self.options.enabled_repositories)

    @cached_method
    def get_processable_repo_names(self) -> list[str]:
        # Keep the manager's ordering, restricted to enabled repositories.
        enabled = set(self.get_enabled_repo_names())
        return [reponame for reponame in self.get_repo_manager().get_names(self.options.reponames) if reponame in enabled]

    @cached_method
    def get_main_logger(self) -> Logger:
        return FileLogger(self.options.logfile) if self.options.logfile else StderrLogger()

    def get_options(self) -> argparse.Namespace:
        return self.options
def process_repositories(env: Environment) -> None:
    """Fetch and/or parse every processable repository.

    Fetching honors each repository's configured update period; parsing is
    skipped when neither the fetched data nor the ruleset changed since the
    last run (unless forced).
    """
    database = env.get_main_database_connection()

    for reponame in env.get_processable_repo_names():
        repository = env.get_repo_manager().get_repository(reponame)

        update_period = repository.update_period
        since_last_fetched = database.get_repository_since_last_fetched(reponame)
        skip_fetch = since_last_fetched is not None and since_last_fetched < update_period

        if env.get_options().fetch and skip_fetch:
            # FIX: dropped a no-op `.format(reponame)` chained onto an
            # already-interpolated f-string.
            env.get_main_logger().log(f'not fetching {reponame} to honor update period ({update_period-since_last_fetched} left)')
        elif env.get_options().fetch:
            env.get_main_logger().log('fetching {}'.format(reponame))

            # make sure hash is reset until it's known that the update did not introduce any changes
            old_hash = database.get_repository_ruleset_hash(reponame)
            database.update_repository_ruleset_hash(reponame, None)
            database.commit()

            allow_update = env.get_options().fetch >= 1

            have_changes = False
            try:
                with LogRunManager(env.get_logging_database_connection(), reponame, 'fetch') as runlogger:
                    have_changes = env.get_repo_processor().fetch([reponame], update=allow_update, logger=runlogger)
                    if not have_changes:
                        runlogger.set_no_changes()
                env.get_main_logger().get_indented().log('done' + ('' if have_changes else ' (no changes)'))
            except KeyboardInterrupt:
                raise
            except Exception as e:
                env.get_main_logger().get_indented().log('failed: ' + str(e), severity=Logger.ERROR)
                if env.get_options().fatal:
                    raise

            if not have_changes:
                # No new data: the previous ruleset hash is still valid.
                database.update_repository_ruleset_hash(reponame, old_hash)

            database.mark_repository_fetched(reponame)
            database.commit()

        if env.get_options().parse:
            ruleset = env.get_ruleset()
            ruleset_hash_changed = ruleset.get_hash() != database.get_repository_ruleset_hash(reponame)

            if ruleset_hash_changed:
                env.get_main_logger().log('parsing {}'.format(reponame))
            elif env.get_options().parse >= 2:
                env.get_main_logger().log('parsing {} (forced)'.format(reponame))
            else:
                env.get_main_logger().log('not parsing {} due to no data changes since last run'.format(reponame))
                continue

            # likewise, make sure hash is reset until the source is successfully reparsed
            database.update_repository_ruleset_hash(reponame, None)
            database.commit()

            try:
                transformer = PackageTransformer(ruleset, reponame, repository.ruleset)
                with LogRunManager(env.get_logging_database_connection(), reponame, 'parse') as runlogger:
                    env.get_repo_processor().parse([reponame], transformer=transformer, logger=runlogger)
                env.get_main_logger().get_indented().log('done')
                transformer.finalize()
            except KeyboardInterrupt:
                raise
            except Exception as e:
                env.get_main_logger().get_indented().log('failed: ' + str(e), severity=Logger.ERROR)
                if env.get_options().fatal:
                    raise

            database.update_repository_ruleset_hash(reponame, ruleset.get_hash())
            database.mark_repository_parsed(reponame)
            database.commit()
def database_init(env: Environment) -> None:
    """(Re)create the database schema: types, functions, then tables."""
    log = env.get_main_logger()
    db = env.get_main_database_connection()

    log.log('(re)initializing database schema')
    # Preserve the original creation order.
    for create_step in (db.create_schema_types,
                        db.create_schema_functions,
                        db.create_schema_tables):
        create_step()

    log.get_indented().log('committing changes')
    db.commit()
def update_repositories(env: Environment) -> None:
logger = env.get |
schleichdi2/OPENNFR-6.3-CORE | bitbake/lib/toaster/toastergui/tables.py | Python | gpl-2.0 | 63,230 | 0.001661 | #
# BitBake Toaster Implementation
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
from toastergui.widgets import ToasterTable
from orm.models import Recipe, ProjectLayer, Layer_Version, Machine, Project
from orm.models import CustomImageRecipe, Package, Target, Build, LogMessage, Task
from orm.models import CustomImagePackage, Package_DependencyManager
from orm.models import Distro
from django.db.models import Q, Max, Sum, Count, When, Case, Value, IntegerField
from django.conf.urls import url
from django.core.urlresolvers import reverse, resolve
from django.http import HttpResponse
from django.views.generic import TemplateView
from toastergui.tablefilter import TableFilter
from toastergui.tablefilter import TableFilterActionToggle
from toastergui.tablefilter import TableFilterActionDateRange
from toastergui.tablefilter import TableFilterActionDay
import os
class ProjectFilters(object):
    """Reusable Django Q-object builders for project-membership filters."""

    @staticmethod
    def in_project(project_layers):
        """Q matching objects whose layer_version is in *project_layers*."""
        return Q(layer_version__in=project_layers)

    @staticmethod
    def not_in_project(project_layers):
        """Negation of :meth:`in_project`."""
        membership = ProjectFilters.in_project(project_layers)
        return ~membership
class LayersTable(ToasterTable):
    """Table of layers in Toaster.

    (FIX: restores two tokens corrupted in the source —
    ``hidden=True`` in the Subdirectory column and
    ``data.get_vcs_dirpath_link_url`` in its template.)
    """

    def __init__(self, *args, **kwargs):
        super(LayersTable, self).__init__(*args, **kwargs)
        self.default_orderby = "layer__name"
        self.title = "Compatible layers"

    def get_context_data(self, **kwargs):
        """Add the current project to the template context."""
        context = super(LayersTable, self).get_context_data(**kwargs)

        project = Project.objects.get(pk=kwargs['pid'])
        context['project'] = project

        return context

    def setup_filters(self, *args, **kwargs):
        """Expose an in-project / not-in-project toggle filter."""
        project = Project.objects.get(pk=kwargs['pid'])
        self.project_layers = ProjectLayer.objects.filter(project=project)

        in_current_project_filter = TableFilter(
            "in_current_project",
            "Filter by project layers"
        )

        criteria = Q(projectlayer__in=self.project_layers)

        in_project_action = TableFilterActionToggle(
            "in_project",
            "Layers added to this project",
            criteria
        )

        not_in_project_action = TableFilterActionToggle(
            "not_in_project",
            "Layers not added to this project",
            ~criteria
        )

        in_current_project_filter.add_action(in_project_action)
        in_current_project_filter.add_action(not_in_project_action)
        self.add_filter(in_current_project_filter)

    def setup_queryset(self, *args, **kwargs):
        """Show every layer version compatible with the project."""
        prj = Project.objects.get(pk = kwargs['pid'])
        compatible_layers = prj.get_all_compatible_layer_versions()

        self.static_context_extra['current_layers'] = \
            prj.get_project_layer_versions(pk=True)

        self.queryset = compatible_layers.order_by(self.default_orderby)

    def setup_columns(self, *args, **kwargs):
        """Declare the table columns and their Django template snippets."""
        layer_link_template = '''
        <a href="{% url 'layerdetails' extra.pid data.id %}">
          {{data.layer.name}}
        </a>
        '''

        self.add_column(title="Layer",
                        hideable=False,
                        orderable=True,
                        static_data_name="layer__name",
                        static_data_template=layer_link_template)

        self.add_column(title="Summary",
                        field_name="layer__summary")

        git_url_template = '''
        <a href="{% url 'layerdetails' extra.pid data.id %}">
        {% if data.layer.local_source_dir %}
          <code>{{data.layer.local_source_dir}}</code>
        {% else %}
          <code>{{data.layer.vcs_url}}</code>
        </a>
        {% endif %}
        {% if data.get_vcs_link_url %}
        <a target="_blank" href="{{ data.get_vcs_link_url }}">
           <span class="glyphicon glyphicon-new-window"></span>
        </a>
        {% endif %}
        '''

        self.add_column(title="Layer source code location",
                        help_text="A Git repository or an absolute path to a directory",
                        hidden=True,
                        static_data_name="layer__vcs_url",
                        static_data_template=git_url_template)

        git_dir_template = '''
        {% if data.layer.local_source_dir %}
        <span class="text-muted">Not applicable</span>
        <span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer.name}} is not in a Git repository, so there is no subdirectory associated with it"> </span>
        {% else %}
        <a href="{% url 'layerdetails' extra.pid data.id %}">
         <code>{{data.dirpath}}</code>
        </a>
        {% endif %}
        {% if data.dirpath and data.get_vcs_dirpath_link_url %}
        <a target="_blank" href="{{ data.get_vcs_dirpath_link_url }}">
          <span class="glyphicon glyphicon-new-window"></span>
        </a>
        {% endif %}'''

        self.add_column(title="Subdirectory",
                        help_text="The layer directory within the Git repository",
                        hidden=True,
                        static_data_name="git_subdir",
                        static_data_template=git_dir_template)

        revision_template = '''
        {% if data.layer.local_source_dir %}
        <span class="text-muted">Not applicable</span>
        <span class="glyphicon glyphicon-question-sign get-help" data-original-title="" title="The source code of {{data.layer.name}} is not in a Git repository, so there is no revision associated with it"> </span>
        {% else %}
        {% with vcs_ref=data.get_vcs_reference %}
        {% include 'snippets/gitrev_popover.html' %}
        {% endwith %}
        {% endif %}
        '''

        self.add_column(title="Git revision",
                        help_text="The Git branch, tag or commit. For the layers from the OpenEmbedded layer source, the revision is always the branch compatible with the Yocto Project version you selected for this project",
                        static_data_name="revision",
                        static_data_template=revision_template)

        deps_template = '''
        {% with ods=data.dependencies.all%}
        {% if ods.count %}
            <a class="btn btn-default" title="<a href='{% url "layerdetails" extra.pid data.id %}'>{{data.layer.name}}</a> dependencies"
        data-content="<ul class='list-unstyled'>
        {% for i in ods%}
        <li><a href='{% url "layerdetails" extra.pid i.depends_on.pk %}'>{{i.depends_on.layer.name}}</a></li>
        {% endfor %}
        </ul>">
            {{ods.count}}
        </a>
        {% endif %}
        {% endwith %}
        '''

        self.add_column(title="Dependencies",
                        help_text="Other layers a layer depends upon",
                        static_data_name="dependencies",
                        static_data_template=deps_template)

        self.add_column(title="Add | Remove",
                        help_text="Add or remove layers to / from your project",
                        hideable=False,
                        filter_name="in_current_project",
                        static_data_name="add-del-layers",
                        static_data_template='{% include "layer_btn.html" %}')
class MachinesTable(ToasterTable):
"""Table of Machines in Toaster"""
def __init__(self, *args, **kwargs):
super(MachinesTable, self).__init__(*args, **kwargs)
self.empty_state = "Toaster has no machine information for this project. Sadly, machine information cannot be obtained from builds, so this page will remain empty."
self.title = "Compatible machines"
self.default_orderby = "name"
def get_context_data(self, **kwargs):
context = super(MachinesTable, self).get_context_data(**kwargs)
context['project'] = Project.objects.get(pk=kwargs['pid'])
return context
def setup_filters(self, *args, **kwargs):
project = Project.objects.get(pk=kwargs['pid'])
in_current_project_filter = TableFilter(
"in_current_project",
"F |
slinderman/pyhsmm_spiketrains | experiments/make_figure7.py | Python | mit | 3,751 | 0.003999 | import os
import cPickle
import gzip
from collections import namedtuple
import numpy as np
import matplotlib
# Small fonts suitable for a two-panel paper figure.
matplotlib.rcParams.update({'axes.labelsize': 9,
                            'xtick.labelsize' : 9,
                            'ytick.labelsize' : 9,
                            'axes.titlesize' : 11})
import matplotlib.pyplot as plt
import brewer2mpl
# Qualitative color palette shared by all panels.
allcolors = brewer2mpl.get_map("Set1", "Qualitative", 9).mpl_colors
from hips.plotting.layout import create_axis_at_location, create_figure
from experiment_helper import load_hipp_data
# One record per inference run, as persisted by the fitting scripts.
Results = namedtuple(
    "Results", ["name", "loglikes", "predictive_lls",
                "N_used", "alphas", "gammas",
                "rates", "obs_hypers",
                "samples", "timestamps"])
def plot_results(alpha_a_0s, Ks_alpha_a_0,
                 gamma_a_0s, Ks_gamma_a_0,
                 figdir="."):
    """Box-plot the number of inferred states vs. concentration hyperparameters.

    Panel A varies the a_{alpha_0} prior parameter, panel B varies a_{gamma};
    the figure is written to figure7.pdf/.png inside *figdir*.

    (FIX: restores the corrupted ``create_figure((5,1.5))`` and
    ``ax.boxplot`` tokens.)
    """
    fig = create_figure((5, 1.5))

    # Panel A: number of states as a function of a_{alpha_0}
    ax = create_axis_at_location(fig, 0.6, 0.5, 1.7, .8, transparent=True)
    plt.figtext(0.05/5, 1.25/1.5, "A")
    ax.boxplot(Ks_alpha_a_0, positions=np.arange(1, 1+len(alpha_a_0s)),
               boxprops=dict(color=allcolors[1]),
               whiskerprops=dict(color=allcolors[0]),
               flierprops=dict(color=allcolors[1]))
    ax.set_xticklabels(alpha_a_0s)
    plt.xlim(0.5, 4.5)
    plt.ylim(40, 90)
    ax.set_xlabel("$a_{\\alpha_0}$")
    ax.set_ylabel("Number of States")

    # Panel B: number of states as a function of a_{gamma}
    ax = create_axis_at_location(fig, 3.1, 0.5, 1.7, .8, transparent=True)
    plt.figtext(2.55/5, 1.25/1.5, "B")
    ax.boxplot(Ks_gamma_a_0, positions=np.arange(1, 1+len(gamma_a_0s)),
               boxprops=dict(color=allcolors[1]),
               whiskerprops=dict(color=allcolors[0]),
               flierprops=dict(color=allcolors[1]))
    ax.set_xticklabels(gamma_a_0s)
    plt.xlim(0.5, 4.5)
    plt.ylim(40, 90)
    ax.set_xlabel("$a_{\\gamma}$")
    ax.set_ylabel("Number of States")

    plt.savefig(os.path.join(figdir, "figure7.pdf"))
    plt.savefig(os.path.join(figdir, "figure7.png"))
# Figure 7: Hippocampal inference trajectories
dataset = "hipp_2dtrack_a"
N, S_train, pos_train, S_test, pos_test, center, radius = \
    load_hipp_data(dataname=dataset)

# Load results
runnum = 1
results_dir = os.path.join("results", dataset, "run%03d" % runnum)

# Load alpha_a_0 results (keep the last 2000 samples of N_used per run)
alpha_a_0s = [1.0, 5.0, 10.0, 100.0]
alpha_a_0_results = []
for alpha_a_0 in alpha_a_0s:
    results_type = "hdphmm_scale_alpha_a_0%.1f" % alpha_a_0
    print "Loading ", results_type
    results_file = os.path.join(results_dir, results_type + ".pkl.gz")
    with gzip.open(results_file, "r") as f:
        results = cPickle.load(f)
    alpha_a_0_results.append(results.N_used[-2000:])

# Same for the gamma hyperparameter sweep.
gamma_a_0s = [1.0, 5.0, 10.0, 100.0]
gamma_a_0_results = []
for gamma_a_0 in gamma_a_0s:
    results_type = "hdphmm_scale_gamma_a_0%.1f" % gamma_a_0
    print "Loading ", results_type
    results_file = os.path.join(results_dir, results_type + ".pkl.gz")
    with gzip.open(results_file, "r") as f:
        results = cPickle.load(f)
    gamma_a_0_results.append(results.N_used[-2000:])

# alpha_obss = [0.1, 0.5, 1.0, 2.0, 2.5, 5.0, 10.0]
# alpha_obs_results = []
# for alpha_obs in alpha_obss:
#     results_type = "hdphmm_scale_alpha_obs%.1f" % alpha_obs
#     print "Loading ", results_type
#     results_file = os.path.join(results_dir, results_type + ".pkl.gz")
#     with gzip.open(results_file, "r") as f:
#         results = cPickle.load(f)
#
#     alpha_obs_results.append(results.N_used[-2000:])

plot_results(alpha_a_0s, alpha_a_0_results,
             gamma_a_0s, gamma_a_0_results,
             figdir=results_dir)
|
SylvainDe/DidYouMean-Python | didyoumean/__init__.py | Python | mit | 66 | 0 | """Empt | y file. Might grow in the future."""
import didyoumean_api
|
crainiarc/poker-ai-planner | agents/adaptive_play_bot.py | Python | mit | 99 | 0.020202 | from human_bot import HumanBo | t
class AdaptivePlayBot(HumanBot):
    """Bot intended to adapt its play; currently behaves like HumanBot."""

    def __init__(self):
        # BUG FIX: the method was misspelled `__init` and therefore never
        # ran as the constructor.  Delegate to the parent so HumanBot's
        # initialization still happens.
        super(AdaptivePlayBot, self).__init__()
tomviner/unlimited-weeks-in-google-calendar | tests/test_ext.py | Python | mit | 2,010 | 0.000995 | import time
CALENDAR_URL = 'https://calendar.google.com/calendar'
# CSS class names added by the extension's content script.
BUTTONS_CLASS = 'gcal-unlim-weeks-adjust-weeks'
ADD_BUTTON_CLASS = 'gcal-unlim-weeks-add-week'
REMOVE_BUTTON_CLASS = 'gcal-unlim-weeks-remove-week'
def get_num_weeks(selenium):
    """Count the month-row elements currently rendered in the calendar."""
    return len(selenium.find_elements_by_class_name('month-row'))
def inject_extension(selenium):
    """Run the extension's content scripts directly on the current page.

    Extensions cannot be loaded in headless mode, so the scripts are
    executed by hand, jQuery first.
    """
    time.sleep(1)
    for script_path in ('ext/src/inject/jquery.min.js',
                        'ext/src/inject/compiled.min.js'):
        with open(script_path) as handle:
            selenium.execute_script(handle.read())
    time.sleep(1)
def test_load_with_no_button_checked(selenium, authed_get, is_headless):
    """Both adjust buttons appear even with no toolbar button checked."""
    # this hash leaves the toolbar without any buttons checked
    url = ('https://calendar.google.com/calendar/render'
           '#main_7%7Ccustom,28')
    authed_get(url)
    if is_headless:
        inject_extension(selenium)
    ext_buttons = selenium.find_elements_by_class_name(BUTTONS_CLASS)
    assert len(ext_buttons) == 2
def test_ext(selenium, authed_get, is_headless):
    """Adding/removing weeks changes the rendered week count accordingly.

    (FIX: restores the ``else:`` branch and ``button.click()`` lines whose
    indentation was corrupted in the source.)
    """
    authed_get(CALENDAR_URL)
    if is_headless:
        inject_extension(selenium)
    add_button = selenium.find_element_by_class_name(ADD_BUTTON_CLASS)
    remove_button = selenium.find_element_by_class_name(REMOVE_BUTTON_CLASS)

    # reset to a known state
    # click the custom view button
    selenium.find_elements_by_css_selector(
        '#topRightNavigation .goog-imageless-button')[3].click()
    while get_num_weeks(selenium) > 2:
        remove_button.click()

    old_num_weeks = get_num_weeks(selenium)
    for delta in (1, 2, -3, 1):
        if delta < 0:
            button = remove_button
        else:
            button = add_button
        for _ in range(abs(delta)):
            button.click()
        num_weeks = get_num_weeks(selenium)
        assert num_weeks == old_num_weeks + delta
        old_num_weeks = num_weeks
|
croxis/SpaceDrive | spacedrive/renderpipeline/rpcore/stages/gbuffer_stage.py | Python | mit | 2,402 | 0.000833 | """
RenderPipeline
Copyright (c) 2014-2016 tobspr <tobias.springer1@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from rpcore.globals import Globals
from rpcore.render_stage import RenderStage
from rpcore.util.shader_input_blocks import SimpleInputBlock
class GBufferStage(RenderStage):
    """ This is the main pass stage, rendering the objects and creating the
    GBuffer which is used in later stages

    (FIX: restores the corrupted closing brace of produced_pipes and the
    ``add_depth_attachment`` call.)
    """

    required_inputs = ["DefaultEnvmap"]
    required_pipes = []

    @property
    def produced_pipes(self):
        """Expose the GBuffer input block and the scene depth texture."""
        return {
            "GBuffer": self.make_gbuffer_ubo(),
            "SceneDepth": self.target.depth_tex
        }

    def make_gbuffer_ubo(self):
        """Bundle the GBuffer attachments into one shader input block."""
        ubo = SimpleInputBlock("GBuffer")
        ubo.add_input("Depth", self.target.depth_tex)
        ubo.add_input("Data0", self.target.color_tex)
        ubo.add_input("Data1", self.target.aux_tex[0])
        ubo.add_input("Data2", self.target.aux_tex[1])
        return ubo

    def create(self):
        """Create the render target with color, depth and aux attachments."""
        self.target = self.create_target("GBuffer")
        self.target.add_color_attachment(bits=16, alpha=True)
        self.target.add_depth_attachment(bits=32)
        self.target.add_aux_attachments(bits=16, count=2)
        self.target.prepare_render(Globals.base.cam)

    def set_shader_input(self, *args):
        """Apply inputs to the root node, since this pass renders the scene."""
        Globals.render.set_shader_input(*args)
|
TileDB-Inc/TileDB | doc/source/conf.py | Python | mit | 6,868 | 0.002621 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# TileDB documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 5 10:08:37 2018.
#
# -- Imports configuration -------------------------------------------------
import os
import subprocess
import sys
from os.path import abspath, join, dirname
sys.path.insert(0, abspath(join(dirname(__file__))))

# -- ReadTheDocs configuration ---------------------------------------------

# Special handling on ReadTheDocs builds.
# Some of this code is from https://github.com/robotpy/robotpy-docs/blob/master/conf.py
readthedocs = os.environ.get('READTHEDOCS', None) == 'True'
rtd_version = os.environ.get('READTHEDOCS_VERSION', 'latest')
# Normalize to one of the two hosted documentation channels.
# (FIX: restores the corrupted `rtd_version` token.)
rtd_version = rtd_version if rtd_version in ['stable', 'latest'] else 'stable'

# On RTD, build the Doxygen XML files.
if readthedocs:
    # Build docs.  shell=True is acceptable here: the command is a fixed,
    # trusted string with no external input.
    subprocess.check_call('''
    mkdir ../../build;
    cd ../../build;
    ../bootstrap;
    make doc;
''', shell=True)
# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinxcontrib.contentui',
    'breathe'
]

if readthedocs:
    # Mapping for linking between RTD subprojects.
    intersphinx_mapping = {
        'tiledb': ('https://tiledb-inc-tiledb.readthedocs-hosted.com/en/%s/' % rtd_version, None),
        'tiledb-py': ('https://tiledb-inc-tiledb.readthedocs-hosted.com/projects/python-api/en/%s/' % rtd_version, None)
    }

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'TileDB'
copyright = '2021 TileDB, Inc'
author = 'TileDB, Inc.'

# The short X.Y version.
version = '2.8'
# The full version, including alpha/beta/rc tags.
release = '2.8.0'

# Breathe extension configuration.
# The Doxygen XML output (built above on RTD) feeds both C and C++ domains.
tiledb_dir = '../../'
doxygen_xml_dir = tiledb_dir + 'build/xml/'
breathe_projects = {'TileDB-C': doxygen_xml_dir, 'TileDB-C++': doxygen_xml_dir}
breathe_default_project = 'TileDB-C'
breathe_projects_source = {
    'TileDB-C': (tiledb_dir + 'tiledb/sm/c_api/', ['tiledb.h']),
    'TileDB-C++': (tiledb_dir + 'tiledb/sm/cpp_api/', ['tiledb'])
}
breathe_domain_by_file_pattern = {
    '*/c_api/tiledb.h': 'c',
    '*/cpp_api/tiledb': 'cpp'
}

# Allow parsing TILEDB_DEPRECATED in C++ function signatures.
cpp_id_attributes = ['TILEDB_DEPRECATED']

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'friendly'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

html_static_path = ['_static']
html_logo = '_static/tiledb-logo_color_no_margin_@4x.png'
html_favicon = '_static/favicon.ico'

if readthedocs:
    html_theme = 'default'
else:
    # Local builds use the RTD theme explicitly.
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'TileDBdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'TileDB.tex', 'TileDB Documentation',
'TileDB, Inc.', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'tiledb', 'TileDB Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'TileDB', 'TileDB Documentation',
author, 'TileDB', 'One line description of project.',
'Miscellaneous'),
]
# -- Custom Document processing ----------------------------------------------
# Generate the sidebar automatically so that it is identical across all subprojects.
# This (and gensidebar.py) from https://github.com/robotpy/robotpy-docs
import gensidebar
gensidebar.generate_sidebar({'on_rtd': readthedocs, 'rtd_version': rtd_version}, 'tiledb')
# Replace C/C++ source examples path
def replaceText(app, docname, source):
    """Sphinx 'source-read' handler: apply the configured text replacements.

    Sphinx hands the document text in as a single-element list precisely so
    handlers can mutate it in place.
    """
    text = source[0]
    for placeholder, replacement in app.config.text_replacements.items():
        text = text.replace(placeholder, replacement)
    source[0] = text
text_replacements = {
"{source_examples_path}" : "../../examples",
"{tiledb_src_root_url}" : "https://github.com/TileDB-Inc/TileDB/blob/dev",
"{tiledb_py_src_root_url}" : "https://github.com/TileDB-Inc/TileDB-Py/blob/dev",
"{tiledb_R_src_root_url}" : "https://github.com/TileDB-Inc/TileDB-R/blob/master",
"{tiledb_go_src_root_url}" : "https://github.com/TileDB-Inc/TileDB-Go/blob/master"
}
# -- Custom setup -----------------------------------------------------------
def add_custom_js(app):
    """Register this project's custom JavaScript file with Sphinx."""
    app.add_javascript('custom.js')
def setup(app):
    """Sphinx extension entry point: wire up replacements, CSS and JS."""
    # 'text_replacements' becomes a configurable value; True => rebuild docs
    # when it changes.
    app.add_config_value('text_replacements', {}, True)
    app.connect('source-read', replaceText)
    app.add_stylesheet('custom.css')
    # Use this event so that our custom JS gets included *after* the ContentUI
    # extension adds its JS, otherwise we can't override its behavior.
    app.connect('builder-inited', add_custom_js)
|
kmee/l10n-brazil | l10n_br_fiscal/models/nbm.py | Python | agpl-3.0 | 1,671 | 0 | # Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, fields, models
from ..tools import misc
class Nbm(models.Model):
    """NBM fiscal classification code, linked to matching NCM records."""

    _name = 'l10n_br_fiscal.nbm'
    _inherit = 'l10n_br_fiscal.data.product.abstract'
    _description = 'NBM'

    code = fields.Char(
        size=12)

    code_unmasked = fields.Char(
        size=10)

    name = fields.Text(
        string='Name',
        required=True,
        index=True)

    product_tmpl_ids = fields.One2many(
        inverse_name='nbm_id')

    # Raw NCM code pattern string; fed to misc.domain_field_codes() to
    # resolve ncm_ids (see action_search_ncms).
    ncms = fields.Char(
        string='NCM')

    # BUG FIX: the kwargs were misspelled 'colunm1'/'colunm2', so Odoo
    # ignored them and auto-generated the relation column names instead of
    # using nbm_id/ncm_id. NOTE(review): existing databases created with the
    # auto-generated names may need a relation-table migration.
    ncm_ids = fields.Many2many(
        comodel_name='l10n_br_fiscal.ncm',
        relation='fiscal_nbm_ncm_rel',
        column1='nbm_id',
        column2='ncm_id',
        readonly=True,
        string='NCMs')

    @api.model
    def create(self, values):
        """Create the record, then resolve ncm_ids when 'ncms' is given."""
        create_super = super(Nbm, self).create(values)
        if 'ncms' in values:
            create_super.with_context(do_not_write=True).action_search_ncms()
        return create_super

    @api.multi
    def write(self, values):
        """Write the record, re-resolving ncm_ids when 'ncms' changed."""
        write_super = super(Nbm, self).write(values)
        do_not_write = self.env.context.get('do_not_write')
        # The context flag prevents recursion when action_search_ncms()
        # itself writes ncm_ids.
        if 'ncms' in values and not do_not_write:
            self.with_context(do_not_write=True).action_search_ncms()
        return write_super

    @api.multi
    def action_search_ncms(self):
        """Populate ncm_ids with NCM records matching the 'ncms' pattern."""
        ncm = self.env['l10n_br_fiscal.ncm']
        for record in self:
            if record.ncms:
                domain = misc.domain_field_codes(field_codes=record.ncms)
                record.ncm_ids = ncm.search(domain)
|
wangshunzi/Python_code | 02-Python面向对象代码/面向对象-基础/classDesc.py | Python | mit | 728 | 0.005597 |
class Person:
    """Demo class showing how classes and attributes are documented.

    Attributes:
        count: int, the number of people.
    """

    # Number of people (demo class attribute).
    count = 1

    def __init__(self):
        self.__name = "sz"

    def run(self, distance, step):
        """Demonstrate documenting parameters and return values.

        :param distance: distance to cover (int)
        :param step: length of one step (int); no default value
        :return: distance divided by step (numeric)
        """
        print("人在跑")
        return distance / step
# help(Person)
def xxx():
    """Demo function showing how a function docstring is written."""
    print("xxx")
|
cloudera/hue | desktop/core/ext-py/openpyxl-2.6.4/openpyxl/worksheet/errors.py | Python | apache-2.0 | 2,435 | 0.002464 | #Autogenerated schema
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Typed,
String,
Bool,
Sequence,
)
from openpyxl.descriptors.excel import CellRange
class Extension(Serialisable):
    """Auto-generated schema class for a spreadsheetML <extension> element."""

    tagname = "extension"

    # Identifier of the extension carried by this element.
    uri = String(allow_none=True)

    def __init__(self,
                 uri=None,
                ):
        self.uri = uri
class ExtensionList(Serialisable):
    """Auto-generated schema class for an <extensionList> container."""

    tagname = "extensionList"

    # uses element group EG_ExtensionList
    ext = Sequence(expected_type=Extension)

    __elements__ = ('ext',)

    def __init__(self,
                 ext=(),
                ):
        self.ext = ext
class IgnoredError(Serialisable):
    """Auto-generated schema class for an <ignoredError> element.

    Each boolean flag names a worksheet error category suppressed for the
    cell range in ``sqref``.
    """

    tagname = "ignoredError"

    # Cell range the suppression applies to.
    sqref = CellRange
    evalError = Bool(allow_none=True)
    twoDigitTextYear = Bool(allow_none=True)
    numberStoredAsText = Bool(allow_none=True)
    formula = Bool(allow_none=True)
    formulaRange = Bool(allow_none=True)
    unlockedFormula = Bool(allow_none=True)
    emptyCellReference = Bool(allow_none=True)
    listDataValidation = Bool(allow_none=True)
    calculatedColumn = Bool(allow_none=True)

    def __init__(self,
                 sqref=None,
                 evalError=False,
                 twoDigitTextYear=False,
                 numberStoredAsText=False,
                 formula=False,
                 formulaRange=False,
                 unlockedFormula=False,
                 emptyCellReference=False,
                 listDataValidation=False,
                 calculatedColumn=False,
                ):
        self.sqref = sqref
        self.evalError = evalError
        self.twoDigitTextYear = twoDigitTextYear
        self.numberStoredAsText = numberStoredAsText
        self.formula = formula
        self.formulaRange = formulaRange
        self.unlockedFormula = unlockedFormula
        self.emptyCellReference = emptyCellReference
        self.listDataValidation = listDataValidation
        self.calculatedColumn = calculatedColumn
class IgnoredErrors(Serialisable):
    """Auto-generated schema class for the <ignoredErrors> container."""

    tagname = "ignoredErrors"

    ignoredError = Sequence(expected_type=IgnoredError)
    extLst = Typed(expected_type=ExtensionList, allow_none=True)

    __elements__ = ('ignoredError', 'extLst')

    def __init__(self,
                 ignoredError=(),
                 extLst=None,
                ):
        self.ignoredError = ignoredError
        self.extLst = extLst
|
owlabs/incubator-airflow | airflow/example_dags/example_bash_operator.py | Python | apache-2.0 | 2,045 | 0 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from builtins import range
from datetime import timedelta
from airflow.models import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.dates import days_ago
# Default arguments shared by every task in this DAG.
args = {
    'owner': 'Airflow',
    'start_date': days_ago(2),
}

# Daily example DAG demonstrating BashOperator usage patterns.
dag = DAG(
    dag_id='example_bash_operator',
    default_args=args,
    schedule_interval='0 0 * * *',
    dagrun_timeout=timedelta(minutes=60),
    tags=['example']
)

# Final no-op task; the other tasks converge on it.
run_this_last = DummyOperator(
    task_id='run_this_last',
    dag=dag,
)

# [START howto_operator_bash]
run_this = BashOperator(
    task_id='run_after_loop',
    bash_command='echo 1',
    dag=dag,
)
# [END howto_operator_bash]

run_this >> run_this_last

# Three parallel tasks, each an upstream dependency of 'run_after_loop'.
for i in range(3):
    task = BashOperator(
        task_id='runme_' + str(i),
        bash_command='echo "{{ task_instance_key_str }}" && sleep 1',
        dag=dag,
    )
    task >> run_this

# [START howto_operator_bash_template]
also_run_this = BashOperator(
    task_id='also_run_this',
    bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"',
    dag=dag,
)
# [END howto_operator_bash_template]

also_run_this >> run_this_last

# Allow exercising this DAG from the command line.
if __name__ == "__main__":
    dag.cli()
|
kristinriebe/django-prov_vo | tests/travis.local.py | Python | apache-2.0 | 43 | 0 | import | sys
sys.path.append('dja | ngo-vosi/')
|
kanarelo/dairy | dairy/core/migrations/0009_auto_20151128_1236.py | Python | apache-2.0 | 992 | 0.002016 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (2015-11-28): adds Service.short_name
    # and tightens field definitions on Product and SupplierService.

    dependencies = [
        ('core', '0008_auto_20151124_1135'),
    ]

    operations = [
        # New nullable short-name column on Service.
        migrations.AddField(
            model_name='service',
            name='short_name',
            field=models.CharField(max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='product',
            name='unit',
            field=models.IntegerField(choices=[(1, b'kg'), (2, b'L')]),
        ),
        # Named reverse relations for SupplierService foreign keys.
        migrations.AlterField(
            model_name='supplierservice',
            name='service',
            field=models.ForeignKey(related_name='service_suppliers', to='core.Service'),
        ),
        migrations.AlterField(
            model_name='supplierservice',
            name='supplier',
            field=models.ForeignKey(related_name='supplier_services', to='core.Supplier'),
        ),
    ]
|
unreal666/outwiker | src/outwiker/utilites/collections.py | Python | gpl-3.0 | 322 | 0 | # -*- coding | : utf-8 -*-
def update_recent(items, new_item, max_count):
    """Return a copy of *items* with *new_item* moved to the front.

    Only the first occurrence of an existing *new_item* is relocated, and
    the result is truncated to *max_count* entries. The input list is not
    modified.
    """
    updated = items[:]
    try:
        updated.remove(new_item)
    except ValueError:
        pass
    updated.insert(0, new_item)
    return updated[:max_count]
|
speksi/python-mingus | mingus/containers/composition.py | Python | gpl-3.0 | 3,367 | 0.000891 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# mingus - Music theory Python package, composition module.
# Copyright (C) 2008-2009, Bart Spaans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from mt_exceptions import UnexpectedObjectError
class Composition(object):
    """A composition object.

    The Composition class is a datastructure for working with Tracks.
    Compositions can be stored together in Suites.
    """

    title = 'Untitled'
    subtitle = ''
    author = ''
    email = ''
    description = ''
    tracks = []
    selected_tracks = []

    def __init__(self):
        self.empty()

    def empty(self):
        """Drop every track from this composition."""
        self.tracks = []

    def reset(self):
        """Clear the tracks and restore the default title and author."""
        self.empty()
        self.set_title()
        self.set_author()

    def add_track(self, track):
        """Append a track and make it the only selected one.

        Raise an UnexpectedObjectError if the argument is not a
        mingus.containers.Track object.
        """
        if not hasattr(track, 'bars'):
            raise UnexpectedObjectError("Unexpected object '%s', "
                    "expecting a mingus.containers.Track object" % track)
        self.tracks.append(track)
        self.selected_tracks = [len(self.tracks) - 1]

    def add_note(self, note):
        """Feed a note to every currently selected track.

        Everything container.Track supports in __add__ is accepted.
        """
        for track_index in self.selected_tracks:
            self.tracks[track_index] + note

    def set_title(self, title='Untitled', subtitle=''):
        """Store the title and subtitle of the piece."""
        self.title = title
        self.subtitle = subtitle

    def set_author(self, author='', email=''):
        """Store the author and e-mail address of the piece."""
        self.author = author
        self.email = email

    def __add__(self, value):
        """Enable the '+' operator for Compositions.

        Track-like values (anything with a 'bars' attribute) are routed to
        add_track; everything else goes to add_note.
        """
        if hasattr(value, 'bars'):
            return self.add_track(value)
        return self.add_note(value)

    def __getitem__(self, index):
        """Enable the '[]' notation."""
        return self.tracks[index]

    def __setitem__(self, index, value):
        """Enable the '[] =' notation."""
        self.tracks[index] = value

    def __len__(self):
        """Enable the len() function."""
        return len(self.tracks)

    def __repr__(self):
        """Concatenate the string form of every track."""
        return ''.join(str(track) for track in self.tracks)
|
NetApp/manila | manila/share/drivers/helpers.py | Python | apache-2.0 | 21,481 | 0.000047 | # Copyright 2015 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
from oslo_log import log
from manila.common import constants as const
from manila import exception
from manila.i18n import _, _LW
from manila import utils
LOG = log.getLogger(__name__)
class NASHelperBase(object):
    """Defines the interface protocol-specific NAS share helpers implement."""

    def __init__(self, execute, ssh_execute, config_object):
        self.configuration = config_object
        self._execute = execute
        self._ssh_exec = ssh_execute

    def init_helper(self, server):
        """One-time helper initialization hook; no-op by default."""
        pass

    def create_export(self, server, share_name, recreate=False):
        """Create new export, delete old one if exists."""
        raise NotImplementedError()

    def remove_export(self, server, share_name):
        """Remove export."""
        raise NotImplementedError()

    def configure_access(self, server, share_name):
        """Configure server before allowing access; no-op by default."""
        pass

    def update_access(self, server, share_name, access_rules, add_rules,
                      delete_rules):
        """Update access rules for a given share.

        Implementations support two modes, selected by the parameters:

        1. Recovery after error: 'access_rules' contains every rule while
           'add_rules' and 'delete_rules' are empty; existing rules are
           cleared and re-applied from 'access_rules'.
        2. Incremental update: only the rules listed in 'add_rules' and
           'delete_rules' are applied; 'access_rules' is ignored.

        :param server: None or share server's backend details
        :param share_name: share's path according to its id
        :param access_rules: all access rules for the given share
        :param add_rules: empty list, or rules to add (already contained in
            access_rules)
        :param delete_rules: empty list, or rules to remove (not contained
            in access_rules)
        """
        raise NotImplementedError()

    @staticmethod
    def _verify_server_has_public_address(server):
        # Export locations are derived from the public address, so fail
        # fast when it is missing.
        if 'public_address' not in server:
            raise exception.ManilaException(
                _("Can not get 'public_address' for generation of export."))

    def get_exports_for_share(self, server, old_export_location):
        """Return the list of exports based on server info."""
        raise NotImplementedError()

    def get_share_path_by_export_location(self, server, export_location):
        """Return the share path for a given export location."""
        raise NotImplementedError()

    def disable_access_for_maintenance(self, server, share_name):
        """Disable access to a share to perform maintenance operations."""

    def restore_access_after_maintenance(self, server, share_name):
        """Enable access to a share after maintenance operations are done."""

    @staticmethod
    def validate_access_rules(access_rules, allowed_types, allowed_levels):
        """Reject rules whose type or level is outside the allowed sets.

        :param access_rules: list of access rules to be validated
        :param allowed_types: tuple of allowed access_type values
        :param allowed_levels: tuple of allowed access_level values
        """
        for rule in access_rules or []:
            if rule['access_type'] not in allowed_types:
                type_list = ', '.join("'%s'" % t for t in allowed_types)
                raise exception.InvalidShareAccess(
                    reason=_("Only %s access type allowed.") % type_list)
            if rule['access_level'] not in allowed_levels:
                raise exception.InvalidShareAccessLevel(
                    level=rule['access_level'])

    def _get_maintenance_file_path(self, share_name):
        # Marker file that flags a share as being under maintenance.
        return os.path.join(self.configuration.share_mount_path,
                            "%s.maintenance" % share_name)
def nfs_synchronized(f):
    """Decorator serializing NFS operations that share a lock name.

    The lock key is derived from the first positional argument (the server
    backend-details dict): its 'lock_name' entry if present, otherwise its
    'instance_id'.
    """
    def wrapped_func(self, *args, **kwargs):
        key = "nfs-%s" % args[0].get("lock_name", args[0]["instance_id"])

        # NOTE(vponomaryov): 'external' lock is required for DHSS=False
        # mode of LVM and Generic drivers, that may have lots of
        # driver instances on single host.
        @utils.synchronized(key, external=True)
        def source_func(self, *args, **kwargs):
            return f(self, *args, **kwargs)

        return source_func(self, *args, **kwargs)
    return wrapped_func
class NFSHelper(NASHelperBase):
"""Interface to work with share."""
    def create_export(self, server, share_name, recreate=False):
        """Create new export, delete old one if exists.

        For NFS this only builds the location string
        '<public_address>:<mount_path>/<share_name>'; no server-side state
        is created here.
        """
        return ':'.join((server['public_address'],
                         os.path.join(
                             self.configuration.share_mount_path, share_name)))
    def init_helper(self, server):
        """Verify the NFS userland is present by probing 'exportfs'."""
        try:
            self._ssh_exec(server, ['sudo', 'exportfs'])
        except exception.ProcessExecutionError as e:
            # 'command not found' means NFS tooling is absent entirely;
            # other failures are only logged.
            if 'command not found' in e.stderr:
                raise exception.ManilaException(
                    _('NFS server is not installed on %s')
                    % server['instance_id'])
            LOG.error(e.stderr)
    def remove_export(self, server, share_name):
        """Remove export. Intentionally a no-op for NFS."""
    def _get_parsed_access_to(self, access_to):
        # Normalize a CIDR-style rule: a /32 collapses to the bare address,
        # anything else becomes 'address/netmask'.
        netmask = utils.cidr_to_netmask(access_to)
        if netmask == '255.255.255.255':
            return access_to.split('/')[0]
        return access_to.split('/')[0] + '/' + netmask
@nfs_synchronized
def update_access(self, server, share_name, access_rules, add_rules,
delete_rules):
"""Update access rules for given share.
Please refer to base class for a more in-depth description.
"""
local_path = os.path.join(self.configuration.share_mount_path,
share_name)
out, err = self._ssh_exec(server, ['sudo', 'exportfs'])
# Recovery mode
if not (add_rules or delete_rules):
self.validate_access_rules(
access_rules, ('ip',),
(const.ACCESS_LEVEL_RO, const.ACCESS_LEVEL_RW))
hosts = self._get_host_list(out, local_path)
for host in hosts:
self._ssh_exec(server, ['sudo', 'exportfs', '-u',
':'.join((host, local_path))])
self._sync_nfs_temp_and_perm_files(server)
for access in access_rules:
rules_options = '%s,no_subtree_check'
if access['access_level'] == const.ACCESS_LEVEL_RW:
rules_options = ','.join((rules_options, 'no_root_squash'))
self._ssh_exec(
server,
['sudo', 'exportfs', '-o',
rules_options % access['access_level'],
':'.join((self._get_parsed_access_to(access['access_to']),
local_path))])
self._sync_nfs_temp_and_perm_files(server)
# Adding/Deleting specific rules
else:
self.validate_access_rules(
add_rules, ('ip',),
(const.ACCESS_LEVEL_RO, const.ACCESS_LEVEL_RW))
for access in delete_rules:
access['access_to'] = self._get_parsed_access_to(
|
hjanime/VisTrails | vistrails/db/versions/v1_0_4/domain/auto_gen.py | Python | bsd-3-clause | 767,323 | 0.005515 | ###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
from __future__ import division
import copy
class DBOpmWasGeneratedBy(object):
vtType = 'opm_was_generated_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        """Auto-generated constructor; each child slot is paired with a
        db_deleted_* list used to track removed children.
        """
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        # New objects start dirty so they are picked up by persistence.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # Delegate to do_copy so copy.copy() shares the deep-copy logic.
        return DBOpmWasGeneratedBy.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its children.

        When new_ids is True, allocate a fresh id from id_scope and record
        the (old, new) pair in id_remap.
        """
        cp = DBOpmWasGeneratedBy()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict may supply per-class override callables keyed by field
        name; fields without an override are converted recursively via the
        child class's own update_version.
        """
        if new_obj is None:
            new_obj = DBOpmWasGeneratedBy()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        # Preserve the persistence flags of the source object.
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
if self._db_effect is not None:
children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
self._db_effect = None
if self._db_role is not None:
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
self._db_role = None
if self._db_cause is n |
eballetbo/igep_qa | igep_qa/helpers/omap.py | Python | mit | 4,552 | 0.004613 | #!/usr/bin/env python
"""
This provides various OMAP/IGEP related helper functions.
"""
from igep_qa.helpers.common import QMmap, QCpuinfo, QDeviceTree
import commands
import time
def cpu_is_omap5():
    """ Returns True if machine is OMAP5, otherwise returns False
    """
    # Decided via the device tree 'compatible' property.
    return QDeviceTree().compatible("ti,omap5")
def omap3_get_dieid():
    """ Single die identifier for OMAP processors

    Returns the die number in hexadecimal format

    See AM/DM37x Multimedia Device Silicon Revision 1.x TRM
    """
    mm = QMmap()
    die_id = ""
    # DIE_ID register words, concatenated in this fixed order.
    for register in (0x4830A224, 0x4830A220, 0x4830A21C, 0x4830A218):
        die_id += mm.read(register)
    return die_id
def omap5_get_dieid():
    """ Single die identifier for OMAP5 processors

    Returns the die number in hexadecimal format

    See OMAP543x Multimedia Device TRM Revision X
    """
    mm = QMmap()
    die_id = ""
    # DIE_ID register words, concatenated in this fixed order.
    for register in (0x4A002210, 0x4A00220C, 0x4A002208, 0x4A002200):
        die_id += mm.read(register)
    return die_id
def machine_is_igep0020():
    """ Returns True if machine is igep0020, otherwise returns False
    """
    return QCpuinfo()["Hardware"] == "IGEP0020 board"
def machine_is_igep0030():
    """ Returns True if machine is igep0030, otherwise returns False.
    """
    return QCpuinfo()["Hardware"] == "IGEP0030 COM"
def machine_is_igep0032():
    """ Returns True if machine is igep0032, otherwise returns False
    """
    return QCpuinfo()["Hardware"] == "IGEP0032 COM"
def buddy_is_igep0022():
    """ Returns True if buddy is igep0022, otherwise returns False.

    The buddy board is announced via a 'buddy=...' kernel command line
    option.
    """
    # Context manager guarantees the file is closed on every path; the
    # original manual open/close leaked the descriptor if readline raised.
    with open("/proc/cmdline", "r") as fd:
        # Substring match inside each option, matching the old behavior.
        return any("buddy=igep0022" in opt
                   for opt in fd.readline().split(" "))
def buddy_is_base0010():
    """ Returns True if buddy is base0010, otherwise returns False

    The buddy board is announced via a 'buddy=...' kernel command line
    option.
    """
    # Context manager guarantees the file is closed on every path; the
    # original manual open/close leaked the descriptor if readline raised.
    with open("/proc/cmdline", "r") as fd:
        # Substring match inside each option, matching the old behavior.
        return any("buddy=base0010" in opt
                   for opt in fd.readline().split(" "))
def buddy_is_ilms0015():
    """ Returns True if buddy is ilms0015, otherwise returns False.

    The buddy board is announced via a 'buddy=...' kernel command line
    option.
    """
    # Context manager guarantees the file is closed on every path; the
    # original manual open/close leaked the descriptor if readline raised.
    with open("/proc/cmdline", "r") as fd:
        # Substring match inside each option, matching the old behavior.
        return any("buddy=ilms0015" in opt
                   for opt in fd.readline().split(" "))
def igep0050_set_headset_amixer_settings(headset):
    """ Set amixer settings to playback/capture via headset.

    Every command below is applied to the card passed in as *headset*
    (check the card number by running 'cat /proc/asound/cards').
    """
    amixer_commands = (
        "amixer cset -c %s name='Headset Left Playback' 1",
        "amixer cset -c %s name='Headset Right Playback' 1",
        "amixer cset -c %s name='Headset Playback Volume' 12",
        "amixer cset -c %s name='DL1 PDM Switch' 1",
        "amixer cset -c %s name='Sidetone Mixer Playback' 1",
        "amixer cset -c %s name='SDT DL Volume' 120",
        "amixer cset -c %s name='DL1 Mixer Multimedia' 1",
        "amixer cset -c %s name='DL1 Media Playback Volume' 110",
        "amixer cset -c %s name='Sidetone Mixer Capture' 1",
        "amixer sset -c %s 'Analog Left',0 'Headset Mic'",
        "amixer sset -c %s 'Analog Right',0 'Headset Mic'",
        "amixer sset -c %s 'AUDUL Media',0 149",
        "amixer sset -c %s 'Capture',0 4",
        "amixer sset -c %s MUX_UL00,0 AMic0",
        "amixer sset -c %s MUX_UL01,0 AMic1",
        "amixer sset -c %s 'AMIC UL',0 120",
    )
    # Execute the commands in the same order as before.
    for command in amixer_commands:
        commands.getoutput(command % headset)
def igep0050_power_up_bluetooth():
    """ Power Up bluetooth device.
    Send a pulse to the BT_EN pin.
    """
    # Export GPIO 142 (BT_EN), configure it as output and drive it low ...
    for setup_cmd in ("echo 142 > /sys/class/gpio/export",
                      "echo out > /sys/class/gpio/gpio142/direction",
                      "echo 0 > /sys/class/gpio/gpio142/value"):
        commands.getoutput(setup_cmd)
    # ... hold for a second, then raise the pin to complete the pulse.
    time.sleep(1)
    commands.getoutput("echo 1 > /sys/class/gpio/gpio142/value")
|
vinegret/youtube-dl | youtube_dl/extractor/usatoday.py | Python | unlicense | 2,703 | 0.00259 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor |
from ..utils import (
ExtractorError,
get_element_by_attribute,
parse_duration,
try_get,
update_url_query,
)
from ..compat import compat_str
class USATodayIE(InfoExtractor):
    """Extracts videos from usatoday.com pages and hands playback off to the
    Brightcove extractor via a url_transparent result."""
    # Any article/video path; the final path segment (before ?/#) is the id.
    _VALID_URL = r'https?://(?:www\.)?usatoday\.com/(?:[^/]+/)*(?P<id>[^?/#]+)'
    _TESTS = [{
        # Brightcove Partner ID = 29906170001
        'url': 'http://www.usatoday.com/media/cinematic/video/81729424/us-france-warn-syrian-regime-ahead-of-new-peace-talks/',
        'md5': '033587d2529dc3411a1ab3644c3b8827',
        'info_dict': {
            'id': '4799374959001',
            'ext': 'mp4',
            'title': 'US, France warn Syrian regime ahead of new peace talks',
            'timestamp': 1457891045,
            'description': 'md5:7e50464fdf2126b0f533748d3c78d58f',
            'uploader_id': '29906170001',
            'upload_date': '20160313',
        }
    }, {
        # ui-video-data[asset_metadata][items][brightcoveaccount] = 28911775001
        'url': 'https://www.usatoday.com/story/tech/science/2018/08/21/yellowstone-supervolcano-eruption-stop-worrying-its-blow/973633002/',
        'info_dict': {
            'id': '5824495846001',
            'ext': 'mp4',
            'title': 'Yellowstone more likely to crack rather than explode',
            'timestamp': 1534790612,
            'description': 'md5:3715e7927639a4f16b474e9391687c62',
            'uploader_id': '28911775001',
            'upload_date': '20180820',
        }
    }]
    # %s slots: Brightcove account id, Brightcove video id.
    BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/%s/default_default/index.html?videoId=%s'
    def _real_extract(self, url):
        display_id = self._match_id(url)
        # NOTE(review): ajax=true presumably requests a lighter page variant
        # that still contains the ui-video-data element -- confirm.
        webpage = self._download_webpage(update_url_query(url, {'ajax': 'true'}), display_id)
        # The page embeds the video metadata as JSON inside an element with
        # class "ui-video-data"; its absence means there is no video.
        ui_video_data = get_element_by_attribute('class', 'ui-video-data', webpage)
        if not ui_video_data:
            raise ExtractorError('no video on the webpage', expected=True)
        video_data = self._parse_json(ui_video_data, display_id)
        item = try_get(video_data, lambda x: x['asset_metadata']['items'], dict) or {}
        # url_transparent: BrightcoveNew does the real extraction, and the
        # metadata below overrides what it returns.
        return {
            '_type': 'url_transparent',
            'url': self.BRIGHTCOVE_URL_TEMPLATE % (item.get('brightcoveaccount', '29906170001'), item.get('brightcoveid') or video_data['brightcove_id']),
            'id': compat_str(video_data['id']),
            'title': video_data['title'],
            'thumbnail': video_data.get('thumbnail'),
            'description': video_data.get('description'),
            'duration': parse_duration(video_data.get('length')),
            'ie_key': 'BrightcoveNew',
        }
|
tuskar/tuskar-ui | openstack_dashboard/dashboards/admin/hypervisors/views.py | Python | apache-2.0 | 1,366 | 0.001464 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 B1 Systems GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# | under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboa | rds.admin.hypervisors.tables import \
AdminHypervisorsTable
LOG = logging.getLogger(__name__)
class AdminIndexView(tables.DataTableView):
    """Admin table view listing all hypervisors known to Nova."""
    table_class = AdminHypervisorsTable
    template_name = 'admin/hypervisors/index.html'

    def get_data(self):
        """Return the hypervisor list, or an empty list if the call fails."""
        hypervisors = []
        try:
            hypervisors = api.nova.hypervisor_list(self.request)
        except Exception:
            # Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; only real errors should reach the handler.
            exceptions.handle(self.request,
                              _('Unable to retrieve hypervisor list.'))
        return hypervisors
|
weolar/miniblink49 | third_party/WebKit/Source/bindings/scripts/code_generator_v8.py | Python | apache-2.0 | 21,053 | 0.0019 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generate Blink V8 bindings (.h and .cpp files).
If run itself, caches Jinja templates (and creates dummy file for build,
since cache filenames are unpredictable and opaque).
This module is *not* concurrency-safe without care: bytecode caching creates
a race condition on cache *write* (crashes if one process tries to read a
partially-written cache). However, if you pre-cache the templates (by running
the module itself), then you can parallelize compiling individual files, since
cache *reading* is safe.
Input: An object of class IdlDefinitions, containing an IDL interface X
Output: V8X.h and V8X.cpp
Design doc: http://www.chromium.org/developers/design-documents/idl-compiler
"""
import os
import posixpath
import re
import sys
# Path handling for libraries and templates
# Paths have to be normalized because Jinja uses the exact template path to
# determine the hash used in the cache filename, and we need a pre-caching step
# to be concurrency-safe. Use absolute path because __file__ is absolute if
# module is imported, and relative if executed directly.
# If paths differ between pre-caching and individual file compilation, the cache
# is regenerated, which causes a race condition and breaks concurrent build,
# since some compile processes will try to read the partially written cache.
module_path, module_filename = os.path.split(os.path.realpath(__file__))
third_party_dir = os.path.normpath(os.path.join(
module_path, os.pardir, os.pardir, os.pardir, os.pardir))
templates_dir = os.path.normpath(os.path.join(
module_path, os.pardir, 'templates'))
# Make sure extension is .py, not .pyc or .pyo, so doesn't depend on caching
module_pyname = os.path.splitext(module_filename)[0] + '.py'
# jinja2 is in chromium's third_party directory.
# Insert at 1 so at front to override system libraries, and
# after path[0] == invoking script dir
sys.path.insert(1, third_party_dir)
import jinja2
from idl_definitions import Visitor
import idl_types
from idl_types import IdlType
import v8_callback_interface
import v8_dictionary
from v8_globals import includes, interfaces
import v8_interface
import v8_types
import v8_union
from v8_utilities import capitalize, cpp_name, conditional_string, v8_class_name
from utilities import KNOWN_COMPONENTS, idl_filename_to_component, is_valid_component_dependency, is_testing_target
def render_template(include_paths, header_template, cpp_template,
                    template_context, component=None):
    """Render the .h/.cpp pair for one IDL definition.

    Returns (header_text, cpp_text).  Mutates the module-global ``includes``
    set: every dependency path is added before ``cpp_includes`` is sorted
    into the context, so the order of these steps matters.
    """
    template_context['code_generator'] = module_pyname
    # Add includes for any dependencies
    template_context['header_includes'] = sorted(
        template_context['header_includes'])
    for include_path in include_paths:
        if component:
            # Cross-component includes must follow an allowed dependency
            # edge in the component graph; fail loudly otherwise.
            dependency = idl_filename_to_component(include_path)
            assert is_valid_component_dependency(component, dependency)
        includes.add(include_path)
    template_context['cpp_includes'] = sorted(includes)
    header_text = header_template.render(template_context)
    cpp_text = cpp_template.render(template_context)
    return header_text, cpp_text
def set_global_type_info(info_provider):
    """Populate the process-wide IdlType lookup tables from *info_provider*.

    Code generation relies on these module/class-level tables (ancestors,
    callback interfaces, dictionaries, enums, ...), so this must run before
    any definitions are generated.
    """
    interfaces_info = info_provider.interfaces_info
    idl_types.set_ancestors(interfaces_info['ancestors'])
    IdlType.set_callback_interfaces(interfaces_info['callback_interfaces'])
    IdlType.set_dictionaries(interfaces_info['dictionaries'])
    IdlType.set_enums(info_provider.enumerations)
    IdlType.set_implemented_as_interfaces(interfaces_info['implemented_as_interfaces'])
    IdlType.set_garbage_collected_types(interfaces_info['garbage_collected_interfaces'])
    IdlType.set_will_be_garbage_collected_types(interfaces_info['will_be_garbage_collected_interfaces'])
    v8_types.set_component_dirs(interfaces_info['component_dirs'])
def should_generate_code(definitions):
    """Truthy when the definitions contain any interface or dictionary."""
    interfaces = definitions.interfaces
    dictionaries = definitions.dictionaries
    return interfaces or dictionaries
def depends_on_union_types(idl_type):
    """Returns true when a given idl_type depends on union containers
    directly.
    """
    if idl_type.is_union_type:
        return True
    # Arrays/sequences depend on unions through their element type.
    return (idl_type.is_array_or_sequence_type
            and idl_type.element_type.is_union_type)
class TypedefResolver(Visitor):
    """Replaces typedef references throughout an IDL definition tree with
    the underlying types, collecting any include paths that the resolved
    types newly require."""
    def __init__(self, info_provider):
        self.info_provider = info_provider
    def resolve(self, definitions, definition_name):
        """Traverse definitions and resolves typedefs with the actual types."""
        self.typedefs = {}
        for name, typedef in self.info_provider.typedefs.iteritems():
            self.typedefs[name] = typedef.idl_type
        # Include paths discovered while resolving (e.g. union containers).
        self.additional_includes = set()
        definitions.accept(self)
        self._update_dependencies_include_paths(definition_name)
    def _update_dependencies_include_paths(self, definition_name):
        # Merge the includes gathered during resolution into this
        # definition's dependency include paths, avoiding duplicates.
        interface_info = self.info_provider.interfaces_info[definition_name]
        dependencies_include_paths = interface_info['dependencies_include_paths']
        for include_path in self.additional_includes:
            if include_path not in dependencies_include_paths:
                dependencies_include_paths.append(include_path)
    def _resolve_typedefs(self, typed_object):
        """Resolve typedefs to actual types in the object."""
        for attribute_name in typed_object.idl_type_attributes:
            try:
                idl_type = getattr(typed_object, attribute_name)
            except AttributeError:
                continue
            if not idl_type:
                continue
            resolved_idl_type = idl_type.resolve_typedefs(self.typedefs)
            if depends_on_union_types(resolved_idl_type):
                # Resolving exposed a union type, so the generated code now
                # needs the union container include as well.
                self.additional_includes.add(
                    self.info_provider.include_path_for_union_types)
            # Need to re-assign the attribute, not just mutate idl_type, since
            # type(idl_type) may change.
            setattr(typed_object, attribute_name, resolved_idl_type)
    def visit_typed_object(self, typed_object):
        self._resolve_typedefs(typed_object)
class CodeGeneratorBase(object):
"""Base class for v8 bindings generator and IDL dictionary impl generator"""
def __init__(self, info_provider, cache_dir, output_dir):
self.info_pro | vider = info_provider
self.jinja_env = initialize_jinja_env(cache_dir)
self.output_dir = output_dir
self.typedef_resolver = TypedefResolver(info_provider)
set_global_type_info(info_provider)
def generate_code(self, definitions, definition_name):
"""Returns .h/.cpp code as ((path, content)...)."""
# Set local type info
if not should_generate_code(definitions |
benh/twesos | src/webui/master/webui.py | Python | bsd-3-clause | 1,208 | 0.022351 | import sys
import bottle
import commands
import datetime
from bottle import route, send_file, template
start_time = datetime.datetime.now()
@route('/')
def index():
    """Render the master's main status page."""
    # Clear Bottle's template cache so template edits show up immediately.
    bottle.TEMPLATES.clear() # For rapid development
    return template("index", start_time = start_time)
@route('/framework/:id#[0-9-]*#')
def framework(id):
    """Render the detail page for one framework (route-constrained id)."""
    # `id` shadows the builtin, but the name must match the route placeholder.
    bottle.TEMPLATES.clear() # For rapid development
    return template("framework", framework_id = id)
@route('/static/:filename#.*#')
def static(filename):
    # Serve static assets (css/js/images) from the webui static directory.
    send_file(filename, root = './webui/static')
@route('/log/:level#[A-Z]*#')
def log_full(level):
    # Stream the whole mesos-master.<LEVEL> log file as plain text;
    # guessmime is disabled so the explicit text/plain type is used.
    send_file('mesos-master.' + level, root = log_dir,
              guessmime = False, mimetype = 'text/plain')
@route('/log/:level#[A-Z]*#/:lines#[0-9]*#')
def log_tail(level, lines):
    # Return the last `lines` lines of the log. The route regexes restrict
    # `level` to uppercase letters and `lines` to digits, which bounds what
    # can be interpolated into the shell command below.
    bottle.response.content_type = 'text/plain'
    return commands.getoutput('tail -%s %s/mesos-master.%s' % (lines, log_dir, level))
bottle.TEMPLATE_PATH.append('./webui/master/')
# TODO(*): Add an assert to confirm that all the arguments we are
# expecting have been passed to us, which will give us a better error
# message when they aren't!
# Invocation contract: argv[1] is the HTTP port to listen on, argv[2] is
# the directory containing the master's log files.
init_port = sys.argv[1]
log_dir = sys.argv[2]
bottle.run(host = '0.0.0.0', port = init_port)
|
wxgeo/geophar | wxgeometrie/sympy/utilities/timeutils.py | Python | gpl-2.0 | 2,063 | 0.001454 | """Simple tools for timing functions' execution, when IPython is not available. """
from __future__ import print_function, division
import timeit
import math
from sympy.core.compatibility import range
# Scale factors and unit labels, index 0..3 = seconds .. nanoseconds.
_scales = [1e0, 1e3, 1e6, 1e9]
_units = [u's', u'ms', u'\N{GREEK SMALL LETTER MU}s', u'ns']


def timed(func, setup="pass", limit=None):
    """Adaptively measure execution time of a function. """
    timer = timeit.Timer(func, setup=setup)
    repeat, number = 3, 1

    # Grow the batch size 10x at a time until a single batch takes at least
    # 0.2s, or the caller-supplied cap on the iteration count is reached.
    for _ in range(1, 10):
        if timer.timeit(number) >= 0.2:
            break
        if limit is not None and number >= limit:
            break
        number *= 10

    best = min(timer.repeat(repeat, number)) / number

    # Choose s/ms/us/ns so the scaled value falls in a readable range.
    if best > 0.0:
        order = min(-int(math.floor(math.log10(best)) // 3), 3)
    else:
        order = 3

    return (number, best, best * _scales[order], _units[order])
# Code for doing inline timings of recursive algorithms.
def __do_timings():
    """Parse the SYMPY_TIMINGS env var into the set of names to be timed."""
    import os
    raw = os.getenv('SYMPY_TIMINGS', '')
    return set(entry.strip() for entry in raw.split(','))


_do_timings = __do_timings()
_timestack = None
def _print_timestack(stack, level=1):
    """Recursively pretty-print a [name, children, elapsed, args] call tree."""
    name, children, elapsed, call_args = stack[0], stack[1], stack[2], stack[3]
    print('-' * level, '%.2f %s%s' % (elapsed, name, call_args))
    for child in children:
        _print_timestack(child, level + 1)
def timethis(name):
    """Decorator that times calls of the wrapped function and prints a call
    tree on return to the outermost timed frame.

    Timing is only enabled when *name* appears in the SYMPY_TIMINGS
    environment variable (parsed into ``_do_timings`` at import time);
    otherwise the function is returned unchanged.
    """
    def decorator(func):
        global _do_timings
        if name not in _do_timings:
            return func

        def wrapper(*args, **kwargs):
            from time import time
            global _timestack
            oldtimestack = _timestack
            # Frame layout: [name, children, elapsed, args].
            # func.__name__ works on both Python 2 and 3; the original
            # func.func_name attribute is Python-2-only and broke under 3.
            _timestack = [func.__name__, [], 0, args]
            t1 = time()
            r = func(*args, **kwargs)
            t2 = time()
            _timestack[2] = t2 - t1
            if oldtimestack is not None:
                # Nested call: attach this frame to the caller's children.
                oldtimestack[1].append(_timestack)
                _timestack = oldtimestack
            else:
                # Outermost timed call: dump the collected tree.
                _print_timestack(_timestack)
                _timestack = None
            return r
        return wrapper
    return decorator
|
pferreir/indico | indico/modules/news/controllers.py | Python | mit | 3,954 | 0.001517 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import timedelta
from flask import flash, redirect, request, session
from indico.core.db import db
from indico.modules.admin import RHAdminBase
from indico.modules.news import logger, news_settings
from indico.modules.news.forms import NewsForm, NewsSettingsForm
from indico.modules.news.models.news import NewsItem
from indico.modules.news.util import get_recent_news
from indico.modules.news.views import WPManageNews, WPNews
from indico.util.date_time import now_utc
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.forms.base import FormDefaults
from indico.web.rh import RH
from indico.web.util import jsonify_data, jsonify_form
class RHNews(RH):
    """Public page listing all news items, newest first."""

    @staticmethod
    def _is_new(item):
        """Whether *item* is recent enough (per the 'new_days' setting) to
        be highlighted as new; 0/None disables the highlight entirely."""
        days = news_settings.get('new_days')
        if not days:
            return False
        cutoff = (now_utc() - timedelta(days=days)).date()
        return item.created_dt.date() >= cutoff

    def _process(self):
        items = NewsItem.query.order_by(NewsItem.created_dt.desc()).all()
        return WPNews.render_template('news.html', news=items, _is_new=self._is_new)
class RHNewsItem(RH):
    """Public page showing a single news item."""
    # NOTE(review): normalize_url_spec presumably redirects requests whose
    # slug differs from the canonical slugged locator -- confirm against the
    # RH base class contract.
    normalize_url_spec = {
        'locators': {
            lambda self: self.item.locator.slugged
        }
    }
    def _process_args(self):
        # 404 when the news_id from the URL does not exist.
        self.item = NewsItem.get_or_404(request.view_args['news_id'])
    def _process(self):
        return WPNews.render_template('news_item.html', item=self.item)
class RHManageNewsBase(RHAdminBase):
    """Marker base class grouping the news management request handlers;
    inherits admin access restrictions from RHAdminBase."""
    pass
class RHManageNews(RHManageNewsBase):
    """Admin page listing all news items for management, newest first."""

    def _process(self):
        items = NewsItem.query.order_by(NewsItem.created_dt.desc()).all()
        return WPManageNews.render_template('admin/news.html', 'news', news=items)
class RHNewsSettings(RHManageNewsBase):
    """Dialog for editing the news module settings."""
    def _process(self):
        form = NewsSettingsForm(obj=FormDefaults(**news_settings.get_all()))
        if form.validate_on_submit():
            news_settings.set_multi(form.data)
            # Settings (e.g. new_days) affect what is shown as recent news,
            # so the cached value must be invalidated after saving.
            get_recent_news.clear_cached()
            flash(_('Settings have been saved'), 'success')
            return jsonify_data()
        return jsonify_form(form)
class RHCreateNews(RHManageNewsBase):
    """Dialog for posting a new news item."""
    def _process(self):
        form = NewsForm()
        if form.validate_on_submit():
            item = NewsItem()
            form.populate_obj(item)
            db.session.add(item)
            # Flush so the new row exists (and has its id) before logging.
            db.session.flush()
            get_recent_news.clear_cached()
            logger.info('News %r created by %s', item, session.user)
            flash(_("News '{title}' has been posted").format(title=item.title), 'success')
            return jsonify_data(flash=False)
        return jsonify_form(form)
class RHManageNewsItemBase(RHManageNewsBase):
    """Base for management handlers operating on one existing news item."""
    def _process_args(self):
        RHManageNewsBase._process_args(self)
        # 404 when the news_id from the URL does not exist.
        self.item = NewsItem.get_or_404(request.view_args['news_id'])
class RHEditNews(RHManageNewsItemBase):
    """Dialog for editing an existing news item."""
    def _process(self):
        form = NewsForm(obj=self.item)
        if form.validate_on_submit():
            # Capture the pre-edit title so the flash message shows the
            # name the admin clicked on, even if the title was changed.
            old_title = self.item.title
            form.populate_obj(self.item)
            db.session.flush()
            get_recent_news.clear_cached()
            logger.info('News %r modified by %s', self.item, session.user)
            flash(_("News '{title}' has been updated").format(title=old_title), 'success')
            return jsonify_data(flash=False)
        return jsonify_form(form)
class RHDeleteNews(RHManageNewsItemBase):
    """Deletes a news item and redirects back to the management page."""
    def _process(self):
        # Read the title before the row is deleted, rather than relying on
        # the deleted instance still being readable pre-flush.
        title = self.item.title
        db.session.delete(self.item)
        get_recent_news.clear_cached()
        flash(_("News '{title}' has been deleted").format(title=title), 'success')
        # %s for the user, matching the log format of the sibling handlers
        # (create/edit use "by %s"; this one inconsistently used %r).
        logger.info('News %r deleted by %s', self.item, session.user)
        return redirect(url_for('news.manage'))
|
evangeline97/localwiki-backend-server | localwiki/links/migrations/0002_populate_page_links.py | Python | gpl-2.0 | 3,623 | 0.00552 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.utils.encoding import smart_str
class Migration(DataMigration):
    """Backfills the Link table from wiki links already present in each
    page's content (south data migration)."""
    def forwards(self, orm):
        from pages.models import slugify
        from links import extract_internal_links
        for page in orm['pages.Page'].objects.all().iterator():
            region = page.region
            # Mapping of linked page name -> number of occurrences on `page`.
            links = extract_internal_links(page.content)
            print "..recording page links on %s" % smart_str(page.name)
            for pagename, count in links.iteritems():
                page_exists = orm['pages.Page'].objects.filter(slug=slugify(pagename), region=region)
                if page_exists:
                    destination = page_exists[0]
                else:
                    # Dangling link: destination page does not exist (yet).
                    destination = None
                # Already recorded with this exact destination: skip.
                if orm.Link.objects.filter(source=page, destination=destination).exists():
                    continue
                # A link recorded earlier by name may now be resolvable to a
                # real destination page; update it in place.
                if orm.Link.objects.filter(source=page, destination_name__iexact=pagename).exists():
                    if destination:
                        link = orm.Link.objects.filter(source=page, destination_name__iexact=pagename)[0]
                        link.destination = destination
                        link.save()
                else:
                    link = orm.Link(
                        source=page,
                        region=region,
                        destination=destination,
                        destination_name=pagename,
                        count=count,
                    )
                    link.save()
    def backwards(self, orm):
        # Links are derived data; removing them all reverses the migration.
        orm.Link.objects.all().delete()
    models = {
        'links.link': {
            'Meta': {'unique_together': "(('source', 'destination'),)", 'object_name': 'Link'},
            'count': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
            'destination': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'links_to_here'", 'null': 'True', 'to': "orm['pages.Page']"}),
            'destination_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['regions.Region']"}),
            'source': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'links'", 'to': "orm['pages.Page']"})
        },
        'pages.page': {
            'Meta': {'unique_together': "(('slug', 'region'),)", 'object_name': 'Page'},
            'content': ('pages.fields.WikiHTMLField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['regions.Region']", 'null': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
        },
        'regions.region': {
            'Meta': {'object_name': 'Region'},
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'geom': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'})
        }
    }
    complete_apps = ['links']
    symmetrical = True
|
Carvuh/TerminalQuest | PlayerInputManager.py | Python | mit | 2,393 | 0.005433 | from DungeonMaster import bcolors
from random import randint
class InputManager:
    """Parses slash-commands typed by the player and executes them."""

    # True once any recognised command has been parsed.
    correctCommand = False

    @staticmethod
    def ParsePlayerInput(playerInput):
        """Parse one line of player input and run the matching command.

        Supported commands: /yell /roll /say /grab /pickup /setRace.
        Dice specs look like ``d20``, ``3d12`` or ``12d10``.
        """
        commandTokens = ["/yell", "/roll", "/say", "/grab", "/pickup", "/setRace"]
        splitPlayerInput = playerInput.split()

        # Empty input used to raise IndexError; treat it as invalid instead.
        if not splitPlayerInput or splitPlayerInput[0] not in commandTokens:
            print(bcolors.FAIL + "Please enter a valid command." + bcolors.ENDC)
            return

        InputManager.correctCommand = True

        ## LETS ROLL SOME DICE!
        if '/roll' in splitPlayerInput and len(splitPlayerInput) > 1:
            spec = splitPlayerInput[1]
            # One loop over the supported dice replaces three copy-pasted
            # blocks that differed only in the die size.
            for sides in (20, 12, 10):
                if 'd%d' % sides in spec:
                    count_text = spec.split('d', 1)[0]
                    # "d20" rolls once; "3d20" rolls three times.  Parsing
                    # the whole prefix also fixes multi-digit counts such as
                    # "12d20", which the old code truncated to "1".
                    rolls = int(count_text) if count_text.isdigit() else 1
                    for _ in range(rolls):
                        # A dN die yields 1..N; the old randint(0, N) could
                        # roll an impossible 0.
                        print(bcolors.OKGREEN + "You rolled: ", randint(1, sides), "" + bcolors.ENDC)

        if '/setRace' in splitPlayerInput and len(splitPlayerInput) > 1:
            from CharacterSheet import Player
            if splitPlayerInput[1] in Player.playableRaces:
                # Fixed: the old code used "==", comparing instead of
                # assigning the selected race.
                Player.playerRace = splitPlayerInput[1]
                print(Player.playerRace)
|
elastic/elasticsearch-py | elasticsearch/_async/client/graph.py | Python | apache-2.0 | 3,806 | 0.000788 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import typing as t
from elastic_transport import ObjectApiResponse
from ._base import NamespacedClient
from .utils import SKIP_IN_PATH, _quote, _rewrite_parameters
class GraphClient(NamespacedClient):
    """Namespaced client for the Graph explore API."""
    @_rewrite_parameters(
        body_fields=True,
    )
    async def explore(
        self,
        *,
        index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
        connections: t.Optional[t.Mapping[str, t.Any]] = None,
        controls: t.Optional[t.Mapping[str, t.Any]] = None,
        error_trace: t.Optional[bool] = None,
        filter_path: t.Optional[
            t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
        ] = None,
        human: t.Optional[bool] = None,
        pretty: t.Optional[bool] = None,
        query: t.Optional[t.Mapping[str, t.Any]] = None,
        routing: t.Optional[str] = None,
        timeout: t.Optional[t.Union[int, str]] = None,
        vertices: t.Optional[
            t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]]
        ] = None,
    ) -> ObjectApiResponse[t.Any]:
        """
        Explore extracted and summarized information about the documents and terms in
        an index.
        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/graph-explore-api.html>`_
        :param index: A comma-separated list of index names to search; use `_all` or
            empty string to perform the operation on all indices
        :param connections:
        :param controls:
        :param query:
        :param routing: Specific routing value
        :param timeout: Explicit operation timeout
        :param vertices:
        """
        if index in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'index'")
        __path = f"/{_quote(index)}/_graph/explore"
        __body: t.Dict[str, t.Any] = {}
        __query: t.Dict[str, t.Any] = {}
        # Graph-specific fields travel in the request body; the generic
        # client options (error_trace, filter_path, ...) go on the query
        # string, as do routing and timeout.
        if connections is not None:
            __body["connections"] = connections
        if controls is not None:
            __body["controls"] = controls
        if error_trace is not None:
            __query["error_trace"] = error_trace
        if filter_path is not None:
            __query["filter_path"] = filter_path
        if human is not None:
            __query["human"] = human
        if pretty is not None:
            __query["pretty"] = pretty
        if query is not None:
            __body["query"] = query
        if routing is not None:
            __query["routing"] = routing
        if timeout is not None:
            __query["timeout"] = timeout
        if vertices is not None:
            __body["vertices"] = vertices
        if not __body:
            # Send no body at all (and no content-type header) when every
            # body field was omitted.
            __body = None  # type: ignore[assignment]
        __headers = {"accept": "application/json"}
        if __body is not None:
            __headers["content-type"] = "application/json"
        return await self.perform_request(  # type: ignore[return-value]
            "POST", __path, params=__query, headers=__headers, body=__body
        )
|
egnyte/python-egnyte | egnyte/tests/test_folders.py | Python | mit | 2,749 | 0.00146 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from egnyte import exc
from egnyte.tests.config import EgnyteTestCase
FOLDER_NAME = 'Iñtërnâtiônàlizætiøν☃ test'
DESTINATION_FOLDER_NAME = 'destination'
S | UB_FOLDER_NAME = 'subfolder'
FILE_IN_FOLDER_NAME = 'test.txt'
FILE_CONTENT = b'TEST FILE CONTENT'
class TestFolders(EgnyteTestCase):
    """Integration tests for folder operations against the Egnyte API."""
    def setUp(self):
        super(TestFolders, self).setUp()
        # A working folder, a separate destination for move/copy tests, and
        # a file handle inside the working folder.
        self.folder = self.root_folder.folder(FOLDER_NAME)
        self.destination = self.root_folder.folder(DESTINATION_FOLDER_NAME)
        self.file = self.folder.file(FILE_IN_FOLDER_NAME)
    def test_folder_create(self):
        self.folder.create()
        self.assertIsNone(self.folder.check())
        # create(False) must fail when the folder already exists.
        with self.assertRaises(exc.InsufficientPermissions):
            self.folder.create(False)
    def test_folder_recreate(self):
        # Default create() is idempotent on an existing folder.
        self.folder.create()
        self.assertIsNone(self.folder.check())
        self.folder.create()
        self.assertIsNone(self.folder.check())
    def test_folder_cannot_recreate(self):
        self.folder.create()
        self.assertIsNone(self.folder.check())
        with self.assertRaises(exc.InsufficientPermissions):
            self.folder.create(False)
    def test_folder_delete(self):
        self.folder.create()
        self.assertIsNone(self.folder.check())
        self.folder.delete()
        # After deletion the folder must be gone.
        with self.assertRaises(exc.NotFound):
            self.folder.check()
    def test_folder_move(self):
        self.folder.create()
        moved = self.folder.move(self.destination.path)
        self.assertEqual(moved.path, self.destination.path, "Moved folder path should be identical")
        # The original path must no longer exist after a move.
        with self.assertRaises(exc.NotFound):
            self.folder.check()
    def test_folder_copy(self):
        self.folder.create()
        copied = self.folder.copy(self.destination.path)
        self.assertEqual(copied.path, self.destination.path, "Copied folder path should be identical")
        # Unlike move, both source and destination exist after a copy.
        self.assertIsNone(self.folder.check())
        self.assertIsNone(self.destination.check())
    def test_folder_list(self):
        self.folder.create()
        subfolder = self.folder.folder(SUB_FOLDER_NAME).create()
        _file = self.folder.file(FILE_IN_FOLDER_NAME)
        _file.upload(FILE_CONTENT)
        # list() populates .folders and .files on the folder object.
        self.folder.list()
        folders_list = self.folder.folders
        self.assertEqual(1, len(folders_list), "There should be one subfolder")
        self.assertEqual(folders_list[0]._url, subfolder._url, "Subfolder URLs should be identical")
        files_list = self.folder.files
        self.assertEqual(1, len(files_list), "There should be one file")
        self.assertEqual(files_list[0]._url, _file._url, "File URLs should be identical")
mcsalgado/ansible | lib/ansible/playbook/play_context.py | Python | gpl-3.0 | 16,111 | 0.009252 | # -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pipes
import random
import re
from six import iteritems
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.template import Templar
from ansible.utils.boolean import boolean
from ansible.utils.unicode import to_unicode
__all__ = ['PlayContext']
# the magic variable mapping dictionary below is used to translate
# host/inventory variables to fields in the PlayContext
# object. The dictionary values are tuples, to account for aliases
# in variable names.
MAGIC_VARIABLE_MAPPING = dict(
connection = ('ansible_connection',),
remote_addr = ('ansible_ssh_host', 'ansible_host'),
remote_user = ('ansible_ssh_user', 'ansible_user'),
port = ('ansible_ssh_port', 'ansible_port'),
password = ('ansible_ssh_pass', 'ansible_password'),
private_key_file = ('ansible_ssh_private_key_file', 'ansible_private_key_file'),
shell = ('ansible_shell_type',),
become = ('ansible_become',),
become_method = ('ansible_become_method',),
become_user = ('ansible_become_user',),
become_pass = ('ansible_become_password','ansible_become_pass'),
become_exe = ('ansible_become_exe',),
become_flags = ('ansible_become_flags',),
sudo = ('ansible_sudo',),
sudo_user = ('ansible_sudo_user',),
sudo_pass = ('ansible_sudo_password', 'ansible_sudo_pass'),
sudo_exe = ('ansible_sudo_exe',),
sudo_flags = ('ansible_sudo_flags',),
su = ('ansible_su',),
su_user = ('ansible_su_user',),
su_pass = ('ansible_su_password', 'ansible_su_pass'),
su_exe = ('ansible_su_exe',),
su_flags = ('ansible_su_flags',),
)
SU_PROMPT_LOCALIZATIONS = [
'Password',
'암호',
'パスワード',
'Adgangskode',
'Contraseña',
'Contrasenya',
'Hasło',
'Heslo',
'Jelszó',
'Lösenord',
'Mật khẩu',
'Mot de passe',
'Parola',
'Parool',
'Pasahitza',
'Passord',
'Passwort',
'Salasana',
'Sandi',
'Senha',
'Wachtwoord',
'ססמה',
'Лозинка',
'Парола',
'Пароль',
'गुप्तशब्द',
'शब्दकूट',
'సంకేతపదము',
'හස්පදය',
'密码',
'密碼',
]
# NOTE(review): presumably the task-level keywords allowed to override the
# play-wide values held by PlayContext -- the consumer is outside this
# chunk, confirm where this tuple is iterated.
TASK_ATTRIBUTE_OVERRIDES = (
    'become',
    'become_user',
    'become_pass',
    'become_method',
    'connection',
    'delegate_to',
    'no_log',
    'remote_user',
)
class PlayContext(Base):

    '''
    This class is used to consolidate the connection information for
    hosts in a play and child tasks, where the task may override some
    connection/authentication information.
    '''

    # connection fields, some are inherited from Base:
    # (connection, port, remote_user, environment, no_log)
    _remote_addr      = FieldAttribute(isa='string')
    _password         = FieldAttribute(isa='string')
    _private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
    _timeout          = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
    _shell            = FieldAttribute(isa='string')
    _ssh_extra_args   = FieldAttribute(isa='string')
    _connection_lockfd = FieldAttribute(isa='int')

    # privilege escalation fields
    _become           = FieldAttribute(isa='bool')
    _become_method    = FieldAttribute(isa='string')
    _become_user      = FieldAttribute(isa='string')
    _become_pass      = FieldAttribute(isa='string')
    _become_exe       = FieldAttribute(isa='string')
    _become_flags     = FieldAttribute(isa='string')
    _prompt           = FieldAttribute(isa='string')

    # backwards compatibility fields for sudo/su
    _sudo_exe         = FieldAttribute(isa='string')
    _sudo_flags       = FieldAttribute(isa='string')
    _sudo_pass        = FieldAttribute(isa='string')
    _su_exe           = FieldAttribute(isa='string')
    _su_flags         = FieldAttribute(isa='string')
    _su_pass          = FieldAttribute(isa='string')

    # general flags
    _verbosity        = FieldAttribute(isa='int', default=0)
    # NOTE(review): set() is a shared mutable default passed to
    # FieldAttribute -- confirm FieldAttribute copies it per instance.
    _only_tags        = FieldAttribute(isa='set', default=set())
    _skip_tags        = FieldAttribute(isa='set', default=set())
    _check_mode       = FieldAttribute(isa='bool', default=False)
    _force_handlers   = FieldAttribute(isa='bool', default=False)
    _start_at_task    = FieldAttribute(isa='string')
    _step             = FieldAttribute(isa='bool', default=False)
    _diff             = FieldAttribute(isa='bool', default=False)

    def __init__(self, play=None, options=None, passwords=None, connection_lockfd=None):
        """Build a context, optionally pre-populated from CLI options and a play.

        :param play: optional Play object whose connection settings override options
        :param options: optional parsed CLI options (lowest precedence)
        :param passwords: optional dict with 'conn_pass'/'become_pass' entries
        :param connection_lockfd: file descriptor used for connection locking
        """
        super(PlayContext, self).__init__()

        # (reconstructed: these lines were garbled by stray dataset markers)
        if passwords is None:
            passwords = {}

        self.password    = passwords.get('conn_pass', '')
        self.become_pass = passwords.get('become_pass', '')

        # a file descriptor to be used during locking operations
        self.connection_lockfd = connection_lockfd

        # set options before play to allow play to override them
        if options:
            self.set_options(options)
        if play:
            self.set_play(play)

    def set_play(self, play):
        '''
        Configures this connection information instance with data from
        the play class.
        '''

        if play.connection:
            self.connection = play.connection

        if play.remote_user:
            self.remote_user = play.remote_user

        if play.port:
            self.port = int(play.port)

        # `become` may legitimately be False, so test against None,
        # not truthiness, to preserve an explicit "become: no".
        if play.become is not None:
            self.become = play.become
        if play.become_method:
            self.become_method = play.become_method
        if play.become_user:
            self.become_user = play.become_user

        # non connection related
        self.no_log = play.no_log
        if play.force_handlers is not None:
            self.force_handlers = play.force_handlers

    def set_options(self, options):
        '''
        Configures this connection information instance with data from
        options specified by the user on the command line. These have a
        lower precedence than those set on the play or host.
        '''

        if options.connection:
            self.connection = options.connection

        self.remote_user = options.remote_user
        self.private_key_file = options.private_key_file
        self.ssh_extra_args = options.ssh_extra_args

        # privilege escalation
        self.become        = options.become
        self.become_method = options.become_method
        self.become_user   = options.become_user

        # general flags (should we move out?)
        if options.verbosity:
            self.verbosity = options.verbosity
        #if options.no_log:
        #    self.no_log = boolean(options.no_log)
        if options.check:
            self.check_mode = boolean(options.check)
        # the hasattr() guards protect against option namespaces from
        # subcommands that do not define these flags
        if hasattr(options, 'force_handlers') and options.force_handlers:
            self.force_handlers = boolean(options.force_handlers)
        if hasattr(options, 'step') and options.step:
            self.step = boolean(options.step)
        if hasattr(options, 'start_at_task') and options.start_at_task:
            self.start_at_task = to_unicode(options.start_at_task)
|
JorgeDeLosSantos/pyqus | pyqus/examples/dat/Lug/post_u_data.py | Python | mit | 420 | 0.009524 | # -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import numpy as np

plt.style.use('ggplot')

# Plot nodal displacements exported from an Abaqus .dat file.
# u.txt columns: node id, x-displacement, y-displacement, resultant.
# (reconstructed: two lines were garbled by stray dataset markers)
data = np.loadtxt("u.txt", delimiter=",")
nodes = data[:, 0]
ux = data[:, 1]
uy = data[:, 2]
usum = data[:, 3]

plt.plot(nodes, ux, "r--", label="ux")
plt.plot(nodes, uy, "g--", label="uy")
plt.plot(nodes, usum, "b", label="usum")
plt.xlabel("Nodes")
plt.ylabel("Displacement (m)")
plt.xticks(nodes)
plt.legend()
plt.show()
wxgeo/geophar | wxgeometrie/sympy/tensor/tests/test_indexed.py | Python | gpl-2.0 | 14,359 | 0.002159 | from sympy.core import symbols, Symbol, Tuple, oo, Dummy
from sympy.core.compatibility import iterable, range
from sympy.tensor.indexed import IndexException
from sympy.utilities.pytest import raises, XFAIL
# import test:
from sympy import IndexedBase, Idx, Indexed, S, sin, cos, Sum, Piecewise, And, Order, LessThan, StrictGreaterThan, \
GreaterThan, StrictLessThan, Range, Array, Subs, Function, KroneckerDelta, Derivative
def test_Idx_construction():
    """Valid Idx constructions and rejection of non-integer arguments."""
    i, a, b = symbols('i a b', integer=True)
    assert Idx(i) != Idx(i, 1)
    assert Idx(i, a) == Idx(i, (0, a - 1))
    assert Idx(i, oo) == Idx(i, (0, oo))

    # Every argument combination below must raise TypeError because the
    # label or a bound is not an integer.
    x = symbols('x', integer=False)
    bad_args = [
        (x,),
        (0.5,),
        (i, x),
        (i, 0.5),
        (i, (x, 5)),
        (i, (2, x)),
        (i, (2, 3.5)),
    ]
    for args in bad_args:
        raises(TypeError, lambda args=args: Idx(*args))
def test_Idx_properties():
    # An Idx built from integer symbols must report itself as integer.
    i, a, b = symbols('i a b', integer=True)
    assert Idx(i).is_integer
def test_Idx_bounds():
    """lower/upper for every supported range specification."""
    i, a, b = symbols('i a b', integer=True)
    # (index, expected lower, expected upper); None means "no bound set".
    cases = [
        (Idx(i), None, None),
        (Idx(i, a), 0, a - 1),
        (Idx(i, 5), 0, 4),
        (Idx(i, oo), 0, oo),
        (Idx(i, (a, b)), a, b),
        (Idx(i, (1, 5)), 1, 5),
        (Idx(i, (-oo, oo)), -oo, oo),
    ]
    for idx, expected_lower, expected_upper in cases:
        if expected_lower is None:
            assert idx.lower is None
        else:
            assert idx.lower == expected_lower
        if expected_upper is None:
            assert idx.upper is None
        else:
            assert idx.upper == expected_upper
def test_Idx_fixed_bounds():
    """Same bound checks as test_Idx_bounds but with a plain integer label."""
    i, a, b, x = symbols('i a b x', integer=True)
    cases = [
        (Idx(x), None, None),
        (Idx(x, a), 0, a - 1),
        (Idx(x, 5), 0, 4),
        (Idx(x, oo), 0, oo),
        (Idx(x, (a, b)), a, b),
        (Idx(x, (1, 5)), 1, 5),
        (Idx(x, (-oo, oo)), -oo, oo),
    ]
    for idx, expected_lower, expected_upper in cases:
        if expected_lower is None:
            assert idx.lower is None
        else:
            assert idx.lower == expected_lower
        if expected_upper is None:
            assert idx.upper is None
        else:
            assert idx.upper == expected_upper
def test_Idx_inequalities():
i14 = Idx("i14", (1, 4))
i79 = Idx("i79", (7, 9))
i46 = Idx("i46", (4, 6))
i35 = Idx("i35", (3, 5))
assert i14 <= 5
assert i14 < 5
assert not (i14 >= 5)
assert not (i14 > 5)
assert 5 >= i14
assert 5 > i14
assert not (5 <= i14)
assert not (5 < i14)
assert LessThan(i14, 5)
assert StrictLessThan(i14, 5)
assert not GreaterThan(i14, 5)
assert not StrictGreaterThan(i14, 5)
assert i14 <= 4
assert isinstance(i14 < 4, StrictLessThan)
assert isinstance(i14 >= 4, GreaterThan)
assert not (i14 > 4)
assert isinstance(i14 <= 1, LessThan)
assert not (i14 < 1)
assert i14 >= 1
assert isinstance(i14 > 1, StrictGreaterThan)
assert not (i14 <= 0)
assert not (i14 < 0)
assert i14 >= 0
assert i14 > 0
from sympy.abc import x
assert isinstance(i14 < x, StrictLessThan)
assert isinstance(i14 > x, StrictGreaterThan)
assert isinstance(i14 <= x, LessThan)
assert isinstance(i14 >= x, GreaterThan)
assert i14 < i79
assert i14 <= i79
assert not (i14 > i79)
assert not (i14 >= i79)
assert i14 <= i46
assert isinstance(i14 < i46, StrictLessThan)
assert isinstance(i14 >= i46, GreaterThan)
assert not (i14 > i46)
assert isinstance(i14 < i35, StrictLessThan)
assert isinstance(i14 > i35, StrictGreaterThan)
assert isinstance | (i14 <= i35, LessThan)
assert isinstance(i14 >= i35, GreaterTh | an)
iNone1 = Idx("iNone1")
iNone2 = Idx("iNone2")
assert isinstance(iNone1 < iNone2, StrictLessThan)
assert isinstance(iNone1 > iNone2, StrictGreaterThan)
assert isinstance(iNone1 <= iNone2, LessThan)
assert isinstance(iNone1 >= iNone2, GreaterThan)
@XFAIL
def test_Idx_inequalities_current_fails():
    # Known failure: comparisons with an S(5) on the left do not evaluate.
    i14 = Idx("i14", (1, 4))

    assert S(5) >= i14
    assert S(5) > i14
    assert not (S(5) <= i14)
    assert not (S(5) < i14)
def test_Idx_func_args():
    """Rebuilding an Idx from its own func/args must round-trip."""
    i, a, b = symbols('i a b', integer=True)
    for ii in (Idx(i), Idx(i, a), Idx(i, (a, b))):
        assert ii.func(*ii.args) == ii
def test_Idx_subs():
    # Substitution works on both the label and the range of an Idx.
    i, a, b = symbols('i a b', integer=True)
    assert Idx(i, a).subs(a, b) == Idx(i, b)
    assert Idx(i, a).subs(i, b) == Idx(b, a)

    assert Idx(i).subs(i, 2) == Idx(2)
    assert Idx(i, a).subs(a, 2) == Idx(i, 2)
    assert Idx(i, (a, b)).subs(i, 2) == Idx(2, (a, b))
def test_IndexedBase_sugar():
    # All subscript spellings of an IndexedBase build the same Indexed.
    i, j = symbols('i j', integer=True)
    a = symbols('a')
    A1 = Indexed(a, i, j)
    A2 = IndexedBase(a)
    assert A1 == A2[i, j]
    assert A1 == A2[(i, j)]
    assert A1 == A2[[i, j]]
    assert A1 == A2[Tuple(i, j)]
    # Numeric indices are sympified to Integer.
    assert all(a.is_Integer for a in A2[1, 0].args[1:])
def test_IndexedBase_subs():
    # Substituting the base label, and substituting a base with a dict.
    i, j, k = symbols('i j k', integer=True)
    a, b, c = symbols('a b c')
    A = IndexedBase(a)
    B = IndexedBase(b)
    C = IndexedBase(c)
    assert A[i] == B[i].subs(b, a)
    assert isinstance(C[1].subs(C, {1: 2}), type(A[1]))
def test_IndexedBase_shape():
    # The shape keyword fixes the rank; indexing with the wrong number of
    # indices must raise IndexException.
    i, j, m, n = symbols('i j m n', integer=True)
    a = IndexedBase('a', shape=(m, m))
    b = IndexedBase('a', shape=(m, n))
    assert b.shape == Tuple(m, n)
    assert a[i, j] != b[i, j]
    assert a[i, j] == b[i, j].subs(n, m)
    assert b.func(*b.args) == b
    assert b[i, j].func(*b[i, j].args) == b[i, j]
    raises(IndexException, lambda: b[i])
    raises(IndexException, lambda: b[i, i, j])
    F = IndexedBase("F", shape=m)
    assert F.shape == Tuple(m)
    assert F[i].subs(i, j) == F[j]
    raises(IndexException, lambda: F[i, j])
def test_Indexed_constructor():
    # Indexed accepts a string, a Symbol or an IndexedBase as the base,
    # but not another Indexed; it needs at least one index.
    i, j = symbols('i j', integer=True)
    A = Indexed('A', i, j)
    assert A == Indexed(Symbol('A'), i, j)
    assert A == Indexed(IndexedBase('A'), i, j)
    raises(TypeError, lambda: Indexed(A, i, j))
    raises(IndexException, lambda: Indexed("A"))
    assert A.free_symbols == {A, A.base.label, i, j}
def test_Indexed_func_args():
    # func/args round-trip for Indexed.
    i, j = symbols('i j', integer=True)
    a = symbols('a')
    A = Indexed(a, i, j)
    assert A == A.func(*A.args)
def test_Indexed_subs():
    # Substitution works on the base label and on individual indices.
    i, j, k = symbols('i j k', integer=True)
    a, b = symbols('a b')
    A = IndexedBase(a)
    B = IndexedBase(b)
    assert A[i, j] == B[i, j].subs(b, a)
    assert A[i, j] == A[i, k].subs(k, j)
def test_Indexed_properties():
    # rank/indices/base/ranges; shape is only defined when every index
    # carries a range, otherwise IndexException is raised.
    i, j = symbols('i j', integer=True)
    A = Indexed('A', i, j)
    assert A.rank == 2
    assert A.indices == (i, j)
    assert A.base == IndexedBase('A')
    assert A.ranges == [None, None]
    raises(IndexException, lambda: A.shape)

    n, m = symbols('n m', integer=True)
    assert Indexed('A', Idx(
        i, m), Idx(j, n)).ranges == [Tuple(0, m - 1), Tuple(0, n - 1)]
    assert Indexed('A', Idx(i, m), Idx(j, n)).shape == Tuple(m, n)
    raises(IndexException, lambda: Indexed("A", Idx(i, m), Idx(j)).shape)
def test_Indexed_shape_precedence():
    # An explicit IndexedBase shape wins over the shape implied by the
    # index ranges; the ranges themselves are unaffected.
    i, j = symbols('i j', integer=True)
    o, p = symbols('o p', integer=True)
    n, m = symbols('n m', integer=True)
    a = IndexedBase('a', shape=(o, p))
    assert a.shape == Tuple(o, p)
    assert Indexed(
        a, Idx(i, m), Idx(j, n)).ranges == [Tuple(0, m - 1), Tuple(0, n - 1)]
    assert Indexed(a, Idx(i, m), Idx(j, n)).shape == Tuple(o, p)
    assert Indexed(
        a, Idx(i, m), Idx(j)).ranges == [Tuple(0, m - 1), Tuple(None, None)]
    assert Indexed(a, Idx(i, m), Idx(j)).shape == Tuple(o, p)
def test_complex_indices():
    # Indices may be arbitrary expressions, not just plain symbols.
    i, j = symbols('i j', integer=True)
    A = Indexed('A', i, i + j)
    assert A.rank == 2
    assert A.indices == (i, i + j)
def test_not_interable():
    # NOTE: function name has a typo ("interable" -> "iterable"); kept
    # unchanged so test discovery/history is unaffected.
    i, j = symbols('i j', integer=True)
    A = Indexed('A', i, i + j)
    assert not iterable(A)
def test_Indexed_coeff():
    # coeff() must treat y[i] and y[i+1] as independent quantities.
    N = Symbol('N', integer=True)
    len_y = N
    i = Idx('i', len_y-1)
    y = IndexedBase('y', shape=(len_y,))
    a = (1/y[i+1]*y[i]).coeff(y[i])
    b = (y[i]/y[i+1]).coeff(y[i])
    assert a == b
mozman/ezdxf | examples/text_layout_engine_usage.py | Python | mit | 12,087 | 0.001324 | # Copyright (c) 2021, Manfred Moitzi
# License: MIT License
import sys
from typing import Iterable
import pathlib
import random
import ezdxf
from ezdxf import zoom, print_config
from ezdxf.math import Matrix44
from ezdxf.tools import fonts
from ezdxf.tools import text_layout as tl
"""
This example shows the usage of the internal text_layout module to render
complex text layouts. The module is designed to render MText like entities,
but could be used for other tasks too. The layout engine supports a multi
column setup, each column contains paragraphs, and these paragraphs can
automatically flow across the columns. All locations are relative to each other,
absolute locations are not supported - tabulators are not supported.
The layout engine knows nothing about the content itself, it just manages
content boxes of a fixed given width and height and "glue" spaces in between.
The engine does not alter the size of the content boxes, but resizes the glue
if necessary. The actual rendering is done by a rendering object associated to
each content box.
The only text styling managed by the layout engine is underline, overline and
strike through multiple content boxes.
Features:
- layout alignment like MText: top-middle-bottom combined with left-center-right
- paragraph alignments: left, right, center, justified
- paragraph indentation: left, right, special first line
- cell alignments: top, center, bottom
- fraction cells: over, slanted, tolerance style
- columns have a fixed height or grows automatically, paragraphs which do not
fit "flow" into the following column.
- pass through of transformation matrix to the rendering object
TODO:
- bullet- and numbered lists
- refinements to replicate MText features as good as possible
Used for:
- drawing add-on to render MTEXT with columns
- explode MTEXT into DXF primitives (TEXT, LINE)
"""
# Matplotlib is needed for font measuring; bail out early without it.
if not ezdxf.options.use_matplotlib:
    print("The Matplotlib package is required.")
    sys.exit(1)
# Type alias:
Content = Iterable[tl.Cell]
DIR = pathlib.Path("~/Desktop/Outbox").expanduser()
STYLE = "Style0"
FONT = "OpenSans-Regular.ttf"
COLUMN_HEIGHT: float = 12
print_config()
# Module-level document/modelspace shared by all renderers below.
doc = ezdxf.new()
msp = doc.modelspace()
style = doc.styles.new(STYLE, dxfattribs={"font": FONT})
def measure_space(font):
    """Return the advance width of a single space character for *font*.

    Measured as the width difference between " X" and "X" because many
    font backends report zero width for a lone space.
    """
    width_with_space = font.text_width(" X")
    width_without_space = font.text_width("X")
    return width_with_space - width_without_space
class SizedFont:
    """A font resource bundled with its cap height and space width."""
    def __init__(self, height: float):
        self.height = float(height)
        # FONT is the module-level font file name configured above.
        self.font = fonts.make_font(FONT, self.height)
        self.space = measure_space(self.font)
    def text_width(self, text: str):
        # Width of `text` rendered at this font's height.
        return self.font.text_width(text)
# Pre-built fonts for the five supported text sizes (index 0 = smallest).
fix_sized_fonts = [
    SizedFont(0.18),
    SizedFont(0.35),
    SizedFont(0.50),
    SizedFont(0.70),
    SizedFont(1.00),
]
class FrameRenderer(tl.ContentRenderer):
    """Render object to render a frame around a content collection.

    This renderer can be used by collections which just manage content
    but do not represent content by themselves (Layout, Column, Paragraph).
    Entities are written into the module-level modelspace ``msp``.
    """
    def __init__(self, color):
        # DXF color index used for the frame and stroke lines.
        self.color = color
    def render(
        self,
        left: float,
        bottom: float,
        right: float,
        top: float,
        m: Matrix44 = None,
    ) -> None:
        """Render a frame as LWPOLYLINE."""
        pline = msp.add_lwpolyline(
            [(left, top), (right, top), (right, bottom), (left, bottom)],
            close=True,
            dxfattribs={"color": self.color},
        )
        # Optional transformation passed through from the layout engine.
        if m:
            pline.transform(m)
    def line(
        self, x1: float, y1: float, x2: float, y2: float, m: Matrix44 = None
    ) -> None:
        """Line renderer used to create underline, overline, strike through
        and fraction dividers.
        """
        line = msp.add_line(
            (x1, y1), (x2, y2), dxfattribs={"color": self.color}
        )
        if m:
            line.transform(m)
class TextRenderer(tl.ContentRenderer):
    """Text content renderer: writes TEXT/LINE entities into ``msp``."""
    def __init__(self, text, attribs):
        self.text = text
        self.attribs = attribs
        # Stroke lines reuse the text color.
        self.line_attribs = {"color": attribs["color"]}
    def render(
        self,
        left: float,
        bottom: float,
        right: float,
        top: float,
        m: Matrix44 = None,
    ):
        """Create/render the text content"""
        text = msp.add_text(self.text, dxfattribs=self.attribs)
        # The layout engine supplies the cell box; anchor at bottom-left.
        text.set_pos((left, bottom), align="LEFT")
        if m:
            text.transform(m)
    def line(
        self, x1: float, y1: float, x2: float, y2: float, m: Matrix44 = None
    ) -> None:
        """Line renderer used to create underline, overline, strike through
        and fraction dividers.
        """
        line = msp.add_line((x1, y1), (x2, y2), dxfattribs=self.line_attribs)
        if m:
            line.transform(m)
class Word(tl.Text):
    """Represent a word as content box for the layout engine."""
    def __init__(self, text: str, font: SizedFont, stroke: int = 0):
        # Each content box can have individual properties:
        # color 7 is weighted twice to make it the most common choice.
        attribs = {
            "color": random.choice((1, 2, 3, 4, 6, 7, 7)),
            "height": font.height,
            "style": STYLE,
        }
        super().__init__(
            # Width and height of the content are fixed given values and will
            # not be changed by the layout engine:
            width=font.text_width(text),
            height=font.height,
            stroke=stroke,
            # Each content box can have it's own rendering object:
            renderer=TextRenderer(text, attribs),
        )
def uniform_content(count: int, size: int = 1) -> Content:
    """Yield *count* lorem-ipsum words, all set in the same font size,
    each followed by a glue space of that font's space width.
    """
    font = fix_sized_fonts[size]
    for token in tl.lorem_ipsum(count):
        yield Word(token, font)
        yield tl.Space(font.space)
def random_sized_content(count: int) -> Content:
    """Yield lorem-ipsum words with randomly chosen font sizes."""
    # Size 1 dominates; 0, 2 and 3 appear occasionally.
    weighted_sizes = [0, 1, 1, 1, 1, 1, 2, 3]
    for token in tl.lorem_ipsum(count):
        font = fix_sized_fonts[random.choice(weighted_sizes)]
        yield Word(token, font)
        yield tl.Space(font.space)
def stroke_groups(words: Iterable[str]):
    """Partition *words* into runs of 1-5 consecutive words, yielding
    ``(group, stroke)`` pairs where ``stroke`` is a random stroke code
    (0 = none, 1/2/4 = underline/strike/overline variants).
    """
    current = []
    remaining = 0
    stroke_code = 0
    for token in words:
        if remaining:
            # Still filling the current group.
            remaining -= 1
            current.append(token)
            continue
        # Flush the finished group and start a new one.
        if current:
            yield current, stroke_code
        remaining = random.randint(1, 4)
        current = [token]
        stroke_code = random.choice([0, 0, 0, 0, 1, 1, 1, 2, 2, 4])
    if current:
        yield current, stroke_code
def stroked_content(count: int, size: int = 1) -> Content:
    """Create content with one text size and groups of words with or without
    strokes.
    """
    font = fix_sized_fonts[size]
    groups = stroke_groups(tl.lorem_ipsum(count))
    for group, stroke in groups:
        # strokes should span across spaces in between words:
        # Spaces between words are bound to the preceding content box renderer,
        # MText is more flexible, but this implementation is easy and good
        # enough, otherwise spaces would need a distinct height and a rendering
        # object, both are not implemented for glue objects.
        # Bit 8 ("continue") keeps the stroke running over the glue space.
        continue_stroke = stroke + 8 if stroke else 0
        for word in group[:-1]:
            yield Word(word, font=font, stroke=continue_stroke)
            yield tl.Space(font.space)
        # strokes end at the last word, without continue stroke:
        yield Word(group[-1], font=font, stroke=stroke)
        yield tl.Space(font.space)
class Fraction(tl.Fraction):
"""Represents a fraction for the layout engine, which consist of a top-
and bottom content box, divided by horizontal or slanted line.
The "tolerance style" has no line between the stacked content boxes.
This implementation is more flexible than MText, the content boxes can be
words but also fractions or cell groups.
"""
def __init__(
self, t1: str, t2: str, stacking: tl.Stacking, font: SizedFont
):
top = Word(t1, font)
bottom = Word(t2, font)
super().__init__(
top=top,
bottom=bottom,
s |
CloverHealth/airflow | airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py | Python | apache-2.0 | 1,564 | 0.001918 | # flake8: noqa
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add kubernetes scheduler uniqueness
Revision ID: 86770d1215c0
Revises: 27c6a30d7c24
Create Date: 2018-04-03 15:31:20.814328
"""
# revision identifiers, used by Alembic.
revision = '86770d1215c0'
down_revision = '27c6a30d7c24'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
RESOURCE_TABLE = "kube_worker_uuid"
def upgrade():
    """Create the single-row table that stores the Kubernetes worker UUID.

    (The Column definition was garbled by stray dataset markers and has
    been reconstructed.)
    """
    table = op.create_table(
        RESOURCE_TABLE,
        # one_row_id defaults to true; the check constraint below limits
        # the table to exactly one row.
        sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
        sa.Column("worker_uuid", sa.String(255)),
        sa.CheckConstraint("one_row_id", name="kube_worker_one_row_id")
    )

    # seed the single row with an empty uuid
    op.bulk_insert(table, [
        {"worker_uuid": ""}
    ])
def downgrade():
    # Drop the kube_worker_uuid table created by upgrade().
    op.drop_table(RESOURCE_TABLE)
|
eriwoon/2048 | main.py | Python | mit | 9,629 | 0.028595 | #! /usr/bin/python
#encoding=UTF-8
'''
Created on 2014-5-15
@author: XIAO Zhen
'''
'''哈哈'''
import Tkinter as tk
import time
import random
class Application(tk.Frame):
    def __init__(self, master=None):
        # Build the 2048 board UI, spawn the two starting tiles and wire
        # the arrow keys to board moves.
        tk.Frame.__init__(self, master)
        self.winfo_toplevel().rowconfigure(0,minsize = 1)
        self.winfo_toplevel().columnconfigure(0,minsize = 1)
        self.grid()
        self.createWidgets()
        # two initial tiles
        self.random()
        self.random()
        self.focus_set()
        self.bind("<Up>", self.callback)
        self.bind("<Down>", self.callback)
        self.bind("<Left>", self.callback)
        self.bind("<Right>", self.callback)
        self.pack()
    def createWidgets(self):
        """Create direction buttons, the 4x4 tile grid and control buttons."""
        #direction buttons, up down left and right
        # NOTE: the on-screen arrow glyph and the move it triggers are
        # intentionally opposite (pressing the top button pushes tiles down).
        self.direction = {}
        self.direction['up'] = tk.Button(self, text = '⇩', height = 2)
        self.direction['up'].configure(command = (lambda dir = 'to_down': self.todirection(dir)))
        self.direction['up'].grid(row = 0,column = 1, columnspan = 4, sticky = tk.W + tk.E)
        self.direction['down'] = tk.Button(self, text = '⇧', height = 2)
        self.direction['down'].configure(command = (lambda dir = 'to_up': self.todirection(dir)))
        self.direction['down'].grid(row = 5,column = 1, columnspan = 4, sticky = tk.W + tk.E)
        self.direction['left'] = tk.Button(self, text = '⇨', width = 3)
        self.direction['left'].configure(command = (lambda dir = 'to_right': self.todirection(dir)))
        self.direction['left'].grid(row = 1,column = 0, rowspan = 4, sticky = tk.N + tk.S)
        self.direction['right'] = tk.Button(self, text = '⇦', width = 3)
        self.direction['right'].configure(command = (lambda dir = 'to_left': self.todirection(dir)))
        self.direction['right'].grid(row = 1,column = 5, rowspan = 4, sticky = tk.N + tk.S)
        # 16 tile buttons, row-major order; '0' text means an empty cell.
        self.buttons = []
        for i in range(0,16):
            self.buttons.append(tk.Button(self, text = '0', height = 2, width = 5, background = "#FFFFFF", fg = '#FFFFFF'))
            self.buttons[i].configure(command = (lambda b = self.buttons[i]: self.setNumber(b)))
            self.buttons[i].grid(row = i/4 + 1,column=i%4 + 1)
        #self.triggerButton = tk.Button(self, text = 'Print')
        #self.triggerButton.grid(row = 0, column=1,ipadx = 100)
        #control buttons, including mainly start and mode selections
        self.controls = {}
        self.controls['startgame'] = tk.Button(self, text = 'Start', height = 2, width = 5, command=self.startgame)
        self.controls['startgame'].grid(row = 6, column = 4)
        self.controls['test1'] = tk.Button(self, text = 'Test1', height = 2, width = 5, command=self.random)
        self.controls['test1'].grid(row = 6,column = 1)
        self.controls['test2'] = tk.Button(self, text = 'Test2', height = 2, width = 5, command=self.test2)
        self.controls['test2'].grid(row = 6,column = 2)
        self.controls['test3'] = tk.Button(self, text = 'Test3', height = 2, width = 5, command=self.test3)
        self.controls['test3'].grid(row = 6,column = 3)
    def setNumber(self,button):
        # Tile click handler; intentionally a no-op.
        pass
    def startgame(self):
        # Placeholder for the Start button; only logs for now.
        print('start game!')
    def random(self):
        # Spawn a new tile ("2" or "4") on a randomly chosen empty cell
        # and highlight it; prints a message when the board is full.
        empty = []
        rand = -1
        for i in range(0,16):
            if self.buttons[i]['text'] == '0':
                empty.append(i)
        if len(empty) != 0:
            rand = random.randrange(0,len(empty))
            # randrange(1,3) yields 1 or 2, so the tile value is 2 or 4.
            self.buttons[empty[rand]]['text'] = str(random.randrange(1,3) * 2)
            self.setColors()
        else:
            print("no more fields")
        if rand != -1:
            # repaint the freshly spawned tile in blue
            self.buttons[empty[rand]].configure(background = '#0404B4', fg = '#000000')
    def test2(self):
        # Debug helper: fill the board with a fixed tile pattern.
        print('test2')
        self.buttons[0]['text'] = '2'
        self.buttons[1]['text'] = '2'
        self.buttons[2]['text'] = '4'
        self.buttons[3]['text'] = '8'
        self.buttons[4]['text'] = '4'
        self.buttons[5]['text'] = '2'
        self.buttons[6]['text'] = '2'
        self.buttons[7]['text'] = '8'
        self.buttons[8]['text'] = '4'
        self.buttons[9]['text'] = '2'
        self.buttons[10]['text'] = '2'
        self.buttons[11]['text'] = '8'
        self.buttons[12]['text'] = '8'
        self.buttons[13]['text'] = '8'
        self.buttons[14]['text'] = '8'
        self.buttons[15]['text'] = '8'
        self.setColors()
    def test3(self):
        # Debug helper; currently only logs.
        print('test3')
def callback(self,event):
if event.keysym == 'Up':
self.todirection('to_up')
elif event.keysym == 'Down':
self.todirection('to_down')
elif event.keysym == 'Left':
self.todirection('to_left')
elif event.keysym == 'Right':
self.todirection('to_right')
def sum(self,list):
for i in range (len(list),5):
list.append(0)
for i in range(0,3):
if list[i] == list[i+1] and list[i] != 0:
list[i] += list[i+1]
list[i+1] = 0
re = []
for i in range(0,4):
if list[i] != 0:
re.append(list[i])
for i in range (len(re),5):
re.append(0)
return re
def todirection(self, direction):
flag = 0
if direction == 'to_right':
#rows
for i in range(0, 4):
#columns:
list = []
for j in range(3, -1, -1):
if self.buttons[i*4 + j] != '0':
list.append(int(self.buttons[i*4 + j]['text']))
re = self.sum(list)
k = 0
for j in range(3, -1, -1):
| if self.buttons[i*4 + j]['text'] != str(re[k]):
flag = 1
self.buttons[i*4 + j]['text'] = str(re[k])
k += 1
elif direction == 'to_left':
#rows
for i in range(0, 4):
#columns:
list = []
for j in range(0, 4):
if self.buttons[i*4 + j] != '0':
list.append(int(s | elf.buttons[i*4 + j]['text']))
re = self.sum(list)
k = 0
for j in range(0, 4):
if self.buttons[i*4 + j]['text'] != str(re[k]):
flag = 1
self.buttons[i*4 + j]['text'] = str(re[k])
k += 1
elif direction == 'to_up':
#column
for i in range(0, 4):
#row:
list = []
for j in range(0, 4):
if self.buttons[i + j*4] != '0':
list.append(int(self.buttons[i + j*4]['text']))
re = self.sum(list)
k = 0
for j in range(0, 4):
if self.buttons[i + j*4]['text'] != str(re[k]):
flag = 1
self.buttons[i + j*4]['text'] = str(re[k])
k += 1
elif direction == 'to_down':
#column
for i in range(0, 4):
#rows:
list = []
for j in range(3, -1, -1):
if self.buttons[i + j*4] != '0':
list.append(int(self.buttons[i + j*4]['text']))
re = self.sum(list)
k = 0
for j in range(3, -1, -1):
if self.buttons[i + j*4]['text'] != str(re[k]):
flag = 1
self.buttons[i + j*4]['text'] = str(re[k])
k += 1
if flag != 0:
self.random()
    def setColors(self):
        # Recolor every tile according to its current value.
        for i in range(0,16):
            self.setColor(self.buttons[i])
def setColor(self,button):
tmp = button['text']
if tmp == '0':
button.configure(backgro |
googlemaps/openapi-specification | dist/snippets/maps_http_geocode_place_id/maps_http_geocode_place_id.py | Python | apache-2.0 | 320 | 0.009375 | # [STA | RT maps_http_geocode_place_id]
import requests

# Reverse-geocode a known place_id via the Google Geocoding API.
# YOUR_API_KEY must be replaced with a real key before running.
url = "https://maps.googleapis.com/maps/api/geocode/json?place_id=ChIJd8BlQ2BZwokRAFUEcm_qrcA&key=YOUR_API_KEY"
payload={}
headers = {}
response = requests.request("GET", url, headers=headers, data=payload)
print(response.text)
# [END maps_http_geocode_place_id]
kawamon/hue | desktop/core/ext-py/celery-4.2.1/celery/worker/__init__.py | Python | apache-2.0 | 152 | 0 | """Worker implementation."""
from __future__ import absolute_import, unicode_lite | rals
from .worker import WorkController
__all__ = (' | WorkController',)
|
weiqiangdragonite/blog_tmp | python/学习文档/python_process/p2.py | Python | gpl-2.0 | 987 | 0.016211 | #!/usr/bin/env python
# -*- coding: utf-8 | -*-
import multiprocessing
import time
def worker_1(interval):
    # Print the start time, sleep `interval` seconds, report completion.
    print "worker_1: %s" % (time.ctime())
    time.sleep(interval)
    print "end worker_1"
def worker_2(interval):
    # Print the start time, sleep `interval` seconds, report completion.
    print "worker_2: %s" % (time.ctime())
    time.sleep(interval)
    print "end worker_2"
def worker_3(interval):
| print "worker_3: %s" % (time.ctime())
time.sleep(interval)
print "end worker_3"
if __name__ == "__main__":
    # Spawn three workers with different sleep intervals and report the
    # active children before they finish.
    p1 = multiprocessing.Process(target = worker_1, args = (2,))
    p2 = multiprocessing.Process(target = worker_2, args = (3,))
    p3 = multiprocessing.Process(target = worker_3, args = (4,))
    print "before start: %s" % (time.ctime())
    p1.start()
    p2.start()
    p3.start()
    print "after start %s" % (time.ctime())
    print "The number of cpu is: ", str(multiprocessing.cpu_count())
    for p in multiprocessing.active_children():
        print "child: name = %s, id = %s" % (p.name, p.pid)
    print "END"
|
michaelpb/omnithumb | omnithumb/types/__init__.py | Python | gpl-3.0 | 35 | 0 | from .type | stri | ng import TypeString
|
jamesmarlowe/Python-Data-Writers | datawriters/sqlitewriter.py | Python | bsd-2-clause | 1,215 | 0.004938 | import sqlite3
class SqliteWriter:
def __init__(self, *args, **kwargs):
if 'database' in kwargs:
self.db_sqlite3 = kwargs['database']
else:
print 'missing database argument, using data.sqlite'
self.db_sqlite3 = 'data.sqlite'
if 'table' in kwargs:
self.db_table = kwargs['table']
else:
print 'missing table argument, using DataTable'
self.db_table = 'DataTable'
def save(self, list_of_dicts):
all_keys = l | ist(set().union(*(d.keys() for d in list_of_dicts)))
db = sqlite3.connect(self.db_sqlite3)
cursor = db.cursor()
CREATE_TABLE = '''CREATE TABLE IF NOT EXISTS '''+self.db_table+'''(
'''+' TEXT,'.join([k for k in all_keys])+' TEXT'+'''
)'''
cursor.execute(CREATE_TABLE)
columns = ', '.join(all_keys)
placeholders = ':'+', :'.join(all_keys)
qu | ery = 'INSERT INTO '+self.db_table+' (%s) VALUES (%s)' % (columns, placeholders)
cursor.executemany(query, ({k: d.get(k, None) for k in all_keys} for d in list_of_dicts))
db.commit()
cursor.close()
|
benfinke/ns_python | nssrc/com/citrix/netscaler/nitro/resource/config/appfw/appfwlearningsettings.py | Python | apache-2.0 | 24,257 | 0.029847 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License f | or the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.ni | tro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class appfwlearningsettings(base_resource) :
""" Configuration for learning settings resource. """
	def __init__(self) :
		# Backing fields for the generated properties below; thresholds
		# control when the appfw learning engine accepts a learned rule.
		self._profilename = ""
		self._starturlminthreshold = 0
		self._starturlpercentthreshold = 0
		self._cookieconsistencyminthreshold = 0
		self._cookieconsistencypercentthreshold = 0
		self._csrftagminthreshold = 0
		self._csrftagpercentthreshold = 0
		self._fieldconsistencyminthreshold = 0
		self._fieldconsistencypercentthreshold = 0
		self._crosssitescriptingminthreshold = 0
		self._crosssitescriptingpercentthreshold = 0
		self._sqlinjectionminthreshold = 0
		self._sqlinjectionpercentthreshold = 0
		self._fieldformatminthreshold = 0
		self._fieldformatpercentthreshold = 0
		self._xmlwsiminthreshold = 0
		self._xmlwsipercentthreshold = 0
		self._xmlattachmentminthreshold = 0
		self._xmlattachmentpercentthreshold = 0
		# internal count used by the NITRO base machinery
		self.___count = 0
	# Generated NITRO accessor pair for the `profilename` resource field.
	@property
	def profilename(self) :
		ur"""Name of the profile.<br/>Minimum length = 1.
		"""
		try :
			return self._profilename
		except Exception as e:
			raise e

	@profilename.setter
	def profilename(self, profilename) :
		ur"""Name of the profile.<br/>Minimum length = 1
		"""
		try :
			self._profilename = profilename
		except Exception as e:
			raise e
@property
def starturlminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn start URLs.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._starturlminthreshold
except Exception as e:
raise e
@starturlminthreshold.setter
def starturlminthreshold(self, starturlminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn start URLs.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._starturlminthreshold = starturlminthreshold
except Exception as e:
raise e
@property
def starturlpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular start URL pattern for the learning engine to learn that start URL.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._starturlpercentthreshold
except Exception as e:
raise e
@starturlpercentthreshold.setter
def starturlpercentthreshold(self, starturlpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular start URL pattern for the learning engine to learn that start URL.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._starturlpercentthreshold = starturlpercentthreshold
except Exception as e:
raise e
@property
def cookieconsistencyminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cookies.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._cookieconsistencyminthreshold
except Exception as e:
raise e
@cookieconsistencyminthreshold.setter
def cookieconsistencyminthreshold(self, cookieconsistencyminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cookies.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._cookieconsistencyminthreshold = cookieconsistencyminthreshold
except Exception as e:
raise e
@property
def cookieconsistencypercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular cookie pattern for the learning engine to learn that cookie.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._cookieconsistencypercentthreshold
except Exception as e:
raise e
@cookieconsistencypercentthreshold.setter
def cookieconsistencypercentthreshold(self, cookieconsistencypercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular cookie pattern for the learning engine to learn that cookie.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._cookieconsistencypercentthreshold = cookieconsistencypercentthreshold
except Exception as e:
raise e
@property
def csrftagminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cross-site request forgery (CSRF) tags.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._csrftagminthreshold
except Exception as e:
raise e
@csrftagminthreshold.setter
def csrftagminthreshold(self, csrftagminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn cross-site request forgery (CSRF) tags.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._csrftagminthreshold = csrftagminthreshold
except Exception as e:
raise e
@property
def csrftagpercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular CSRF tag for the learning engine to learn that CSRF tag.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._csrftagpercentthreshold
except Exception as e:
raise e
@csrftagpercentthreshold.setter
def csrftagpercentthreshold(self, csrftagpercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular CSRF tag for the learning engine to learn that CSRF tag.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._csrftagpercentthreshold = csrftagpercentthreshold
except Exception as e:
raise e
@property
def fieldconsistencyminthreshold(self) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn field consistency information.<br/>Default value: 1<br/>Minimum length = 1.
"""
try :
return self._fieldconsistencyminthreshold
except Exception as e:
raise e
@fieldconsistencyminthreshold.setter
def fieldconsistencyminthreshold(self, fieldconsistencyminthreshold) :
ur"""Minimum number of application firewall sessions that the learning engine must observe to learn field consistency information.<br/>Default value: 1<br/>Minimum length = 1
"""
try :
self._fieldconsistencyminthreshold = fieldconsistencyminthreshold
except Exception as e:
raise e
@property
def fieldconsistencypercentthreshold(self) :
ur"""Minimum percentage of application firewall sessions that must contain a particular field consistency pattern for the learning engine to learn that field consistency pattern.<br/>Default value: 0<br/>Maximum length = 100.
"""
try :
return self._fieldconsistencypercentthreshold
except Exception as e:
raise e
@fieldconsistencypercentthreshold.setter
def fieldconsistencypercentthreshold(self, fieldconsistencypercentthreshold) :
ur"""Minimum percentage of application firewall sessions that must contain a particular field consistency pattern for the learning engine to learn that field consistency pattern.<br/>Default value: 0<br/>Maximum length = 100
"""
try :
self._fieldconsistencypercentthreshold = fieldconsistencypercentthreshold
except Exception as e:
|
SANDEISON/Python | 04 - Funções e Arquivos À Solta/03 -Agrupando códigos em Módulos/02 - embaralha_nome.py | Python | gpl-2.0 | 176 | 0.028409 | def embaralha (x):
import random
lista = list(x)
| random.shuffle(lista)
return ''.joi | n(lista)
# Ask the user for a name (prompt text is in Portuguese: "Type some name")
# and print a randomly shuffled version of it.
nome = input ("Digite algum nome : ")
print (embaralha(nome))
|
michaelBenin/django-jinja | example_project/example_project/web/templatetags/testags.py | Python | bsd-3-clause | 301 | 0.006645 | # -*- coding: ut | f-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
    """Jinja2 context filter: render a datetime-like *value* with strftime."""
    formatted = value.strftime(format)
    return formatted
@register.global_context
def hello(name):
    """Global template helper: returns "Hello" concatenated with *name*."""
    greeting = "Hello" + name
    return greeting
|
GunioRobot/pywapi-dbus | setup.py | Python | lgpl-3.0 | 1,419 | 0.027484 | from distutils.core import setup
setup(
    name="pywapi-dbus",
    version="0.1-git",
    description="D-Bus Python Weather API Service is a D-Bus service providing weather information",
    author="Sasu Karttunen",
    author_email="sasu.karttunen@tpnet.fi",
    url="https://github.com/skfin/pywapi-dbus",
    scripts=['pywapidbus/pywapi-dbus'],
    packages=['pywapidbus'],
    # Install the D-Bus service activation file alongside the package.
    data_files=[('share/dbus-1/services', ['pywapidbus/org.pywapi.Daemon.service'])],
    long_description=(
        "D-Bus Python Weather API Service is intended to provide weather "
        "information through D-Bus. It's main goal is to provide same "
        "functionality as Python Weather API provides as Python library. "
        "D-Bus Python Weather API Service can be used in all programming "
        "languages that has working D-Bus libraries available."
    ),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
        'Programming Language :: Python',
        'Environment :: No Input/Output (Daemon)',
        'Operating System :: UNIX',
        'Topic :: Software Development :: Libraries',  # Not actually a library. Don't blame us, blame trove classifiers.
        'Topic :: Scientific/Engineering :: Atmospheric Science',  # We can provide some athmosperic information :P
    ]
)
|
slava-sh/NewsBlur | apps/search/management/commands/index_stories.py | Python | mit | 993 | 0.006042 | import re
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from apps.rss_feeds.models import Feed
from apps.reader.models import UserSubscription
from optparse import make_option
class Command(BaseCommand):
    """Management command: reindex search for every feed a user subscribes to."""

    option_list = BaseCommand.option_list + (
        make_option("-u", "--user", dest="user", nargs=1,
                    help="Specify user id or username"),
    )

    def handle(self, *args, **options):
        # The -u option accepts either a numeric user id or a username.
        if re.match(r"([0-9]+)", options['user']):
            user = User.objects.get(pk=int(options['user']))
        else:
            user = User.objects.get(username=options['user'])

        subscriptions = UserSubscription.objects.filter(user=user)
        print(" ---> Indexing %s feeds..." % subscriptions.count())

        for sub in subscriptions:
            try:
                sub.feed.index_stories_for_search()
            except Feed.DoesNotExist:
                print(" ***> Couldn't find %s" % sub.feed_id)
|
emlid/ReachView | ReachLED.py | Python | gpl-3.0 | 5,937 | 0.004716 | #!/usr/bin/python
# ReachView code is placed under the GPL license.
# Written by Egor Fedorov (egor.fedorov@emlid.com)
# Copyright (c) 2015, Emlid Limited
# All rights reserved.
# If you are interested in using ReachView code as a part of a
# closed source project, please contact Emlid Limited (info@emlid.com).
# This file is part of ReachView.
# ReachView is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# ReachView is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with ReachView. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
from threading import Thread
from GPIO import GPIO
class ReachLED:
    """Drives the Reach RGB LED through the sysfs PWM interface.

    Colors are expressed as [red, green, blue] duty-cycle fractions in
    colors_dict; a background thread can blink a comma-separated pattern.
    """

    pwm_prefix = "/sys/class/pwm/pwmchip0/"

    def __init__(self):
        self.pins = [GPIO(12), GPIO(13), GPIO(182)]  # green, red, blue

        # thread, used to blink later
        self.blinker_thread = None

        # polled by the blinker thread; cleared to make it stop
        self.blinker_not_interrupted = True

        # keep current state in order to restore later
        self.current_blink_pattern = ""

        # RGB duty-cycle mix (fractions of full power) for each named color
        self.colors_dict = {
            "off": [0, 0, 0],
            "red": [1, 0, 0],
            "green": [0, 1, 0],
            "blue": [0, 0, 1],
            "white": [1, 1, 1],
            "yellow": [1, 1, 0],
            "cyan": [0, 1, 1],
            "magenta": [1, 0, 1],
            "orange": [1, 0.4, 0],
            "weakred": [0.1, 0, 0]
        }

        # channel numbers
        self.pwm_channels = [0, 1, 2]  # red, green, blue

        # first, we need to change the pin's pinmux to mode1
        for pin in self.pins:
            pin.setPinmux("mode1")

        # then, export the 3 pwm channels if needed
        for ch in self.pwm_channels:
            if not os.path.exists(self.pwm_prefix + "/pwm" + str(ch)):
                with open(self.pwm_prefix + "export", "w") as f:
                    f.write(str(ch))

        # enable all of the channels
        for ch in self.pwm_channels:
            with open(self.pwm_prefix + "pwm" + str(ch) + "/enable", "w") as f:
                f.write("1")

        # set period
        for ch in self.pwm_channels:
            with open(self.pwm_prefix + "pwm" + str(ch) + "/period", "w") as f:
                f.write("1000000")

    def setDutyCycle(self, channel, percentage=None):
        """Set the duty cycle of *channel* to *percentage* (0-100).

        The sysfs encoding is inverted: 0% maps to 1000000, 100% to 0.
        """
        if percentage is None:
            # BUG FIX: previously a missing percentage crashed the
            # arithmetic below with a TypeError; default to 0 (channel off).
            percentage = 0
        duty_value = int((100 - percentage) * 10000)
        with open(self.pwm_prefix + "pwm" + str(channel) + "/duty_cycle", "w") as f:
            f.write(str(duty_value))

    def setColor(self, color, power_percentage=None):
        """Show a named color at *power_percentage* (default 100).

        Available colors are the keys of colors_dict (red, green, blue,
        white, yellow, cyan, magenta, orange, weakred, off).
        Returns 0 on success, -1 if the color is unknown.
        """
        if power_percentage is None:
            power_percentage = 100
        if color in self.colors_dict:
            for i in range(0, 3):
                self.setDutyCycle(i, self.colors_dict[color][i] * power_percentage)
            return 0
        else:
            # no such color available :(
            return -1

    def startBlinker(self, pattern, delay=None):
        """Start (or restart) a background thread that blinks *pattern*."""
        self.current_blink_pattern = pattern
        if self.blinker_thread is None:
            self.blinker_not_interrupted = True
            self.blinker_thread = Thread(target=self.blinkPattern, args=(pattern, delay))
            self.blinker_thread.start()
        else:
            # we already have a blinker started and need to restart it
            # using the new pattern
            self.stopBlinker()
            self.startBlinker(pattern, delay)

    def stopBlinker(self):
        """Stop the blinker thread and wait for it to exit."""
        self.blinker_not_interrupted = False
        if self.blinker_thread is not None:
            self.blinker_thread.join()
            self.blinker_thread = None

    def blinkPattern(self, pattern, delay=None):
        """Blink a comma-separated list of colors, e.g. "red,blue,off".

        Colors are flashed one by one, *delay* seconds apart (0.5s by
        default), until stopBlinker() clears the flag.
        """
        color_list = pattern.split(",")
        if delay is None:
            delay = 0.5
        while self.blinker_not_interrupted:
            for color in color_list:
                if not self.blinker_not_interrupted:
                    break
                self.setColor(color)
                time.sleep(delay)
time.sleep(delay)
def test():
    """Manual smoke test: turn everything off, then light each channel in turn."""
    led = ReachLED()
    print("Starting...")
    # BUG FIX: the original zeroed channel 0 three times; zero all three
    # channels (red, green, blue) instead.
    led.setDutyCycle(0, 0)
    led.setDutyCycle(1, 0)
    led.setDutyCycle(2, 0)
    time.sleep(1)
    print("After pause...")
    print("Channel 0")
    led.setDutyCycle(0, 100)
    time.sleep(1)
    print("Channel 1")
    led.setDutyCycle(0, 0)
    led.setDutyCycle(1, 100)
    time.sleep(1)
    print("Channel 2")
    led.setDutyCycle(1, 0)
    led.setDutyCycle(2, 100)
    time.sleep(1)
if __name__ == "__main__":
    # test()
    led = ReachLED()
    if len(sys.argv) >= 2:
        # Try to show the requested color; setColor returns -1 when unknown.
        if led.setColor(sys.argv[1]) < 0:
            print("Can't set this color. You may add this in the colors_dict variable")
    else:
        print("You need to specify a color")
        print("List of colors:")
        colors = "".join(color + ", " for color in led.colors_dict)
        print(colors)
|
cohadar/learn-python-the-hard-way | cohadar/game.py | Python | mit | 663 | 0.033183 | """Classes for a Game."""
class Room(object):
    """A location in the game world.

    A room carries a name, a description, and a mapping of direction
    names to the neighbouring rooms ("paths").
    """

    def __init__(self, name, description):
        """Create a room with the given name and description and no paths."""
        self.name = name
        self.description = description
        self.paths = {}

    def add_paths(self, paths):
        """Merge *paths* (a direction -> Room mapping) into this room's exits."""
        self.paths.update(paths)

    def go(self, direction):
        """Return the room reached via *direction*, or None when there is no exit."""
        return self.paths.get(direction, None)

    def __repr__(self):
        """Readable form, handy when debugging tests."""
        return "Room(name={})".format(self.name)
|
Eigenlabs/EigenD | pisession/oldfixture.py | Python | gpl-3.0 | 1,459 | 0.007539 |
#
# Copyright 2009 Eigenlabs Ltd. http://www.eigenlabs.com
#
# This file is part of EigenD.
#
# EigenD is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EigenD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EigenD. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import time
import unittest

import piagent
import picross
import piw
from pi import utils
class Fixture(unittest.TestCase):
    """TestCase that wires an EigenD single-threaded scaffold around each test."""

    def __init__(self, tests, logger=None, **kwds):
        unittest.TestCase.__init__(self, tests)
        self.manager = piagent.scaffold_st()
        self.logger = logger

    def setUp(self):
        # Build a fresh agent context and point piw at its environment.
        self.context = self.manager.context(utils.notify(None), utils.stringify(self.logger), 'test')
        piw.setenv(self.context.getenv())
        if self.logger is not None:
            # Capture stdout through the supplied logger; the old streams
            # are saved so tearDown can restore them.  (Requires the
            # module-level "import sys", which was previously missing.)
            self.oldstd = sys.stdout, sys.stderr
            sys.stdout, sys.stderr = self.logger.Logger(), sys.stdout

    def tearDown(self):
        if self.logger is not None:
            # BUG FIX: the redirected streams were never restored.
            sys.stdout, sys.stderr = self.oldstd
        self.context.kill()
        self.context = None

    def once(self):
        self.manager.once()
|
uxlsl/uxlsl.github.io | demo/code/2021-11-15/f.py | Python | mit | 135 | 0.007407 | #!/usr/bin/env python
# -*- | coding: ut | f-8 -*-
def f(k, n):
    # Kata stub from a blog post: the body is intentionally unimplemented.
    # The assertions below define the contract the eventual implementation
    # must satisfy; they currently fail because f returns None.
    pass
assert f(1, 2) == 2
assert f(2, 6) == 3
assert f(3, 14) == 14
|
axaxaxas/daniel-powell-portfolio | narsil/nutils.py | Python | lgpl-3.0 | 6,200 | 0.029839 | # This software is distributed under the GNU Lesser General Public License.
# See the root of this repository for details.
# Copyright 2012 Daniel Powell
import threading
import math
import os
from struct import pack, unpack
### some default global parameters ###
FRAGMENT_SIZE = 4096  # bytes sent per network fragment (see fragments())
NARSIL_PORT = 28657 # it's the eighth Fibonacci prime
ALL_INTERFACES = '0.0.0.0' # for ipv6, change to '::'
FILENAME_LENGTH = 256  # maximum remote filename length (see post_header())
class LockDict:
    """Dictionary mapping strings to lock objects.

    Used to manage locks for an arbitrary number of files while creating
    lock objects only as necessary.  acquire()/release() lock and unlock
    one name; release_all() releases every lock at once.  Lock objects can
    also be removed from the dictionary instead of being released, letting
    them be garbage collected; free() and free_all() supply that.
    """

    def __init__(self):
        self.dict = {}

    def acquire(self, name, blocking=True):
        # BUG FIX: this previously tested the undefined name `filename`,
        # so every call raised NameError.
        if name not in self.dict:
            self.dict[name] = threading.Lock()
        return self.dict[name].acquire(blocking)

    def release(self, name):
        self.dict[name].release()

    def release_all(self):
        for f in self.dict:
            self.release(f)

    def free(self, name):
        # in practice, it's probably best not to use free() or free_all()
        # unless you're really sure you won't be needing the lock again
        # and memory footprint is actually an issue.
        del self.dict[name]

    def free_all(self):
        # BUG FIX: deleting entries while iterating the dict raised
        # RuntimeError on Python 3; clear() drops everything safely.
        self.dict.clear()
class TransactError(Exception):
    """Raised when a narsil network transaction fails."""

    def __init__(self, description):
        self.description = description

    def __str__(self):
        # Present the stored description quoted, exactly like repr().
        return repr(self.description)
def choose(n, k):
    """Return the binomial coefficient "n choose k" (0 when k > n).

    Uses integer floor division so the result stays an exact int on
    Python 3 (true division previously produced a float there).
    """
    if k > n:
        return 0
    return math.factorial(n) // (math.factorial(k) * math.factorial(n - k))
def min_src(hosts, n, k):
    """Minimum number of sources needed to reconstruct a file, given the
    N and K sharding parameters and the total host count."""
    deficit = choose(n - 1, k) - choose(n, k)
    return deficit + hosts + 1
def find_parameters(hosts):
    # Returns a dictionary mapping n,k pairs to tuples of
    # an m-value (minimum sources, via min_src) and a k/n ratio
    # for a given number of hosts.
    # NOTE: Python 2 code (xrange); candidate (n,k) pairs are pruned as
    # soon as they need more shard combinations or sources than hosts.
    results = {}
    for n in xrange(1,hosts+1):
        for k in xrange(2,n):
            if choose(n,k) > hosts: break
            if min_src(hosts, n, k) > hosts: break
            if min_src(hosts, n, k) < 0: break
            results[(n,k)] = (min_src(hosts,n,k), float(k)/float(n))
    return results
def increment(L, n):
    # Given a combination L (as a list) of integers on the interval [0, n),
    # returns the next combination in left-handed ascending sequence
    # if possible, or else returns False.
    # NOTE: Python 2 code (xrange).  Works recursively Lisp-style on the
    # head (car) and tail (cdr) of the list.
    k = len(L)
    car = L[0]
    cdr = L[1:]
    if L == [j for j in xrange(n-k,n)]:
        # L is already the final combination [n-k, ..., n-1]
        return False
    if k == 1:
        return [car+1]
    if cdr == [j for j in xrange(n-(k-1),n)]:
        # tail is maxed out: bump the head and reset the tail after it
        return [car+1+j for j in xrange(0,k)]
    return [car] + increment(cdr,n)
def chunks(filename, numchunks):
    """Lazily split the file at *filename* into *numchunks* byte strings.

    Each chunk is read only when the generator is advanced, so the whole
    file never has to be held in memory at once.  The chunk size is
    rounded up, so the last chunk may be shorter (or empty) but the file
    always fits in numchunks chunks with no spillover.
    """
    size = os.path.getsize(filename)
    # BUG FIX (py3): true division made interval a float, crashing
    # f.read(); floor division keeps it an int on both Python 2 and 3.
    interval = size // numchunks + 1
    # The file handle is now closed deterministically via the context
    # manager (previously it was left open).
    with open(filename, 'rb') as f:
        for _ in range(numchunks):
            yield f.read(interval)
def shards(n, k):
    """Return the ordered list of all k-combinations drawn from [0, n).

    Starts from the first combination [0, ..., k-1] and repeatedly
    advances it with increment(), which returns False once exhausted.
    """
    shardlist = []
    combo = list(range(k))
    while combo:
        shardlist.append(combo)
        combo = increment(combo, n)
    return shardlist
def fragments(string, fragmentsize):
    """Lazily yield successive *fragmentsize*-character pieces of *string*.

    Unlike chunks(), the entire string is already held in memory; only the
    slicing is lazy.
    """
    offset = 0
    piece = string[offset:offset + fragmentsize]
    while piece != '':
        yield piece
        offset += fragmentsize
        piece = string[offset:offset + fragmentsize]
def fragment_count(string, fragmentsize):
    """Return the number of fragments fragments(string, fragmentsize) yields.

    That is ceil(len(string) / fragmentsize).  BUG FIX: the original had
    the operands swapped and computed ceil(fragmentsize / len(string)),
    contradicting both its own docstring and the fragments() generator.
    """
    # -(-a // b) is integer ceiling division without importing math.
    return -(-len(string) // fragmentsize)
def post_header(filename, chunknumber, chunk):
    """Generate the header for a "post" transaction.

    Wire layout: "<remotename>**<8-byte big-endian length>**", with the
    chunk payload following the header.  Raises TransactError when the
    derived remote filename is too long.
    """
    remotename = filename + ".chunk" + str(chunknumber)
    # Same limit as before (> 255), now expressed via the module constant
    # for consistency: >= 256 is equivalent to > 255.
    if len(remotename) >= FILENAME_LENGTH:
        raise TransactError("Remote filename would be too long!")
    else:
        return (remotename + "**" + pack('!Q', len(chunk)) + "**")
def supply_header(filename):
    """Generate the header for the reply to a "recv" request.

    The header is the file size as an 8-byte big-endian integer followed
    by the "**" delimiter.
    """
    size = os.path.getsize(filename)
    return pack('!Q', size) + "**"
def parse_post_header(header):
    """Parse a file-post header into a dict with 'filename', 'size', 'data'.

    'data' holds any payload bytes that were scooped up along with the
    header; any prefix of the transfer starting at byte 0 is accepted,
    provided it is at least as long as the header itself.
    """
    filename, _, remainder = header.partition("**")
    packed_size, _, data = remainder.partition("**")
    return {
        'filename': filename,
        'size': unpack('!Q', packed_size)[0],
        'data': data,
    }
def parse_supply_header(header):
    """Parse a file-supply header into a dict with keys 'size' and 'data'.

    'data' holds any payload bytes read along with the header; any prefix
    of the transfer starting at byte 0 is accepted, provided it is at
    least as long as the header itself.
    """
    packed_size, _, data = header.partition("**")
    return {
        'size': unpack('!Q', packed_size)[0],
        'data': data,
    }
|
jalanb/kd | cde/timings.py | Python | mit | 1,061 | 0 | """Methods to handle times"""
import time
def now():
    """Return the current time in seconds since the epoch.

    Exists only so other modules can avoid an extra time import.
    """
    return time.time()
def time_since(number_of_seconds):
    """Convert an epoch timestamp's age to English.

    Retain only the two most significant units.  BUG FIX: a dead first
    assignment to interval (immediately overwritten, and computing the
    negated age) has been removed.

    >>> expected = '13 hours, 2 minutes'
    >>> actual = time_since(time.time() - (13*60*60 + 2*60 + 5))
    >>> assert actual == expected
    """
    interval = int(time.time() - float(number_of_seconds))
    minutes, seconds = divmod(interval, 60)
    if not minutes:
        return "%s seconds" % seconds
    hours, minutes = divmod(minutes, 60)
    if not hours:
        return "%s minutes, %s seconds" % (minutes, seconds)
    days, hours = divmod(hours, 24)
    if not days:
        return "%s hours, %s minutes" % (hours, minutes)
    years, days = divmod(days, 365)
    if not years:
        return "%s days, %s hours" % (days, hours)
    return "%s years, %s days" % (years, days)
|
gfelbing/cppstyle | cppstyle/model/function.py | Python | gpl-3.0 | 229 | 0.004367 | from .node import Node
class Function(Node):
    """AST node for a function: the shared Node fields plus a name."""

    def __init__(self, file, position, access, comments, name, children):
        # Delegate the common fields to Node, then record the function name.
        super(Function, self).__init__(file, position, access, comments, children)
        self.name = name
|
samuelclay/NewsBlur | apps/oauth/views.py | Python | mit | 31,341 | 0.006413 | import urllib.request, urllib.parse, urllib.error
import datetime
import lxml.html
import tweepy
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.conf import settings
from mongoengine.queryset import NotUniqueError
from mongoengine.queryset import OperationError
from apps.social.models import MSocialServices, MSocialSubscription, MSharedStory
from apps.social.tasks import SyncTwitterFriends, SyncFacebookFriends
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import compute_story_score
from apps.rss_feeds.models import Feed, MStory, MStarredStoryCounts, MStarredStory
from apps.rss_feeds.text_importer import TextImporter
from utils import log as logging
from utils.user_functions import ajax_login_required, oauth_login_required
from utils.view_functions import render_to
from utils import urlnorm
from utils import json_functions as json
from vendor import facebook
@login_required
@render_to('social/social_connect.xhtml')
def twitter_connect(request):
    """OAuth dance for linking a NewsBlur account to Twitter.

    Three entry states, distinguished by query parameters:
      - denied: the user rejected the app on Twitter;
      - oauth_token + oauth_verifier: callback leg -- exchange the
        verifier for an access token and store it on MSocialServices;
      - neither: first leg -- send the user to Twitter's auth URL.
    """
    twitter_consumer_key = settings.TWITTER_CONSUMER_KEY
    twitter_consumer_secret = settings.TWITTER_CONSUMER_SECRET
    oauth_token = request.GET.get('oauth_token')
    oauth_verifier = request.GET.get('oauth_verifier')
    denied = request.GET.get('denied')
    if denied:
        logging.user(request, "~BB~FRDenied Twitter connect")
        return {'error': 'Denied! Try connecting again.'}
    elif oauth_token and oauth_verifier:
        try:
            # Resume the handshake using the request token stashed in the
            # session by the first leg below.
            auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
            auth.request_token = request.session['twitter_request_token']
            # auth.set_request_token(oauth_token, oauth_verifier)
            auth.get_access_token(oauth_verifier)
            api = tweepy.API(auth)
            twitter_user = api.me()
        except (tweepy.TweepError, IOError) as e:
            logging.user(request, "~BB~FRFailed Twitter connect: %s" % e)
            return dict(error="Twitter has returned an error. Try connecting again.")
        # Be sure that two people aren't using the same Twitter account.
        existing_user = MSocialServices.objects.filter(twitter_uid=str(twitter_user.id))
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed Twitter connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those Twitter credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale record pointing at a deleted user: discard it.
                existing_user.delete()
        social_services = MSocialServices.get_user(request.user.pk)
        social_services.twitter_uid = str(twitter_user.id)
        social_services.twitter_access_key = auth.access_token
        social_services.twitter_access_secret = auth.access_token_secret
        social_services.syncing_twitter = True
        social_services.save()
        # Friend syncing is slow; hand it off to the task queue.
        SyncTwitterFriends.delay(user_id=request.user.pk)
        logging.user(request, "~BB~FRFinishing Twitter connect")
        return {}
    else:
        # Start the OAuth process
        auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
        auth_url = auth.get_authorization_url()
        request.session['twitter_request_token'] = auth.request_token
        logging.user(request, "~BB~FRStarting Twitter connect: %s" % auth.request_token)
        return {'next': auth_url}
@login_required
@render_to('social/social_connect.xhtml')
def facebook_connect(request):
    """OAuth dance for linking a NewsBlur account to Facebook."""
    facebook_app_id = settings.FACEBOOK_APP_ID
    facebook_secret = settings.FACEBOOK_SECRET
    params = {
        "client_id": facebook_app_id,
        "redirect_uri": "https://" + Site.objects.get_current().domain + '/oauth/facebook_connect',
        "scope": "user_friends",
        "display": "popup",
    }

    verification_code = request.GET.get('code')
    if verification_code:
        # Second leg: swap the verification code for an access token.
        params["client_secret"] = facebook_secret
        params["code"] = verification_code
        token_uri = "https://graph.facebook.com/oauth/access_token?" + \
            urllib.parse.urlencode(params)
        response = json.decode(urllib.request.urlopen(token_uri).read())
        if "access_token" not in response:
            logging.user(request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (params, response))
            return dict(error="Facebook has returned an error. Try connecting again.")
        access_token = response["access_token"]

        # Fetch the user's profile to learn their Facebook uid.
        graph = facebook.GraphAPI(access_token)
        profile = graph.get_object("me")
        uid = profile["id"]

        # Be sure that two people aren't using the same Facebook account.
        existing_user = MSocialServices.objects.filter(facebook_uid=uid)
        if existing_user and existing_user[0].user_id != request.user.pk:
            try:
                user = User.objects.get(pk=existing_user[0].user_id)
                logging.user(request, "~BB~FRFailed FB connect, another user: %s" % user.username)
                return dict(error=("Another user (%s, %s) has "
                                   "already connected with those Facebook credentials."
                                   % (user.username, user.email or "no email")))
            except User.DoesNotExist:
                # Stale record pointing at a deleted user: discard it.
                existing_user.delete()

        social_services = MSocialServices.get_user(request.user.pk)
        social_services.facebook_uid = uid
        social_services.facebook_access_token = access_token
        social_services.syncing_facebook = True
        social_services.save()
        # Friend syncing is slow; hand it off to the task queue.
        SyncFacebookFriends.delay(user_id=request.user.pk)
        logging.user(request, "~BB~FRFinishing Facebook connect")
        return {}
    elif request.GET.get('error'):
        logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.GET.get('error'))
        return {'error': '%s... Try connecting again.' % request.GET.get('error')}
    else:
        # First leg: send the user to Facebook's OAuth dialog.
        logging.user(request, "~BB~FRStarting Facebook connect")
        url = "https://www.facebook.com/dialog/oauth?" + urllib.parse.urlencode(params)
        return {'next': url}
@ajax_login_required
def twitter_disconnect(request):
    """Unlink the user's Twitter account, then refresh the friends list."""
    logging.user(request, "~BB~FRDisconnecting Twitter")
    services = MSocialServices.objects.get(user_id=request.user.pk)
    services.disconnect_twitter()
    return HttpResponseRedirect(reverse('load-user-friends'))
@ajax_login_required
def facebook_disconnect(request):
    """Unlink the user's Facebook account, then refresh the friends list."""
    logging.user(request, "~BB~FRDisconnecting Facebook")
    services = MSocialServices.objects.get(user_id=request.user.pk)
    services.disconnect_facebook()
    return HttpResponseRedirect(reverse('load-user-friends'))
@ajax_login_required
@json.json_view
def follow_twitter_account(request):
    """Have the logged-in user follow an official NewsBlur Twitter account."""
    username = request.POST['username']
    code, message = 1, "OK"

    logging.user(request, "~BB~FR~SKFollowing Twitter: %s" % username)

    # Only the official accounts may be followed through this endpoint.
    if username not in ['samuelclay', 'newsblur']:
        return HttpResponseForbidden()

    social_services = MSocialServices.objects.get(user_id=request.user.pk)
    try:
        social_services.twitter_api().create_friendship(username)
    except tweepy.TweepError as e:
        code, message = -1, e

    return {'code': code, 'message': message}
@ajax_login_required
@json.json_view
def unfollow_twitter_account(request):
username = request.POST['username']
code = 1
message = "OK"
logging.user(request, "~BB~FRUnfollo |
mind1master/aiohttp | aiohttp/worker.py | Python | apache-2.0 | 5,538 | 0 | """Async gunicorn worker for aiohttp.web"""
import asyncio
import os
import signal
import ssl
import sys
import gunicorn.workers.base as base
from aiohttp.helpers import ensure_future
__all__ = ('GunicornWebWorker', 'GunicornUVLoopWebWorker')
class GunicornWebWorker(base.Worker):
def __init__(self, *args, **kw): # pragma: no cover
super().__init__(*args, **kw)
self.servers = {}
self.exit_code = 0
def init_process(self):
# create new event_loop after fork
asyncio.get_event_loop().close()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
super().init_process()
def run(self):
self._runner = ensure_future(self._run(), loop=self.loop)
try:
self.loop.run_until_complete(self._runner)
finally:
self.loop.close()
sys.exit(self.exit_code)
def make_handler(self, app):
return app.make_handler(
logger=self.log,
debug=self.cfg.debug,
timeout=self.cfg.timeout,
keep_alive=self.cfg.keepalive,
access_log=self.log.access_log,
access_log_format=self.cfg.access_log_format)
@asyncio.coroutine
def close(self):
if self.servers:
servers = self.servers
self.servers = None
# stop accepting connections
for server, handler in servers.items():
self.log.info("Stopping server: %s, connections: %s",
self.pid, len(handler.connections))
server.close()
# send on_shutdown event
yield from self.wsgi.shutdown()
# stop alive connections
tasks = [
handler.finish_connections(
timeout=self.cfg.graceful_timeout / 100 * 95)
for handler in servers.values()]
yield from asyncio.wait(tasks, loop=self.loop)
# stop application
yield from self.wsgi.finish()
@asyncio.coroutine
def _run(self):
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
for sock in self.sockets:
handler = self.make_handler(self.wsgi)
srv = yield from self.loop.create_server(handler, sock=sock.sock,
ssl=ctx)
self.servers[srv] = handler
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive:
self.notify()
if pid == os.getpid() and self.ppid != os.getppid():
self.alive = False
self.log.info("Parent changed, shutting down: %s", self)
else:
yield from asyncio.sleep(1.0, loop=self.loop)
if self.cfg.max_requests and self.servers:
connections = 0
for _, handler in self.servers.items():
connections += handler.num_connections
if connections > self.cfg.max_requests:
self.alive = False
self.log.info("Max requests, shutting down: %s", self)
except BaseException:
pass
yield from self.close()
def init_signals(self):
    """Register signal handlers through the event loop API so they run
    safely inside the loop rather than as raw C-level handlers."""
    self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit,
                                 signal.SIGQUIT, None)

    self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit,
                                 signal.SIGTERM, None)

    self.loop.add_signal_handler(signal.SIGINT, self.handle_quit,
                                 signal.SIGINT, None)

    self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch,
                                 signal.SIGWINCH, None)

    self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1,
                                 signal.SIGUSR1, None)

    self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort,
                                 signal.SIGABRT, None)

    # Don't let SIGTERM and SIGUSR1 disturb active requests
    # by interrupting system calls
    signal.siginterrupt(signal.SIGTERM, False)
    signal.siginterrupt(signal.SIGUSR1, False)
def handle_quit(self, sig, frame):
    # Graceful stop: _run() notices the flag, drains and closes servers.
    self.alive = False
def handle_abort(self, sig, frame):
    # Forced stop: shut down and report a non-zero exit status.
    self.alive = False
    self.exit_code = 1
@staticmethod
def _create_ssl_context(cfg):
    """Build an ssl.SSLContext from the gunicorn config for use with
    asyncio's ``create_server`` (mirrors ssl.SSLSocket.__init__).
    """
    context = ssl.SSLContext(cfg.ssl_version)
    context.load_cert_chain(cfg.certfile, cfg.keyfile)
    context.verify_mode = cfg.cert_reqs
    if cfg.ca_certs:
        context.load_verify_locations(cfg.ca_certs)
    if cfg.ciphers:
        context.set_ciphers(cfg.ciphers)
    return context
class GunicornUVLoopWebWorker(GunicornWebWorker):
    """Web worker variant that runs on the uvloop event loop."""

    def init_process(self):
        import uvloop  # imported lazily: only this worker needs it

        # Close any existing event loop before setting a
        # new policy.
        asyncio.get_event_loop().close()

        # Setup uvloop policy, so that every
        # asyncio.get_event_loop() will create an instance
        # of uvloop event loop.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        super().init_process()
|
kichkasch/rtmom | rtmom.py | Python | gpl-3.0 | 6,976 | 0.007024 | #!/usr/bin/env python
"""
Main module for rtmom
Elementary based client for "Remember the Milk" (http://www.rememberthemilk.com/) written in Python.
Copyright (C) 2010 Michael Pilgermann <kichkasch@gmx.de>
http://github.com/kichkasch/rtmom
rtmom is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
rtmom is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with rtmom. If not, see <http://www.gnu.org/licenses/>.
"""
import config
import rtm
class RTMOM():
    """
    Local data management for rtmom

    @ivar _categories: Maps names of Categories (lists) for tasks to their corresponding IDs
    @ivar _tasks: Holds information about all tasks in memory
    """
    def __init__(self):
        """
        Constructor - empty initialize categories and tasks
        """
        self._categories = None
        self._tasks = None

    def getCategories(self):
        """
        Return the category names in memory, sorted alphabetically
        """
        return sorted(self._categories.keys())

    def getFullTasks(self, category):
        """
        Return the full task objects for one category
        """
        return self._tasks[category]

    def getFullTaskFromName(self, taskName):
        """
        Search all categories for the full task with the given name

        @raise ValueError: if no task with that name exists
        """
        for cat in self.getCategories():
            for fullTask in self.getFullTasks(cat):
                if fullTask.name == taskName:
                    return fullTask
        raise ValueError('Task with given name could not be found.')

    def getTasks(self, cat):
        """
        Return only the names of the tasks in one category
        """
        taskNames = []
        for task in self.getFullTasks(cat):
            taskNames.append(task.name)
        return taskNames

    # NOTE: print() below uses the single-argument call form, which prints
    # identically under Python 2 (where the rest of this module runs) and
    # Python 3, instead of the py2-only "print x" statement.

    def doLoadFromFile(self, fileHandler):
        """
        Populate local lists with content from local file
        """
        print("--- Loading from local cache")
        self._tasks, self._categories = fileHandler.loadFromFile()
        print("\t Success")

    def doSaveToFile(self, fileHandler):
        """
        Write all information from local lists (memory) to local file
        """
        print("--- Saving to local cache")
        fileHandler.saveToFile(self._tasks, self._categories)
        print("\t Success")

    def updateFromNet(self, netHandler):
        """
        Populate local lists (memory) with content from RTM service in the Internet
        """
        print("--- Updating from Net")
        self._categories = netHandler.loadCategories()
        self._tasks = {}
        # avoid shadowing the builtin "id"
        for name, catId in self._categories.items():
            self._tasks[name] = netHandler.loadFullTasks(catId)
        print("\t Success")

    def markTaskComplete(self, netHandler, catName, fullTask):
        """
        Mark a single task completed
        """
        print("--- Marking one task complete")
        catId = self._categories[catName]
        netHandler.markTaskCompleted(catId, fullTask)
        print("\t Success")
extractor = None

def getExtractor():
    """
    Lazily create and return the shared InformationExtractor instance
    (module-level singleton).
    """
    global extractor
    if extractor is None:
        extractor = InformationExtractor()
    return extractor
class InformationExtractor():
    """
    Turns the DottedDict structures coming back from the RTM API into plain
    strings and lists, and formats values for display in Elementary widgets.
    """
    def __init__(self):
        pass

    def extractTags(self, task, delimiter = ", "):
        """
        Assemble a task's tags into a single delimiter-separated string.
        """
        if isinstance(task.tags, list):    # an empty tag set arrives as a list
            return ""
        entries = task.tags.tag
        if not isinstance(entries, list):
            return "" + entries            # exactly one tag
        joined = entries[0]
        for extra in entries[1:]:
            joined += delimiter + extra
        return joined

    def extractTaskSeriesFromDottedDict(self, taskseries):
        """
        Flatten one taskseries entry (single item or list/tuple) into a list.
        """
        if isinstance(taskseries, (list, tuple)):
            return [entry for entry in taskseries]
        return [taskseries]

    def extractTasksFromDottedDict(self, taskList):
        """
        Resolve the irregular DottedDict task structure into a flat task list.
        """
        if isinstance(taskList, list):     # API quirk: an empty list means "no tasks"
            return []
        try:
            inner = taskList.list
        except AttributeError:             # a missing 'list' attribute also means "no tasks"
            return []
        collected = []
        if isinstance(inner, (list, tuple)):
            for item in inner:
                collected.extend(self.extractTaskSeriesFromDottedDict(item.taskseries))
        else:
            collected.extend(self.extractTaskSeriesFromDottedDict(inner.taskseries))
        return collected

    def replaceCharactersBefore(self, string, maxLen = 0):
        """
        Sanitise a string for Elementary labels/text fields; when maxLen is
        given, truncate to that length with a trailing '...'.
        """
        cleaned = str(string).replace('<', '(').replace('>', ')').replace('\\/', '/')
        if maxLen and len(cleaned) > maxLen:
            cleaned = cleaned[:maxLen - 3] + "..."
        return cleaned

    def replaceCharactersAfter(self, string):
        """
        Final formatting pass for display: convert newlines to <br> markup.
        """
        return str(string).replace('\n', '<br>')

    def formatNote(self, note):
        """
        Render a task note (title plus content) as a text-field string.
        """
        title = self.replaceCharactersBefore(note.title)
        content = self.replaceCharactersBefore(getattr(note, '$t'))  # note text hides under the XML key '$t'
        return self.replaceCharactersAfter("<b>" + title + "</>\n" + content)
"""
This starts rtmom
"""
if __name__ == '__main__':
import gui.rtmom_gui
gui.rtmom_gui.initAndRun()
|
evereux/flask_template | tests.py | Python | mit | 1,928 | 0.006743 | #! python 3
# -*- coding: utf8 -*-
from coverage import coverage
# Start branch-coverage measurement before the application imports below so
# their module-level code is counted; skip the virtualenv and this file.
cov = coverage(branch=True, omit=['venv/*', 'tests.py'])
cov.start()
import os
import unittest
from datetime import datetime, timedelta
import bcrypt
from config import basedir, SQLALCHEMY_DATABASE_URI
from application import app, db
from application.models import User, Group
class TestCase(unittest.TestCase):
    """Unit tests for the application's User and Group models."""

    @staticmethod
    def _hash_password(plaintext):
        # bcrypt works on bytes; hash with a fresh salt each call.
        # (Extracted: both tests previously duplicated these three lines.)
        return bcrypt.hashpw(plaintext.encode('utf-8'), bcrypt.gensalt())

    def setUp(self):
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
        db.create_all()

    def tearDown(self):
        # NOTE(review): only the session is discarded here; tables and any
        # committed rows persist between tests -- confirm that is intended.
        db.session.remove()

    def test_user(self):
        """A freshly created user exposes the expected login properties."""
        password = self._hash_password('testicles')
        u = User(name='John Doe', email='john@example.com', username='jdoe', password=password)
        db.session.add(u)
        assert u.is_authenticated is True
        assert u.is_active is True
        assert u.is_anonymous is False
        # assert u.id == u.get_id() # not possible unless db session i

    def test_groups(self):
        """A user appended to a group is counted among its members."""
        password = self._hash_password('testicles')
        # Fixed: the original "u1 = u = User(...)" double assignment left an
        # unused alias behind.
        u1 = User(name='John Doe', email='john@example.com', username='jdoe', password=password)
        db.session.add(u1)
        # add group
        g = Group(group_name='group_test')
        db.session.add(g)
        # add user to group
        g.users.append(u1)
        assert u1.groups.count() == 1
if __name__ == '__main__':
    try:
        unittest.main()
    except SystemExit:
        # unittest.main() always calls sys.exit(); swallow only that so the
        # coverage report below still runs. (Previously a bare "except:"
        # hid every other failure as well.)
        pass
    cov.stop()
    cov.save()
    print('\n\nCoverage Report:\n')
    cov.report()
    print('\nHTML version: {}'.format(os.path.join(basedir, 'tmp/coverage/index.html')))
    cov.html_report(directory='tmp/coverage')
    cov.erase()
|
Azure/azure-sdk-for-python | sdk/servicefabric/azure-mgmt-servicefabric/setup.py | Python | mit | 2,683 | 0.001491 | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-servicefabric"
PACKAGE_PPRINT_NAME = "Service Fabric Management"

# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')

# Version extraction inspired from 'requests': read VERSION out of
# version.py (or _version.py when that is the file present).
with open(os.path.join(package_folder_path, 'version.py')
          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Cannot find version information')

# The long description shown on PyPI is README followed by the changelog.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
    changelog = f.read()

setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + '\n\n' + changelog,
    long_description_content_type='text/markdown',
    license='MIT License',
    author='Microsoft Corporation',
    author_email='azpysdkhelp@microsoft.com',
    url='https://github.com/Azure/azure-sdk-for-python',
    keywords="azure, azure sdk",  # update with search keywords relevant to the azure service / product
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'License :: OSI Approved :: MIT License',
    ],
    zip_safe=False,
    packages=find_packages(exclude=[
        'tests',
        # Exclude packages that will be covered by PEP420 or nspkg
        'azure',
        'azure.mgmt',
    ]),
    install_requires=[
        'msrest>=0.6.21',
        'azure-common~=1.1',
        'azure-mgmt-core>=1.3.0,<2.0.0',
    ],
    python_requires=">=3.6"
)
|
hgl888/flatbuffers | tests/MyGame/Example/AnyUniqueAliases.py | Python | apache-2.0 | 169 | 0.005917 | # automatically generated by the Fla | tBuffers compiler, do not modify
# namespace: Example
class AnyUniqu | eAliases(object):
NONE = 0
M = 1
T = 2
M2 = 3
|
be-cloud-be/horizon-addons | partner-contact/partner_financial_risk/wizard/__init__.py | Python | agpl-3.0 | 60 | 0 | # -*- cod | ing: ut | f-8 -*-
from . import parner_risk_exceeded
|
ellak-monades-aristeias/naturebank | naturebank/management/commands/migrate_legacy_data.py | Python | agpl-3.0 | 42,355 | 0.001063 | import os
from django.conf impor | t settings
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from django.utils.encoding import DjangoUnicodeDecodeError
from naturebank.models import (
AbandonmentOption,
Biotope,
BiotopeCategoryOption,
ClimateOption | ,
ConditionOption,
ConservationOption,
CulturalValueOption,
DesignationOption,
EcologicalValueOption,
GeoCodeOption,
HabitationOption,
HumanActivityOption,
KnowledgeOption,
OwnerOption,
SiteTypeOption,
SitetypeOption,
SocialReactionOption,
SocialValueOption,
Species,
SpeciesBiotope,
SpeciesCategoryOption,
SpeciesConservationOption,
SpeciesConservationPriorityOption,
SpeciesKnowledgeOption,
SpeciesPlantKindOption,
SpeciesProtectionOption,
SpeciesRarityOption,
SpeciesTrendOption,
ThreatOption,
TrendOption,
TrendPopOption,
WideArea,
)
from naturebank_legacy.models import Biolocation
from naturebank_legacy.models import Biosuppl as LegacyBiosuppl
from naturebank_legacy.models import Biotopes as LegacyBiotope
from naturebank_legacy.models import (
Charr,
CodeConserv,
CodeWide,
Desig,
Docu,
Geocodes,
Geology,
History,
Human,
Infra,
Lengthwidth,
Lutclimate,
Lutculturalvalue,
Lutdesignation,
Lutecologicalvalue,
Luthabitation,
Luthumanactivity,
Lutsitetype,
Lutsocialvalue,
LutspeciesConse,
LutspeciesConsPrio,
LutspeciesKnowledge,
LutspeciesOrg2,
LutspeciesOrganism,
LutspeciesProtection,
LutspeciesRaret,
LutspeciesTrend,
Lutthreat,
OptAbandon,
OptCondition,
OptKnowledge,
OptOwn,
OptSocial,
OptTrend,
OptTrendPop,
Own,
OwnType,
Paths,
Qual,
)
from naturebank_legacy.models import Species as LegacySpecies
from naturebank_legacy.models import (
Specor,
Tblclimate,
Tblculturalvalue,
Tbldesignation,
Tblecologicalvalue,
Tblhabitation,
Tblhumanactivity,
Tblsitetype,
Tblsocialvalue,
Tblspeciescomment,
Tblthreat,
Tourism,
Viewing,
Vuln,
)
class Command(BaseCommand):
help = "Management command for the migration of legacy data to naturebank."
def handle(self, *args, **options):
# Fill the Lookup Tables for Species
SpeciesCategoryOption.objects.all().delete()
lut = LutspeciesOrg2.objects.all()
for entry in lut:
draft = SpeciesCategoryOption(abbreviation=entry.org2, name=entry.descorg2)
draft.save()
SpeciesPlantKindOption.objects.all().delete()
lut = LutspeciesOrganism.objects.all()
for entry in lut:
draft = SpeciesPlantKindOption(
abbreviation=entry.organism, name=entry.descorganism
)
draft.save()
SpeciesKnowledgeOption.objects.all().delete()
lut = LutspeciesKnowledge.objects.all()
for entry in lut:
draft = SpeciesKnowledgeOption(
abbreviation=entry.knowledge, name=entry.descknowledge
)
draft.save()
SpeciesProtectionOption.objects.all().delete()
lut = LutspeciesProtection.objects.all()
for entry in lut:
draft = SpeciesProtectionOption(
abbreviation=entry.protection, name=entry.descprotection
)
draft.save()
SpeciesConservationPriorityOption.objects.all().delete()
lut = LutspeciesConsPrio.objects.all()
for entry in lut:
draft = SpeciesConservationPriorityOption(
abbreviation=entry.cons_prio, name=entry.descconservationpriority
)
draft.save()
SpeciesTrendOption.objects.all().delete()
lut = LutspeciesTrend.objects.all()
for entry in lut:
draft = SpeciesTrendOption(abbreviation=entry.trend, name=entry.desctrend)
draft.save()
SpeciesConservationOption.objects.all().delete()
lut = LutspeciesConse.objects.all()
for entry in lut:
draft = SpeciesConservationOption(
abbreviation=entry.conse, name=entry.descconservation
)
draft.save()
SpeciesRarityOption.objects.all().delete()
lut = LutspeciesRaret.objects.all()
for entry in lut:
draft = SpeciesRarityOption(abbreviation=entry.raret, name=entry.descrarety)
draft.save()
# Fill the naturebank.Species table
Species.objects.all().delete()
legacy = LegacySpecies.objects.all()
for entry in legacy:
draft = Species(
creation_date=entry.date1,
update_date=entry.update1,
# Maybe we switch to Float again (cause it is a double in C)
species_code=int(entry.speci_code),
species_name=entry.speci_name,
sub_species=entry.sub_speci,
other_names=entry.oth_names,
species_name_gr=entry.name_gr,
habitat=entry.habitat,
expansion=entry.expans,
origin=entry.origin,
respondent=entry.respondent,
category_ende=int(entry.categ_ende),
category_migr=int(entry.categ_migr),
category_bree=int(entry.categ_bree),
category_resi=int(entry.categ_resi),
category_intr=int(entry.categ_intr),
category=entry.category,
measures_take=entry.meas_take,
measures_need=entry.meas_need,
threat_hunt=int(entry.threa_hunt),
threat_fish=int(entry.threa_fish),
threat_coll=int(entry.threa_coll),
threat_fore=int(entry.threa_fore),
threat_graz=int(entry.threa_graz),
threat_poll=int(entry.threa_poll),
threat_cult=int(entry.threa_cult),
threat_tour=int(entry.threa_tour),
threat_road=int(entry.threa_road),
threat_buil=int(entry.threa_buil),
threat_drai=int(entry.threa_drai),
threat_eutr=int(entry.threa_eutr),
threat_pest=int(entry.threa_pest),
threat_other=int(entry.threa_othe),
exploit_hunt=int(entry.expl_hunt),
exploit_fish=int(entry.expl_fish),
exploit_coll=int(entry.expl_coll),
exploit_logg=int(entry.expl_logg),
exploit_graz=int(entry.expl_graz),
threat=entry.threat,
)
# Foreign Keys Handling
try:
val = entry.org2
if val:
draft.species_category = SpeciesCategoryOption.objects.get(
abbreviation=val
)
except SpeciesCategoryOption.DoesNotExist:
print("SpeciesCategoryOption entry does not exist")
try:
val = entry.organism
if val:
draft.plant_kind = SpeciesPlantKindOption.objects.get(
abbreviation=val
)
except SpeciesPlantKindOption.DoesNotExist:
print("SpeciesPlantKindOption entry does not exist")
try:
val = entry.knowledge
if val:
draft.knowledge = SpeciesKnowledgeOption.objects.get(
abbreviation=val
)
except SpeciesKnowledgeOption.DoesNotExist:
print("SpeciesKnowledgeOption entry does not exist")
try:
val = entry.protection
if val:
draft.protection = SpeciesProtectionOption.objects.get(
abbreviation=val
)
except SpeciesProtectionOption.DoesNotExist:
print("SpeciesProtectionOption entry does not exist")
try:
val = entry.cons_prio
if val:
|
DasIch/argvard | argvard/signature.py | Python | apache-2.0 | 7,016 | 0.000143 | # coding: utf-8
# Copyright 2013 Daniel Neuhäuser
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
argvard.signature
~~~~~~~~~~~~~~~~~
:copyright: 2013 by Daniel Neuhäuser
:license: Apache License 2.0, see LICENSE for more details
"""
import re
from argvard.exceptions import InvalidSignature, ArgumentMissing
# (name, regex) pairs describing the full signature grammar's tokens.
_TOKENS = [
    ('identifier', r'[a-zA-Z_][a-zA-Z_0-9]*'),
    ('repetition', r'\.\.\.'),
    ('[', r'\['),
    (']', r'\]'),
    ('space', r' +')
]
# Option signatures only allow identifiers and spaces -- no repetitions or
# optional groups.
_OPTION_TOKENS = [
    (name, regex) for name, regex in _TOKENS
    if name not in ['repetition', '[', ']']
]
def _build_tokenizer(tokens):
regex = re.compile('|'.join('(%s)' % regex for name, regex in tokens))
def _tokenize(string):
position = 0
while position < len(string):
match = regex.match(string, position)
if match:
position = match.end()
yield tokens[match.lastindex - 1][0], match.group()
else:
raise InvalidSignature(string, position)
return _tokenize
# Module-level tokenizers for the two grammars.
_tokenize = _build_tokenizer(_TOKENS)
_option_tokenize = _build_tokenizer(_OPTION_TOKENS)
def _parse_signature(signature, option=True):
    """Parse *signature* into a list of patterns.

    With ``option=True`` the restricted option grammar is used (identifiers
    and spaces only). Raises :exc:`InvalidSignature` on malformed input.
    """
    if option:
        tokens = _option_tokenize(signature)
    else:
        tokens = _tokenize(signature)
    state = 0, list(tokens)
    patterns = []
    state = _parse_words(state, patterns)
    # A well-formed signature consumes every token; leftovers (e.g. a stray
    # ']') are a caller error, not an internal invariant, so raise the
    # documented exception instead of using assert (which both raised the
    # wrong type and disappears entirely under ``python -O``).
    if state[0] != len(state[1]):
        raise InvalidSignature(
            'unexpected token: %r' % (state[1][state[0]], ))
    return patterns
def _parse_words(state, patterns):
    """Parse a sequence of words until the tokens run out or a closing ']'
    is reached; appends patterns and returns the new (position, tokens)."""
    position, tokens = state
    while position < len(tokens):
        if tokens[position][0] == ']':
            break
        position, tokens = _parse_word((position, tokens), patterns)
        if position < len(tokens) and tokens[position][0] == ']':
            break
    return position, tokens
def _parse_word(state, patterns):
    """Parse one word -- an optional group, a repetition, or a plain
    argument -- plus the separating space that may follow it."""
    position, tokens = _either(state, patterns, [
        _parse_optional, _parse_repetition, _parse_argument
    ])
    if position < len(tokens):
        if tokens[position][0] == 'space':
            position += 1  # consume the word separator
        elif tokens[position][0] != ']':
            # Only a space or the end of an optional group may follow a word.
            raise InvalidSignature()
    return position, tokens
def _parse_optional(state, patterns):
    """Parse ``[ word... ]`` into an Optional pattern wrapping its
    sub-patterns; raises InvalidSignature on missing brackets."""
    position, tokens = state
    if tokens[position][0] != '[':
        raise InvalidSignature('expected [, got %r' % (tokens[position], ))
    position += 1
    rv = []
    # An optional group must contain at least one word, then any further
    # words up to the closing bracket.
    position, tokens = _parse_word((position, tokens), rv)
    position, tokens = _parse_words((position, tokens), rv)
    if tokens[position][0] != ']':
        raise InvalidSignature('expected ], got %r' % (tokens[position], ))
    position += 1
    patterns.append(Optional(rv))
    return position, tokens
def _parse_repetition(state, patterns):
    """Parse ``identifier...`` into a Repetition pattern; requires an
    identifier token immediately followed by a repetition token."""
    position, tokens = state
    if position + 1 >= len(tokens):
        raise InvalidSignature('expected at least one more token')
    if tokens[position + 1][0] != 'repetition':
        raise InvalidSignature(
            'expected repetition as next token, got %r' % (tokens[position + 1], )
        )
    if tokens[position][0] != 'identifier':
        raise InvalidSignature(
            'expected identifier, got %r' % (tokens[position], )
        )
    patterns.append(Repetition(Argument(tokens[position][1])))
    # Two tokens consumed: the identifier and the '...'.
    return position + 2, tokens
def _parse_argument(state, patterns):
    """Parse a single identifier token into an Argument pattern."""
    position, tokens = state
    type, lexeme = tokens[position]
    if type == 'identifier':
        patterns.append(Argument(lexeme))
        return position + 1, tokens
    raise InvalidSignature('expected identifier, got: %r' % ((type, lexeme), ))
def _either(state, patterns, parsers):
    """Try each parser in turn, committing the first one that succeeds;
    re-raise the last parser's InvalidSignature when none match."""
    error = None
    for parser in parsers:
        transaction = []
        try:
            new_state = parser(state, transaction)
        except InvalidSignature as exc:
            # Remember the failure; a later alternative may still succeed.
            error = exc
        else:
            patterns.extend(transaction)
            return new_state
    # Bug fix: the original ended with a bare ``raise``, which works on
    # Python 2 but fails on Python 3 with "no active exception to re-raise"
    # because the except block above has already exited and cleared the
    # exception. Re-raise the captured error explicitly instead.
    raise error
class Signature(object):
    """
    A parsed signature: an ordered sequence of patterns.
    """
    @classmethod
    def from_string(cls, string, option=True):
        """
        Build a :class:`Signature` from the given `string`. With
        `option=True` the restricted option grammar is used (no repetitions
        or optional groups).

        Raises :exc:`InvalidSignature` if the string cannot be parsed.
        """
        return cls(_parse_signature(string, option=option))

    def __init__(self, patterns):
        self.patterns = patterns

    @property
    def usage(self):
        """
        A usage string describing this signature.
        """
        parts = [u'<%s>' % pattern.usage for pattern in self.patterns]
        return u' '.join(parts)

    def parse(self, argv):
        """
        Parse `argv` and return a mapping of argument names to the values
        found in it.
        """
        parsed = {}
        for pattern in self.patterns:
            pattern.apply(parsed, argv)
        return parsed

    def call_with_arguments(self, callable, argv):
        """
        Parse `argv` and invoke `callable` with the result as keyword
        arguments.
        """
        return callable(**self.parse(argv))
class Argument(object):
    """
    A required positional argument called `name`.
    """
    def __init__(self, name):
        self.name = name

    @property
    def usage(self):
        return self.name

    def apply(self, result, argv):
        # Consume exactly one value from argv; if it is exhausted the
        # required argument is missing.
        for value in argv:
            result[self.name] = value
            return
        raise ArgumentMissing('%s is missing' % self.usage)
class Repetition(object):
    """
    One or more occurrences of the wrapped `pattern`.
    """
    def __init__(self, pattern):
        self.pattern = pattern

    @property
    def usage(self):
        return u'%s...' % (self.pattern.usage, )

    def apply(self, result, argv):
        collected = list(argv)
        if not collected:
            raise ArgumentMissing('%s is missing' % self.usage)
        # Hand the whole remaining argument list to the wrapped pattern as
        # one single value.
        self.pattern.apply(result, iter([collected]))
class Optional(object):
    """
    Zero or one occurrence of the given `patterns`.
    """
    def __init__(self, patterns):
        self.patterns = patterns

    @property
    def usage(self):
        inner = u' '.join(pattern.usage for pattern in self.patterns)
        return u'[%s]' % inner

    def apply(self, result, argv):
        # Stage matches separately so the group either commits fully or
        # not at all.
        staged = {}
        saved = argv.position
        try:
            for pattern in self.patterns:
                pattern.apply(staged, argv)
        except ArgumentMissing:
            # Roll the argument cursor back; the optional group simply
            # did not match.
            argv.position = saved
        else:
            result.update(staged)
|
gosom/back-to-basics | search/build-demo-data.py | Python | unlicense | 1,168 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import forgery_py
def main():
    """Generate fake name/phone catalog files of geometrically increasing
    size (each catalog doubles the previous one's entry count).

    Python 2 script: uses ``xrange`` and the ``print >>`` redirection form,
    and the third-party forgery_py package for fake data.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', dest='init_size', type=int, default=1000,
                        help='The initial number of entries.DEFAULT 1000')
    parser.add_argument('-s', dest='num', type=int, default=10,
                        help='How many catalogs to build. Each one has'
                        ' double number of elements than the previous.'
                        'DEFAULT 10')
    parser.add_argument('-f', dest='fname', default='data/%d_catalog.txt',
                        help='The filename to store the catalogs.'
                        'DEFAULT catalog%%d.txt')
    args = parser.parse_args()

    num_entries = args.init_size
    for i in xrange(1, args.num+1):
        catalog = args.fname % i
        with open(catalog, 'w') as f:
            for _ in xrange(num_entries):
                name = forgery_py.forgery.name.full_name()
                phone = forgery_py.forgery.address.phone()
                # One tab-separated "name<TAB>phone" record per line.
                print >> f, name + '\t' + phone
        num_entries *= 2


if __name__ == '__main__':
    main()
|
USCSoftwareEngineeringClub/pyceratOpsRecs | src/interface/hello.py | Python | mit | 1,646 | 0.044957 | import requests, urllib, httplib, base64
from flask import Flask, render_template, request, jsonify
app = Flask(__name__)
@app.route("/")
def hello():
return render_template("index.html")
@app.route("/search", methods=['POST', 'GET'])
def callAPI():
error = None
_url = 'https://api.projectoxford.ai/vision/v1.0/ocr' #https://api.projectoxford.ai/vision/v1.0/ocr[?language][&detectOrientation ]
_key = "f8968ffd96d2475cb7ec347c51f24e3e" #Here you have to paste your primary | key it is a header
_maxNumRetries = 10
bodyURL = request.args.get('uri','')
print(bodyURL)
headersIn = {
"Content-Type": "application/json",
"Host": "api.projectoxford.ai",
"Ocp-Apim-Subscription-Key": _key
}
paramsIn = urllib.urlencode({
"language": "en",
"detectOrientation": "false"
})
data={"url":"https://csgsarchitects.files.wordpress.com/2011/12/111_new-blog.jpg"}
try:
r = requests.post(_url, json=data,\
| params=paramsIn, headers=headersIn)
print r.json()
returnVal = {"data": r.json()}
return returnVal
#
# conn = httplib.HTTPSConnection('api.projectoxford.ai')
# conn.request("POST", "/vision/v1.0/ocr?%s" % paramsIn, "{body}", headersIn)
# response = conn.getresponse()
# data = response.read()
# print(data)
# conn.close()
#
# print 'hello'
# conn.request("POST", "/vision/v1.0/ocr?%s" % params, {"url":"http://example.com/images/test.jpg"}, headers)
# response = conn.getresponse()
# data = response.read()
# print(data)
# conn.close()
except Exception as e:
print(e)
return e
if __name__ == "__main__":
app.run(host='0.0.0.0')
|
ESS-LLP/frappe | frappe/integrations/doctype/ldap_settings/ldap_settings.py | Python | mit | 4,156 | 0.029355 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cstr
from frappe.model.document import Document
class LDAPSettings(Document):
    """Singleton DocType holding the LDAP server configuration; the stored
    credentials are validated against the live server on save."""

    def validate(self):
        # Skip the live server round-trip when mandatory checks are being
        # bypassed (e.g. during install/migrate).
        if not self.flags.ignore_mandatory:
            self.validate_ldap_credentails()

    def validate_ldap_credentails(self):
        # NOTE: the method name keeps its historical typo ("credentails")
        # because it is part of the public interface.
        try:
            import ldap
            conn = ldap.initialize(self.ldap_server_url)
            try:
                if self.ssl_tls_mode == 'StartTLS':
                    conn.set_option(ldap.OPT_X_TLS_DEMAND, True)
                    if self.require_trusted_certificate == 'Yes':
                        conn.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND)
                    conn.start_tls_s()
            except:
                frappe.throw(_("StartTLS is not supported"))

            # Prove the credentials by binding with the configured base DN.
            conn.simple_bind_s(self.base_dn, self.get_password(raise_exception=False))

        except ImportError:
            msg = """
            <div>
                {{_("Seems ldap is not installed on system.<br>Guidelines to install ldap dependancies and python package")}},
                <a href="https://discuss.erpnext.com/t/frappe-v-7-1-beta-ldap-dependancies/15841" target="_blank">{{_("Click here")}}</a>,
            </div>
            """
            frappe.throw(msg, title=_("LDAP Not Installed"))

        except ldap.LDAPError:
            conn.unbind_s()
            frappe.throw(_("Incorrect UserId or Password"))
def get_ldap_settings():
    """Return the LDAP Settings doc augmented with the login method path,
    or an empty frappe dict when the settings cannot be loaded."""
    try:
        settings = frappe.get_doc("LDAP Settings")
        settings.update({
            "method": "frappe.integrations.doctype.ldap_settings.ldap_settings.login"
        })
        return settings
    except Exception:
        # this will return blank settings
        return frappe._dict()
@frappe.whitelist(allow_guest=True)
def login():
#### LDAP LOGIN LOGIC #####
args = frappe.form_dict
user = authenticate_ldap_user(frappe.as_unicode(args.usr), frappe.as_unicode(args.pwd))
frappe.local.login_manager.user = user.name
frappe.local.login_manager.post_login()
# because of a GET request!
frappe.db.commit()
def authenticate_ldap_user(user=None, password=None):
    """Authenticate *user*/*password* against the configured LDAP server.

    Binds with the configured service credentials, searches for the user's
    DN, then re-binds as that DN with the supplied password. On success the
    matching local User document is returned (created on first login);
    otherwise frappe.throw aborts the request.
    """
    dn = None
    params = {}
    settings = get_ldap_settings()

    try:
        import ldap
    except ImportError:
        # Narrowed from a bare "except:": only a missing python-ldap should
        # produce the "not installed" message.
        # (Also fixed the broken '<a href"' attribute in the markup below.)
        msg = """
        <div>
            {{_("Seems ldap is not installed on system.")}}<br>
            <a href="https://discuss.erpnext.com/t/frappe-v-7-1-beta-ldap-dependancies/15841">{{_("Click here")}}</a>,
            {{_("Guidelines to install ldap dependancies and python")}}
        </div>
        """
        frappe.throw(msg, title=_("LDAP Not Installed"))

    conn = ldap.initialize(settings.ldap_server_url)
    try:
        try:
            # set TLS settings for secure connection
            if settings.ssl_tls_mode == 'StartTLS':
                conn.set_option(ldap.OPT_X_TLS_DEMAND, True)
                if settings.require_trusted_certificate == 'Yes':
                    conn.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND)
                conn.start_tls_s()
        except Exception:
            # Narrowed from a bare "except:" so signals like
            # KeyboardInterrupt are no longer swallowed here.
            frappe.throw(_("StartTLS is not supported"))

        # Bind as the service account, then look the user up.
        conn.simple_bind_s(settings.base_dn, settings.get_password(raise_exception=False))
        result = conn.search_s(settings.organizational_unit, ldap.SCOPE_SUBTREE,
                               settings.ldap_search_string.format(user))
        for dn, r in result:
            dn = cstr(dn)
            params["email"] = cstr(r[settings.ldap_email_field][0])
            params["username"] = cstr(r[settings.ldap_username_field][0])
            params["first_name"] = cstr(r[settings.ldap_first_name_field][0])

        if dn:
            # Re-bind as the user's own DN to verify the password.
            conn.simple_bind_s(dn, frappe.as_unicode(password))
            return create_user(params)
        else:
            frappe.throw(_("Not a valid LDAP user"))

    except ldap.LDAPError:
        conn.unbind_s()
        frappe.throw(_("Incorrect UserId or Password"))
def create_user(params):
    """Return the User for params["email"], creating a Blogger-role
    System User on first LDAP login."""
    email = params["email"]
    if frappe.db.exists("User", email):
        return frappe.get_doc("User", email)

    params.update({
        "doctype": "User",
        "send_welcome_email": 0,
        "language": "",
        "user_type": "System User",
        "roles": [{
            "role": _("Blogger")
        }]
    })
    new_user = frappe.get_doc(params).insert(ignore_permissions=True)
    frappe.db.commit()
    return new_user
|
theoneandonly-vector/LaZagne | Windows/src/LaZagne/config/header.py | Python | lgpl-3.0 | 1,270 | 0.030709 | import logging
from colorama import init, Fore, Back, Style
class Header():
def __init__(self):
init() # for colorama
def first_title(self):
init()
print Style.BRIGHT + Fore.WHITE
print '|====================================================================|'
print '| |'
print '| The LaZagne Project |'
print '| |'
print '| ! BANG BANG ! |'
print '| |'
print '|====================================================================|'
print Style.RESET_ALL
# info option for the logging
def title(self, title):
print Style.BRIGHT + Fore.WHITE + '------------------- ' | + title + ' passwords -----------------\n' + Style.RESET_ALL
# Subtitle
def title1(self, title1):
print S | tyle.BRIGHT + Fore.WHITE + '[*] ' + title1 + '\n' + Style.RESET_ALL
# debug option for the logging
def title_info(self, title):
logging.info(Style.BRIGHT + Fore.WHITE + '------------------- ' + title + ' passwords -----------------\n' + Style.RESET_ALL)
|
dmeulen/home-assistant | homeassistant/config.py | Python | mit | 11,390 | 0.000176 | """Module to help with parsing and generating configuration files."""
import asyncio
import logging
import os
import shutil
from types import MappingProxyType
# pylint: disable=unused-import
from typing import Any, Tuple # NOQA
import voluptuous as vol
from homeassistant.const import (
CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, CONF_UNIT_SYSTEM,
CONF_TIME_ZONE, CONF_CUSTOMIZE, CONF_ELEVATION, CONF_UNIT_SYSTEM_METRIC,
CONF_UNIT_SYSTEM_IMPERIAL, CONF_TEMPERATURE_UNIT, TEMP_CELSIUS,
__version__)
from homeassistant.core import valid_entity_id
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import load_yaml
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import set_customize
from homeassistant.util import dt as date_util, location as loc_util
from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM
_LOGGER = logging.getLogger(__name__)
YAML_CONFIG_FILE = 'configuration.yaml'
VERSION_FILE = '.HA_VERSION'
CONFIG_DIR_NAME = '.homeassistant'
DEFAULT_CORE_CONFIG = (
# Tuples (attribute, default, auto detect property, description)
(CONF_NAME, 'Home', None, 'Name of the location where Home Assistant is '
'running'),
(CONF_LATITUDE, 0, 'latitude', 'Location required to calculate the time'
' the sun rises and sets'),
(CONF_LONGITUDE, 0, 'longitude', None),
(CONF_ELEVATION, 0, None, 'Impacts weather/sunrise data'
' (altitude above sea level in meters)'),
(CONF_UNIT_SYSTEM, CONF_UNIT_SYSTEM_METRIC, None,
'{} for Metric, {} for Imperial'.format(CONF_UNIT_SYSTEM_METRIC,
CONF_UNIT_SYSTEM_IMPERIAL)),
(CONF_TIME_ZONE, 'UTC', 'time_zone', 'Pick yours from here: http://en.wiki'
'pedia.org/wiki/List_of_tz_database_time_zones'),
) # type: Tuple[Tuple[str, Any, Any, str], ...]
DEFAULT_CONFIG = """
# Show links to resources in log and frontend
introduction:
# Enables the frontend
frontend:
http:
# Uncomment this to add a password (recommended!)
# api_password: PASSWORD
# Checks for available updates
updater:
# Discover some devices automatically
discovery:
# Allows you to issue voice commands from the frontend in enabled browsers
conversation:
# Enables support for tracking state changes over time.
history:
# View all events in a logbook
logbook:
# Track the sun
sun:
# Weather Prediction
sensor:
platform: yr
"""
def _valid_customize(value):
    """Validate the ``customize`` mapping: entity-id keys -> dict overrides."""
    if not isinstance(value, dict):
        raise vol.Invalid('Expected dictionary')

    for entity_id, overrides in value.items():
        if not valid_entity_id(entity_id):
            raise vol.Invalid('Invalid entity ID: {}'.format(entity_id))
        if not isinstance(overrides, dict):
            raise vol.Invalid('Value of {} is not a dictionary'.format(entity_id))

    return value
CORE_CONFIG_SCHEMA = vol.Schema({
CONF_NAME: vol.Coerce(str),
CONF_LATITUDE: cv.latitude,
CONF_LONGITUDE: cv.longitude,
CONF_ELEVATION: vol.Coerce(int),
vol.Optional(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
CONF_UNIT_SYSTEM: cv.unit_system,
CONF_TIME_ZONE: cv.time_zone,
vol.Required(CONF_CUSTOMIZE,
default=MappingProxyType({})): _valid_customize,
})
def get_default_config_dir() -> str:
    """Return the default configuration directory for the current OS."""
    if os.name == "nt":
        data_dir = os.getenv('APPDATA')
    else:
        data_dir = os.path.expanduser('~')
    return os.path.join(data_dir, CONFIG_DIR_NAME)
def ensure_config_exists(config_dir: str, detect_location: bool=True) -> str:
    """Ensure a config file exists in the given configuration directory.

    Creates a default one when none is found. Returns the path to the
    config file.
    """
    config_path = find_config_file(config_dir)
    if config_path is not None:
        return config_path

    print("Unable to find configuration. Creating default one in",
          config_dir)
    return create_default_config(config_dir, detect_location)
def create_default_config(config_dir, detect_location=True):
    """Create a default configuration file in given configuration directory.

    Return path to new config file if success, None if failed.
    This method needs to run in an executor (blocking I/O and network).
    """
    config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
    version_path = os.path.join(config_dir, VERSION_FILE)

    # Start from the hard-coded defaults, then overlay auto-detected values.
    info = {attr: default for attr, default, _, _ in DEFAULT_CORE_CONFIG}

    location_info = detect_location and loc_util.detect_location_info()

    if location_info:
        if location_info.use_metric:
            info[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_METRIC
        else:
            info[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_IMPERIAL

        # Copy every auto-detectable property (those with a non-None
        # "auto detect property" slot), falling back to the default.
        for attr, default, prop, _ in DEFAULT_CORE_CONFIG:
            if prop is None:
                continue
            info[attr] = getattr(location_info, prop) or default

        if location_info.latitude and location_info.longitude:
            info[CONF_ELEVATION] = loc_util.elevation(location_info.latitude,
                                                      location_info.longitude)

    # Writing files with YAML does not create the most human readable results
    # So we're hard coding a YAML template.
    try:
        with open(config_path, 'w') as config_file:
            config_file.write("homeassistant:\n")

            for attr, _, _, description in DEFAULT_CORE_CONFIG:
                if info[attr] is None:
                    continue
                elif description:
                    config_file.write(" # {}\n".format(description))
                config_file.write(" {}: {}\n".format(attr, info[attr]))

            # Append the canned component configuration template.
            config_file.write(DEFAULT_CONFIG)

        # Record the HA version so future upgrades can be detected.
        with open(version_path, 'wt') as version_file:
            version_file.write(__version__)

        return config_path

    except IOError:
        print('Unable to create default configuration file', config_path)
        return None
@asyncio.coroutine
def async_hass_config_yaml(hass):
    """Load YAML from hass config File.

    This function allow component inside asyncio loop to reload his config by
    self.

    This method is a coroutine.
    """
    def _read_config():
        # Blocking file access: run off the event loop.
        config_path = find_config_file(hass.config.config_dir)
        return load_yaml_config_file(config_path)

    return (yield from hass.loop.run_in_executor(None, _read_config))
def find_config_file(config_dir):
    """Look in given directory for supported configuration files.

    Async friendly.
    """
    candidate = os.path.join(config_dir, YAML_CONFIG_FILE)
    if os.path.isfile(candidate):
        return candidate
    return None
def load_yaml_config_file(config_path):
    """Parse a YAML configuration file.

    This method needs to run in an executor.
    """
    conf_dict = load_yaml(config_path)

    if isinstance(conf_dict, dict):
        return conf_dict

    # Top level of the config must be a mapping of component -> config.
    msg = 'The configuration file {} does not contain a dictionary'.format(
        os.path.basename(config_path))
    _LOGGER.error(msg)
    raise HomeAssistantError(msg)
def process_ha_config_upgrade(hass):
    """Upgrade the config directory when the HA version has changed.

    This method needs to run in an executor.
    """
    version_path = hass.config.path(VERSION_FILE)

    try:
        with open(version_path, 'rt') as version_file:
            stored_version = version_file.readline().strip()
    except FileNotFoundError:
        # Last version to not have this file
        stored_version = '0.7.7'

    if stored_version == __version__:
        return

    _LOGGER.info('Upgrading config directory from %s to %s', stored_version,
                 __version__)

    # Wipe the cached dependency directory so it is rebuilt for this version.
    deps_dir = hass.config.path('deps')
    if os.path.isdir(deps_dir):
        shutil.rmtree(deps_dir)

    with open(version_path, 'wt') as version_file:
        version_file.write(__version__)
@asyncio.coroutine
def async_process_ha_core_config(hass, config):
"""Process the [homeassistant] section from the config.
This method is a coroutine.
"""
config = CORE_CONFIG_SCHEMA(config)
hac = hass.config
def set_time_zone(time_zo |
tylerlaberge/PyPattyrn | tests/behavioral_tests/test_iterator.py | Python | mit | 2,826 | 0 | from unittest import TestCase
from pypattyrn.behavioral.iterator import Iterable, Iterator
class IterableTestCase(TestCase):
    """
    Unit tests for the Iterable class.
    """
    def setUp(self):
        """
        Build a bounded counter Iterable used by the tests.
        """
        class Counter(Iterable):
            def __init__(self, max):
                self.count = 0
                self.max = max

            def __next__(self):
                self.count += 1
                if self.count > self.max:
                    raise StopIteration
                return self.count - 1

        self.counter_class = Counter

    def test_next(self):
        """
        __next__ must yield 0 .. max-1 in order.

        @raise AssertionError: If the test fails.
        """
        counter = self.counter_class(10)
        for expected in range(10):
            self.assertEquals(expected, counter.__next__())

    def test_stop_iteration(self):
        """
        An exhausted counter must raise StopIteration.

        @raise AssertionError: If the test fails.
        """
        exhausted = self.counter_class(0)
        with self.assertRaises(StopIteration):
            exhausted.__next__()
class IteratorTestCase(TestCase):
    """
    Unit tests for the Iterator class.
    """
    def setUp(self):
        """
        Build an Iterator wrapping a ten-element counter Iterable.
        """
        class Counter(Iterable):
            def __init__(self, max):
                self.count = 0
                self.max = max

            def __next__(self):
                self.count += 1
                if self.count > self.max:
                    raise StopIteration
                return self.count - 1

        class CounterIterator(Iterator):
            def __init__(self):
                super().__init__(Counter(10))

        self.counter_iterator_class = CounterIterator

    def test_next(self):
        """
        The builtin next() must delegate to the wrapped Iterable.

        @raise AssertionError: If the test fails.
        """
        counter_iterator = self.counter_iterator_class()
        for expected in range(10):
            self.assertEquals(expected, next(counter_iterator))

    def test_stop_iteration(self):
        """
        Advancing past the end must raise StopIteration.

        @raise AssertionError: If the test fails.
        """
        counter_iterator = self.counter_iterator_class()
        with self.assertRaises(StopIteration):
            for _ in range(12):
                next(counter_iterator)

    def test_loop(self):
        """
        A for-loop over the Iterator must visit 0..9 in order.

        @raise AssertionError: If the test fails.
        """
        counter_iterator = self.counter_iterator_class()
        for position, value in enumerate(counter_iterator):
            self.assertEquals(position, value)
|
jli05/cs224n-project | optimisers.py | Python | mit | 5,490 | 0.000911 | from __future__ import (division, absolute_import,
print_function, unicode_literals)
import numpy
import theano
import theano.tensor as tensor
profile=False
def itemlist(tparams):
    """Return the parameter values of *tparams* in insertion order."""
    return list(tparams.values())
# name(hyperp, tparams, grads, inputs (list), cost) = f_grad_shared, f_update
def adam(lr, tparams, grads, inp, cost):
    """Adam optimizer.

    Returns a pair of compiled theano functions:
      f_grad_shared(*inp) -> cost  -- computes the cost and stores the
                                      gradients into shared buffers.
      f_update(lr)                 -- applies one Adam step to tparams.

    NOTE(review): ``b1``/``b2`` here play the role of (1 - beta1)/(1 - beta2)
    from the Adam paper, and the ``lr`` input is ignored in favour of the
    hard-coded ``lr0`` (hence on_unused_input='ignore') -- confirm intended.
    """
    # Shared buffers holding the gradients between the two function calls.
    gshared = [theano.shared(p.get_value() * 0.,
                             name='%s_grad' % k)
               for k, p in tparams.items()]
    gsup = [(gs, g) for gs, g in zip(gshared, grads)]

    f_grad_shared = theano.function(inp, cost,
                                    updates=gsup, profile=profile,
                                    allow_input_downcast=True)

    lr0 = 0.0002
    b1 = 0.1
    b2 = 0.001
    e = 1e-8

    updates = []

    # Shared step counter t (float32), used for bias correction below.
    i = theano.shared(numpy.float32(0.))
    i_t = i + 1.
    fix1 = 1. - b1**(i_t)
    fix2 = 1. - b2**(i_t)
    # Bias-corrected step size for step t.
    lr_t = lr0 * (tensor.sqrt(fix2) / fix1)

    for p, g in zip(tparams.values(), gshared):
        # First (m) and second (v) moment estimates, initialised to zero.
        m = theano.shared(p.get_value() * 0.)
        v = theano.shared(p.get_value() * 0.)
        m_t = (b1 * g) + ((1. - b1) * m)
        v_t = (b2 * tensor.sqr(g)) + ((1. - b2) * v)
        g_t = m_t / (tensor.sqrt(v_t) + e)
        p_t = p - (lr_t * g_t)
        updates.append((m, m_t))
        updates.append((v, v_t))
        updates.append((p, p_t))
    updates.append((i, i_t))

    f_update = theano.function([lr], [], updates=updates,
                               on_unused_input='ignore', profile=profile,
                               allow_input_downcast=True)

    return f_grad_shared, f_update
def adadelta(lr, tparams, grads, inp, cost):
    """Adadelta optimizer (decay 0.95, epsilon 1e-6).

    Returns (f_grad_shared, f_update); the ``lr`` input is unused by the
    update rule (hence on_unused_input='ignore').
    """
    # Shared state per parameter: raw gradient, running average of squared
    # updates, and running average of squared gradients.
    zipped_grads = [theano.shared(p.get_value() * numpy.float32(0.),
                                  name='%s_grad' % k)
                    for k, p in tparams.items()]
    running_up2 = [theano.shared(p.get_value() * numpy.float32(0.),
                                 name='%s_rup2' % k)
                   for k, p in tparams.items()]
    running_grads2 = [theano.shared(p.get_value() * numpy.float32(0.),
                                    name='%s_rgrad2' % k)
                      for k, p in tparams.items()]

    zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
    rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
             for rg2, g in zip(running_grads2, grads)]

    f_grad_shared = theano.function(inp, cost, updates=zgup+rg2up,
                                    profile=profile,
                                    allow_input_downcast=True)

    # Update direction: -RMS(accumulated updates)/RMS(accumulated grads) * g.
    updir = [-tensor.sqrt(ru2 + 1e-6) / tensor.sqrt(rg2 + 1e-6) * zg
             for zg, ru2, rg2 in
             zip(zipped_grads, running_up2, running_grads2)]
    ru2up = [(ru2, 0.95 * ru2 + 0.05 * (ud ** 2))
             for ru2, ud in zip(running_up2, updir)]
    param_up = [(p, p + ud) for p, ud in zip(itemlist(tparams), updir)]

    f_update = theano.function([lr], [], updates=ru2up+param_up,
                               on_unused_input='ignore', profile=profile,
                               allow_input_downcast=True)

    return f_grad_shared, f_update
def rmsprop(lr, tparams, grads, inp, cost):
    """RMSProp with momentum.

    Returns a pair of compiled theano functions:
      f_grad_shared(*inp) -> cost  -- computes the cost and caches the
                                      gradients plus their running stats.
      f_update(lr)                 -- applies one step; ``lr`` is unused,
                                      the step size 1e-4 is hard-coded.
    """
    zipped_grads = [theano.shared(p.get_value() * numpy.float32(0.),
                                  name='%s_grad' % k)
                    for k, p in tparams.items()]
    running_grads = [theano.shared(p.get_value() * numpy.float32(0.),
                                   name='%s_rgrad' % k)
                     for k, p in tparams.items()]
    running_grads2 = [theano.shared(p.get_value() * numpy.float32(0.),
                                    name='%s_rgrad2' % k)
                      for k, p in tparams.items()]

    zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
    rgup = [(rg, 0.95 * rg + 0.05 * g) for rg, g in zip(running_grads, grads)]
    rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
             for rg2, g in zip(running_grads2, grads)]

    f_grad_shared = theano.function(inp, cost, updates=zgup+rgup+rg2up,
                                    profile=profile,
                                    allow_input_downcast=True)

    # FIX: was ``tparams.iteritems()`` -- a Python 2-only dict method that
    # raises AttributeError under Python 3 and was inconsistent with the
    # ``.items()`` calls used everywhere else in this module.
    updir = [theano.shared(p.get_value() * numpy.float32(0.),
                           name='%s_updir' % k)
             for k, p in tparams.items()]
    # Momentum 0.9 on the update direction; the denominator is the running
    # estimate of the gradient standard deviation (variance = E[g^2]-E[g]^2).
    updir_new = [(ud, 0.9 * ud - 1e-4 * zg / tensor.sqrt(rg2 - rg ** 2 + 1e-4))
                 for ud, zg, rg, rg2 in zip(updir, zipped_grads, running_grads,
                                            running_grads2)]
    param_up = [(p, p + udn[1])
                for p, udn in zip(itemlist(tparams), updir_new)]

    f_update = theano.function([lr], [], updates=updir_new+param_up,
                               on_unused_input='ignore', profile=profile,
                               allow_input_downcast=True)

    return f_grad_shared, f_update
def sgd(lr, tparams, grads, x, mask, y, cost):
    """Plain stochastic gradient descent.

    Returns (f_grad_shared, f_update): the first computes the cost and
    stashes the gradients in shared storage, the second applies
    ``p <- p - lr * g`` to every parameter.
    """
    shared_grads = [theano.shared(p.get_value() * 0., name='%s_grad' % k)
                    for k, p in tparams.items()]
    grad_updates = list(zip(shared_grads, grads))

    f_grad_shared = theano.function([x, mask, y], cost, updates=grad_updates,
                                    profile=profile,
                                    allow_input_downcast=True)

    descent = [(p, p - lr * g)
               for p, g in zip(itemlist(tparams), shared_grads)]
    f_update = theano.function([lr], [], updates=descent, profile=profile,
                               allow_input_downcast=True)

    return f_grad_shared, f_update
|
HigorSilvaRosa/ForumGeolocalizado | core/models.py | Python | mit | 2,327 | 0.006917 | # coding=utf-8
from django.contrib.auth.models import User
from django.db import models
# Create your models here.
from django.db.models.base import Model
from django.db.models.fields import CharField, TextField, DateTimeField, BooleanField, FloatField
from django.db.models.fields.related import ForeignKey
class BaseModel(Model):
    """Abstract base model: active flag plus created/modified timestamps."""
    # Soft-delete flag: rows are deactivated rather than removed.
    is_active = BooleanField(default=True)
    creation_date = DateTimeField(auto_now_add=True, verbose_name=u"Data de criação")
    modification_date = DateTimeField(auto_now=True, verbose_name=u"Data de modificação")

    class Meta:
        abstract = True
class UserCoordinates(Model):
    """A geolocation sample (latitude/longitude) reported for a user."""
    user = ForeignKey(User, verbose_name=u"Usuário")
    # Timestamp of when the coordinates were recorded.
    date = DateTimeField(auto_now_add=True, verbose_name=u"Data e hora")
    latitude = FloatField(verbose_name=u"Latitude")
    longitude = FloatField(verbose_name=u"Longitude")

    class Meta:
        verbose_name = u"Coordenadas de usuário"
        verbose_name_plural = u"Coordenadas de usuário"
class Topic(BaseModel):
    """A forum topic anchored at the geographic position where it was created."""
    name = CharField(max_length=150, verbose_name=u"Nome")
    user = ForeignKey(User, verbose_name=u"Criador")
    # Topic position; defaults to (0, 0) when no location is supplied.
    latitude = FloatField(verbose_name=u"Latitude", default=0)
    longitude = FloatField(verbose_name=u"Longitude", default=0)

    class Meta:
        verbose_name = u"Tópico"
        verbose_name_plural = u"Tópicos"

    # NOTE(review): __unicode__ is a Python 2 idiom; Django on Python 3
    # would need __str__ instead -- confirm the target interpreter.
    def __unicode__(self):
        return self.name
class Post(BaseModel):
    """A message posted inside a Topic."""
    topic = ForeignKey(Topic, related_name="posts", verbose_name=u"Tópico")
    user = ForeignKey(User, verbose_name=u"Criador")
    content = TextField(verbose_name=u"Texto")

    class Meta:
        verbose_name = u"Postagem"
        verbose_name_plural = u"Postagens"
        # Newest posts first.
        ordering = ["-id"]
class TopicReport(BaseModel):
    """Records that a user flagged a Topic as inappropriate."""
    # NOTE(review): the field is named ``post`` but targets a Topic.
    post = ForeignKey(Topic, related_name="reports", verbose_name=u"Post")
    user = ForeignKey(User, related_name="topic_reports", verbose_name=u"Usuário")

    class Meta:
        verbose_name = u"Tópico reportado"
        verbose_name_plural = u"Tópicos reportados"
class PostReport(BaseModel):
    """Records that a user flagged a Post as inappropriate."""
    post = ForeignKey(Post, related_name="reports", verbose_name=u"Post")
    user = ForeignKey(User, related_name="post_reports", verbose_name=u"Usuário")

    class Meta:
        verbose_name = u"Postagem reportada"
        verbose_name_plural = u"Postagens reportadas"
magus424/powerline | powerline/lib/inotify.py | Python | mit | 6,081 | 0.024009 | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import os
import errno
import ctypes
import struct
from ctypes.util import find_library
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
class INotifyError(Exception):
pass
_inotify = No | ne
def load_inotify():
    ''' Initialize the inotify library.

    Resolves the libc entry points once and memoises them in the module
    global ``_inotify`` as the 4-tuple (init1, add_watch, rm_watch, read).
    Raises INotifyError on unsupported platforms or a too-old libc.
    '''
    global _inotify
    if _inotify is None:
        if hasattr(sys, 'getwindowsversion'):
            # On windows abort before loading the C library. Windows has
            # multiple, incompatible C runtimes, and we have no way of knowing
            # if the one chosen by ctypes is compatible with the currently
            # loaded one.
            raise INotifyError('INotify not available on windows')
        if sys.platform == 'darwin':
            raise INotifyError('INotify not available on OS X')
        if not hasattr(ctypes, 'c_ssize_t'):
            raise INotifyError('You need python >= 2.7 to use inotify')
        name = find_library('c')
        if not name:
            raise INotifyError('Cannot find C library')
        libc = ctypes.CDLL(name, use_errno=True)
        # Verify the libc actually exports the inotify family.
        for function in ("inotify_add_watch", "inotify_init1", "inotify_rm_watch"):
            if not hasattr(libc, function):
                raise INotifyError('libc is too old')
        # Build typed ctypes prototypes; use_errno=True so ctypes.get_errno()
        # reflects the C errno after each call.
        # inotify_init1()
        prototype = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, use_errno=True)
        init1 = prototype(('inotify_init1', libc), ((1, "flags", 0),))

        # inotify_add_watch()
        prototype = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32, use_errno=True)
        add_watch = prototype(('inotify_add_watch', libc), (
            (1, "fd"), (1, "pathname"), (1, "mask")))

        # inotify_rm_watch()
        prototype = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_int, use_errno=True)
        rm_watch = prototype(('inotify_rm_watch', libc), (
            (1, "fd"), (1, "wd")))

        # read()
        prototype = ctypes.CFUNCTYPE(ctypes.c_ssize_t, ctypes.c_int, ctypes.c_void_p, ctypes.c_size_t, use_errno=True)
        read = prototype(('read', libc), (
            (1, "fd"), (1, "buf"), (1, "count")))

        _inotify = (init1, add_watch, rm_watch, read)
    return _inotify
class INotify(object):
    """Thin ctypes wrapper around the Linux inotify API.

    Subclasses override :meth:`process_event`; :meth:`read` drains the
    inotify file descriptor and decodes the raw event records.
    """

    # See <sys/inotify.h> for the flags defined below

    # Supported events suitable for MASK parameter of INOTIFY_ADD_WATCH.
    ACCESS = 0x00000001  # File was accessed.
    MODIFY = 0x00000002  # File was modified.
    ATTRIB = 0x00000004  # Metadata changed.
    CLOSE_WRITE = 0x00000008  # Writtable file was closed.
    CLOSE_NOWRITE = 0x00000010  # Unwrittable file closed.
    OPEN = 0x00000020  # File was opened.
    MOVED_FROM = 0x00000040  # File was moved from X.
    MOVED_TO = 0x00000080  # File was moved to Y.
    CREATE = 0x00000100  # Subfile was created.
    DELETE = 0x00000200  # Subfile was deleted.
    DELETE_SELF = 0x00000400  # Self was deleted.
    MOVE_SELF = 0x00000800  # Self was moved.

    # Events sent by the kernel.
    UNMOUNT = 0x00002000  # Backing fs was unmounted.
    Q_OVERFLOW = 0x00004000  # Event queued overflowed.
    IGNORED = 0x00008000  # File was ignored.

    # Helper events.
    CLOSE = (CLOSE_WRITE | CLOSE_NOWRITE)  # Close.
    MOVE = (MOVED_FROM | MOVED_TO)  # Moves.

    # Special flags.
    ONLYDIR = 0x01000000  # Only watch the path if it is a directory.
    DONT_FOLLOW = 0x02000000  # Do not follow a sym link.
    EXCL_UNLINK = 0x04000000  # Exclude events on unlinked objects.
    MASK_ADD = 0x20000000  # Add to the mask of an already existing watch.
    ISDIR = 0x40000000  # Event occurred against dir.
    ONESHOT = 0x80000000  # Only send event once.

    # All events which a program can wait on.
    ALL_EVENTS = (
        ACCESS | MODIFY | ATTRIB | CLOSE_WRITE | CLOSE_NOWRITE | OPEN |
        MOVED_FROM | MOVED_TO | CREATE | DELETE | DELETE_SELF | MOVE_SELF
    )

    # See <bits/inotify.h>
    CLOEXEC = 0x80000
    NONBLOCK = 0x800

    def __init__(self, cloexec=True, nonblock=True):
        """Create the inotify fd; raises INotifyError when unsupported."""
        self._init1, self._add_watch, self._rm_watch, self._read = load_inotify()
        flags = 0
        if cloexec:
            flags |= self.CLOEXEC
        if nonblock:
            flags |= self.NONBLOCK
        self._inotify_fd = self._init1(flags)
        if self._inotify_fd == -1:
            raise INotifyError(os.strerror(ctypes.get_errno()))

        # Reusable read buffer for raw event records.
        self._buf = ctypes.create_string_buffer(5000)
        self.fenc = sys.getfilesystemencoding() or 'utf-8'
        # Fixed-size event header: wd (int), mask, cookie, name length.
        self.hdr = struct.Struct(b'iIII')
        if self.fenc == 'ascii':
            self.fenc = 'utf-8'
        # We keep a reference to os to prevent it from being deleted
        # during interpreter shutdown, which would lead to errors in the
        # __del__ method
        self.os = os

    def handle_error(self):
        """Raise OSError for the current C errno (with a hint for ENOSPC)."""
        eno = ctypes.get_errno()
        extra = ''
        if eno == errno.ENOSPC:
            extra = 'You may need to increase the inotify limits on your system, via /proc/sys/inotify/max_user_*'
        raise OSError(eno, self.os.strerror(eno) + str(extra))

    def __del__(self):
        # This method can be called during interpreter shutdown, which means we
        # must do the absolute minimum here. Note that there could be running
        # daemon threads that are trying to call other methods on this object.
        try:
            self.os.close(self._inotify_fd)
        except (AttributeError, TypeError):
            pass

    def close(self):
        """Close the fd and drop the ctypes callables."""
        if hasattr(self, '_inotify_fd'):
            self.os.close(self._inotify_fd)
            del self.os
            del self._add_watch
            del self._rm_watch
            del self._inotify_fd

    def read(self, get_name=True):
        """Drain all pending events and dispatch each to process_event().

        When *get_name* is false the file name is skipped (left as None),
        avoiding the decode cost.
        """
        buf = []
        while True:
            num = self._read(self._inotify_fd, self._buf, len(self._buf))
            if num == 0:
                break
            if num < 0:
                en = ctypes.get_errno()
                if en == errno.EAGAIN:
                    break  # No more data
                if en == errno.EINTR:
                    continue  # Interrupted, try again
                raise OSError(en, self.os.strerror(en))
            buf.append(self._buf.raw[:num])
        raw = b''.join(buf)
        pos = 0
        lraw = len(raw)
        # Decode consecutive inotify_event records: a fixed header followed
        # by a NUL-padded name of name_len bytes.
        while lraw - pos >= self.hdr.size:
            wd, mask, cookie, name_len = self.hdr.unpack_from(raw, pos)
            pos += self.hdr.size
            name = None
            if get_name:
                name = raw[pos:pos + name_len].rstrip(b'\0').decode(self.fenc)
            pos += name_len
            self.process_event(wd, mask, cookie, name)

    def process_event(self, *args):
        """Hook for subclasses: called once per decoded event."""
        raise NotImplementedError()
|
roberzguerra/scout_mez | institutional/admin.py | Python | gpl-2.0 | 3,528 | 0.003975 | # -*- coding:utf-8 -*-
from copy import deepcopy
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from django import forms
from django.template.defaultfilters import slugify
from filebrowser_safe.fields import FileBrowseFormField, FileBrowseWidget, FileBrowseField
from mezzanine.conf import settings
from mezzanine.blog.models import BlogPost
from mezzanine.forms import fields
from mezzanine.pages.models import Page
from mezzanine.core.admin import DisplayableAdminForm, TabularDynamicInlineAdmin
from mezzanine.pages.admin import PageAdmin, PageAdminForm
from mezzanine.blog.admin import BlogPostAdmin, blogpost_fieldsets
from mezzanine.utils.models import upload_to
from models import Team, ScoutGroupPage, HomePage, Slide, SocialLinks
from scout_core.admin import page_fieldsets
class BlogPostAdminForm(DisplayableAdminForm):
    """
    Customized admin form for BlogPost.

    Makes the "Show in sitemap" flag optional and unchecked by default,
    and adjusts the featured-image labels and upload directory.
    """
    in_sitemap = forms.BooleanField(label=_(u"Show in sitemap"), required=False, initial=False)

    def __init__(self, *args, **kwargs):
        super(BlogPostAdminForm, self).__init__(*args, **kwargs)
        # Relabel the featured image and point image_top uploads at the
        # blog upload folder.
        self.fields['featured_image'].label = _(u"Imagem destaque")
        self.fields['featured_image'].help_text = _(u"Imagem destaque da notícia, resolução mínima 460x260px ou proporcional.")
        self.fields['image_top'].directory = upload_to("blog.BlogPost.featured_image", "blog")
blogpost_fieldsets[0][1]["fields"].insert(4, "image_top")
BlogPostAdmin.form = BlogPostAdminForm
admin.site.unregister(BlogPost)
admin.site.register(Blo | gPost, BlogPostAdmin)
class PageAdminInstitutionalForm(PageAdminForm):
    """
    Customized admin form for site Pages.

    Makes the "Show in sitemap" flag optional and unchecked by default.
    """
    in_sitemap = forms.BooleanField(label=_(u"Show in sitemap"), required=False, initial=False)
# Re-register Page with the form that keeps it out of the sitemap by default.
PageAdmin.form = PageAdminInstitutionalForm
admin.site.unregister(Page)
admin.site.register(Page, PageAdmin)
class SocialLinkInline(TabularDynamicInlineAdmin):
    """Inline editor for a home page's social links."""
    model = SocialLinks
class SlideInline(TabularDynamicInlineAdmin):
    """Inline editor for a home page's carousel slides."""
    model = Slide
class HomePageAdmin(PageAdmin):
    """
    Admin for the HomePage: selects featured posts/teams and edits
    slides and social links inline.
    """
    filter_horizontal = ("blog_posts", "teams", )
    inlines = [SlideInline, SocialLinkInline, ]
# admin_classes_with_slides = [HomePageAdmin, ] #FormAdmin, GalleryAdmin]
# for admin_class in admin_classes_with_slides:
#     setattr(admin_class, 'inlines', list(admin_class.inlines) + [SlideInline])

# Copy of the shared page fieldsets with "categories" spliced in for teams.
team_fields = deepcopy(page_fieldsets)
team_fields[0][1]["fields"].insert(5, u"categories")
class TeamAdmin(PageAdmin):
    """
    Admin for Team pages (page fieldsets extended with "categories").
    """
    fieldsets = team_fields
    #fieldsets = ((None, {"fields": ("title",)}),)

    # def in_menu(self):
    #     """
    #     Hide from the admin menu unless explicitly set in ``ADMIN_MENU_ORDER``.
    #     """
    #     for (name, items) in settings.ADMIN_MENU_ORDER:
    #         if "people.PersonCategory" in items:
    #             return True
    #     return False
# Copy of the shared page fieldsets with "type" spliced in for scout pages.
scout_core_page_fields = deepcopy(page_fieldsets)
scout_core_page_fields[0][1]["fields"].insert(5, u"type")
class ScoutGroupPageAdmin(PageAdmin):
    """
    Admin for Scout Group and District pages (page fieldsets plus "type").
    """
    fieldsets = scout_core_page_fields
# Register the custom page types with their admin classes.
admin.site.register(HomePage, HomePageAdmin)
admin.site.register(Team, TeamAdmin)
admin.site.register(ScoutGroupPage, ScoutGroupPageAdmin)
|
IT-PM-OpenAdaptronik/Webapp | apps/register/urls.py | Python | mit | 1,410 | 0.000709 | """ License
MIT License
Copyright (c) 2017 OpenAdaptronik
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublic | ense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from django.urls import path
from .views import IndexView, register_success, register_activate
# URL namespace, e.g. reverse('register:activate').
app_name = 'register'

urlpatterns = [
    # Activation link from the registration e-mail: base64 user id + token.
    path('activate/<uidb64>/<slug:token>/', register_activate, name='activate'),
    path('success/', register_success, name='success'),
    path('', IndexView.as_view(), name='index'),
]
|
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/fs/commands/fstree.py | Python | agpl-3.0 | 2,201 | 0.009995 | #!/usr/bin/env python
import sys
from fs.opener import opener
from fs.commands.runner import Command
from fs.utils import print_fs
class FSTree(Command):
    """``fstree`` command: recursively print a filesystem path as an ascii tree."""

    usage = """fstree [OPTION]... [PATH]
Recursively display the contents of PATH in an ascii tree"""

    def get_optparse(self):
        """Extend the base option parser with tree-specific options."""
        optparse = super(FSTree, self).get_optparse()
        optparse.add_option('-l', '--level', dest='depth', type="int", default=5,
                            help="Descend only LEVEL directories deep (-1 for infinite)", metavar="LEVEL")
        optparse.add_option('-g', '--gui', dest='gui', action='store_true', default=False,
                            help="browse the tree with a gui")
        optparse.add_option('-a', '--all', dest='all', action='store_true', default=False,
                            help="do not hide dot files")
        optparse.add_option('-d', '--dirsfirst', dest='dirsfirst', action='store_true', default=False,
                            help="List directories before files")
        return optparse

    def do_run(self, options, args):
        """Render the tree for the requested path; returns 1 on error."""
        if not args:
            args = ['.']  # default to the current directory

        for fs, path, is_dir in self.get_resources(args, single=True):
            if not is_dir:
                self.error(u"'%s' is not a dir\n" % path)
                return 1
            fs.cache_hint(True)
            if options.gui:
                # Interactive browsing via the fs GUI browser.
                from fs.browsewin import browse
                if path:
                    fs = fs.opendir(path)
                browse(fs, hide_dotfiles=not options.all)
            else:
                # depth < 0 means unlimited recursion.
                if options.depth < 0:
                    max_levels = None
                else:
                    max_levels = options.depth
                print_fs(fs, path or '',
                         file_out=self.output_file,
                         max_levels=max_levels,
                         terminal_colors=self.terminal_colors,
                         hide_dotfiles=not options.all,
                         dirs_first=options.dirsfirst)
def run():
    """Console entry point for the ``fstree`` command."""
    command = FSTree()
    return command.run()
if __name__ == "__main__":
sys.exit(run())
|
bslatkin/pycon2014 | lib/asyncio-0.4.1/tests/test_futures.py | Python | apache-2.0 | 11,378 | 0 | """Tests for futures.py."""
import concurrent.futures
import threading
import unittest
import unittest.mock
import asyncio
from asyncio import test_utils
def _fakefunc(f):
return f
class FutureTests(unittest.TestCase):
def setUp(self):
    """Give each test a private TestLoop and clear the global event loop."""
    self.loop = test_utils.TestLoop()
    asyncio.set_event_loop(None)
def tearDown(self):
    """Close the per-test loop."""
    self.loop.close()
def test_initial_state(self):
    """A fresh Future is neither cancelled nor done; cancel() marks it."""
    f = asyncio.Future(loop=self.loop)
    self.assertFalse(f.cancelled())
    self.assertFalse(f.done())
    f.cancel()
    self.assertTrue(f.cancelled())
def test_init_constructor_default_loop(self):
    """Without an explicit loop, Future picks up the current event loop."""
    try:
        asyncio.set_event_loop(self.loop)
        f = asyncio.Future()
        self.assertIs(f._loop, self.loop)
    finally:
        asyncio.set_event_loop(None)
def test_constructor_positional(self):
    """The Future constructor rejects positional arguments."""
    # Make sure Future doesn't accept a positional argument
    self.assertRaises(TypeError, asyncio.Future, 42)
def test_cancel(self):
    """After cancel(): result()/exception() raise, state is frozen, and a
    second cancel() returns False."""
    f = asyncio.Future(loop=self.loop)
    self.assertTrue(f.cancel())
    self.assertTrue(f.cancelled())
    self.assertTrue(f.done())
    self.assertRaises(asyncio.CancelledError, f.result)
    self.assertRaises(asyncio.CancelledError, f.exception)
    self.assertRaises(asyncio.InvalidStateError, f.set_result, None)
    self.assertRaises(asyncio.InvalidStateError, f.set_exception, None)
    self.assertFalse(f.cancel())
def test_result(self):
    """set_result() makes the future done; further mutation and cancel fail."""
    f = asyncio.Future(loop=self.loop)
    # Pending future: result() is not yet available.
    self.assertRaises(asyncio.InvalidStateError, f.result)

    f.set_result(42)
    self.assertFalse(f.cancelled())
    self.assertTrue(f.done())
    self.assertEqual(f.result(), 42)
    self.assertEqual(f.exception(), None)
    self.assertRaises(asyncio.InvalidStateError, f.set_result, None)
    self.assertRaises(asyncio.InvalidStateError, f.set_exception, None)
    self.assertFalse(f.cancel())
def test_exception(self):
    """set_exception() makes result() raise and exception() return it."""
    exc = RuntimeError()
    f = asyncio.Future(loop=self.loop)
    # Pending future: exception() is not yet available.
    self.assertRaises(asyncio.InvalidStateError, f.exception)

    f.set_exception(exc)
    self.assertFalse(f.cancelled())
    self.assertTrue(f.done())
    self.assertRaises(RuntimeError, f.result)
    self.assertEqual(f.exception(), exc)
    self.assertRaises(asyncio.InvalidStateError, f.set_result, None)
    self.assertRaises(asyncio.InvalidStateError, f.set_exception, None)
    self.assertFalse(f.cancel())
def test_exception_class(self):
    """Passing an exception class (not instance) stores an instance of it."""
    f = asyncio.Future(loop=self.loop)
    f.set_exception(RuntimeError)
    self.assertIsInstance(f.exception(), RuntimeError)
def test_yield_from_twice(self):
    """Once resolved, a second ``yield from fut`` returns immediately
    without yielding the future again."""
    f = asyncio.Future(loop=self.loop)

    def fixture():
        yield 'A'
        x = yield from f
        yield 'B', x
        y = yield from f
        yield 'C', y

    g = fixture()
    self.assertEqual(next(g), 'A')  # yield 'A'.
    self.assertEqual(next(g), f)  # First yield from f.
    f.set_result(42)
    self.assertEqual(next(g), ('B', 42))  # yield 'B', x.
    # The second "yield from f" does not yield f.
    self.assertEqual(next(g), ('C', 42))  # yield 'C', y.
def test_repr(self):
    """repr() reflects the state (PENDING/CANCELLED/result/exception) and
    elides long callback lists."""
    f_pending = asyncio.Future(loop=self.loop)
    self.assertEqual(repr(f_pending), 'Future<PENDING>')
    f_pending.cancel()

    f_cancelled = asyncio.Future(loop=self.loop)
    f_cancelled.cancel()
    self.assertEqual(repr(f_cancelled), 'Future<CANCELLED>')

    f_result = asyncio.Future(loop=self.loop)
    f_result.set_result(4)
    self.assertEqual(repr(f_result), 'Future<result=4>')
    self.assertEqual(f_result.result(), 4)

    exc = RuntimeError()
    f_exception = asyncio.Future(loop=self.loop)
    f_exception.set_exception(exc)
    self.assertEqual(repr(f_exception), 'Future<exception=RuntimeError()>')
    self.assertIs(f_exception.exception(), exc)

    f_few_callbacks = asyncio.Future(loop=self.loop)
    f_few_callbacks.add_done_callback(_fakefunc)
    self.assertIn('Future<PENDING, [<function _fakefunc',
                  repr(f_few_callbacks))
    f_few_callbacks.cancel()

    # With 20 callbacks the repr shows the first/last and elides the rest.
    f_many_callbacks = asyncio.Future(loop=self.loop)
    for i in range(20):
        f_many_callbacks.add_done_callback(_fakefunc)
    r = repr(f_many_callbacks)
    self.assertIn('Future<PENDING, [<function _fakefunc', r)
    self.assertIn('<18 more>', r)
    f_many_callbacks.cancel()
def test_copy_state(self):
    """_copy_state transfers result, exception, and cancellation."""
    # Test the internal _copy_state method since it's being directly
    # invoked in other modules.
    f = asyncio.Future(loop=self.loop)
    f.set_result(10)

    newf = asyncio.Future(loop=self.loop)
    newf._copy_state(f)
    self.assertTrue(newf.done())
    self.assertEqual(newf.result(), 10)

    f_exception = asyncio.Future(loop=self.loop)
    f_exception.set_exception(RuntimeError())

    newf_exception = asyncio.Future(loop=self.loop)
    newf_exception._copy_state(f_exception)
    self.assertTrue(newf_exception.done())
    self.assertRaises(RuntimeError, newf_exception.result)

    f_cancelled = asyncio.Future(loop=self.loop)
    f_cancelled.cancel()

    newf_cancelled = asyncio.Future(loop=self.loop)
    newf_cancelled._copy_state(f_cancelled)
    self.assertTrue(newf_cancelled.cancelled())
def test_iter(self):
fut = asyncio.Future(loop=self.loop)
def coro():
yield from fut
def test():
arg1, arg2 = coro()
self.assertRaises(AssertionError, test)
fut.cancel()
@unittest.mock.patch('asyncio.base_events.logger')
def test_tb_logger_abandoned(self, m_log):
fut = asyncio.Future(loop=self.loop)
del fut
self.assertFalse(m_log.error.called)
@unittest.mock.patch('asyncio.base_events.logger')
def test_tb_logger_result_unretrieved(self, m_log):
fut = asyncio.Future(loop=self.loop)
fut.set_result(42)
del fut
self.assertFalse(m_log.error.called)
@unittest.mock.patch('asyncio.base_events.logger')
def test_tb_logger_result_retrieved(self, m_log):
fut = asyncio.Future(loop=self.loop)
fut.set_result(42)
fut.result()
del fut
self.assertFalse(m_log.error.called)
@unittest.mock.patch('asyncio.base_events.logger')
def test_tb_logger_exception_unretrieved(self, m_log):
fut = asyncio.Future(loop=self.loop)
fut.set_exception( | RuntimeError('boom'))
del fut
test_utils.run_briefly(self.loop)
self.assertTrue(m_log.error.called)
@unittest.mock.patch('asyncio.base_events.logger')
def test_tb_logger_exception_retrieved(self, m_log):
fut = asyncio.Future(loop=self.loop)
fut.set_exception(RuntimeError('boom'))
fut.exception()
del fut
self.assertFalse(m_log.error.called)
@unittest.mock.patch('asyncio.base_events.logger')
def test_tb_logger_exception_result_retrieved(self, m_log):
fut = asyncio.Future(loop=self.loop)
fut.set_exception(RuntimeError('boom'))
self.assertRaises(RuntimeError, fut.result)
del fut
self.assertFalse(m_log.error.called)
def test_wrap_future(self):
def run(arg):
return (arg, threading.get_ident())
ex = concurrent.futures.ThreadPoolExecutor(1)
f1 = ex.submit(run, 'oi')
f2 = asyncio.wrap_future(f1, loop=self.loop)
res, ident = self.loop.run_until_complete(f2)
self.assertIsInstance(f2, asyncio.Future)
self.assertEqual(res, 'oi')
self.assertNotEqual(ident, threading.get_ident())
def test_wrap_future_future(self):
f1 = asyncio.Future(loop=self.loop)
f2 = asyncio.wrap_future(f1)
self.assertIs(f1, f2)
@unittest.mock.patch('asyncio.futures.events')
def test_wrap_future_use_global_loop(self, m_events):
def run(arg):
r |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.