text stringlengths 4 1.02M | meta dict |
|---|---|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the systemic-vision tracking fields to ``TeamStatus``.

    ``systemic_vision`` holds a short status code (human-readable labels come
    from ``choices``); ``systemic_vision_comment`` is free-form text.
    """

    dependencies = [
        ('dashboard', '0015_auto_20170105_1917'),
    ]

    operations = [
        migrations.AddField(
            model_name='teamstatus',
            name='systemic_vision',
            # Stored values are the short codes ('NS', 'IMS', ...); max_length=5
            # comfortably fits the longest code ('IMS').
            field=models.CharField(choices=[('NS', 'Not Started'), ('IMS', 'Intro Mail Sent'), ('DA', 'Date Arranged'), ('CH', 'Call Happened')], default='NS', max_length=5, verbose_name='Systemic Vision Status'),
        ),
        migrations.AddField(
            model_name='teamstatus',
            name='systemic_vision_comment',
            # blank=True: the comment is optional in forms/admin.
            field=models.TextField(blank=True, verbose_name='Systemic Vision Comment'),
        ),
    ]
| {
"content_hash": "199c4031ede70c46f2bcb8ce332c9a68",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 213,
"avg_line_length": 33.17391304347826,
"alnum_prop": 0.6028833551769331,
"repo_name": "jarifibrahim/ashoka-dashboard",
"id": "5e2985cdc5e43ecd25c99fb0d8c1469bfd114f2c",
"size": "836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dashboard/migrations/0016_auto_20170106_2126.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "61360"
},
{
"name": "Python",
"bytes": "174912"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import User
from django.conf import settings
from timestack import facebook
from timestack.models import *
class FacebookBackend:
    """Django authentication backend that authenticates via a Facebook
    OAuth access token.

    On first login the backend fetches the Graph API profile, creates a
    ``User`` plus the linked ``Person`` record, and marks the password
    unusable (login is only possible through Facebook).
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, token=None):
        """Return the ``User`` for ``token``, or ``None`` on any failure.

        Django's auth framework expects ``None`` (not an exception) when a
        backend cannot authenticate, so all errors are swallowed here.
        """
        try:
            try:
                # Not the first login: look the person up by the stored
                # access token; assume the app was not deauthorized.
                person = Person.objects.get(access_token=token)
            except Person.DoesNotExist:
                profile = facebook.GraphAPI(token).get_object("me")
                uid = profile['id']
                try:
                    # First login with this token, but the Facebook account
                    # is already known: refresh the stored token.
                    person = Person.objects.get(uid=uid)
                    person.access_token = token
                    # person.user.email = profile['email']
                    person.user.save()
                    person.save()
                except Person.DoesNotExist:
                    # First login and the Facebook account is unknown:
                    # create both the Django user and the Person record.
                    person = Person(uid=uid, access_token=token,
                                    profile_url=profile['link'])
                    user = User(username=uid,
                                first_name=profile['first_name'],
                                last_name=profile['last_name'])
                    user.set_unusable_password()
                    user.save()
                    person.user = user
                    person.save()
            return person.user
        except Exception:
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception while preserving the
            # "return None on failure" contract.
            return None
    # TODO: handle deauth callback

    def get_user(self, user_id):
        """Return the ``User`` with primary key ``user_id``, or ``None``."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
"content_hash": "2e570d30497909102c73d3f15a1e7a79",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 121,
"avg_line_length": 39,
"alnum_prop": 0.4883190883190883,
"repo_name": "flashycud/timestack",
"id": "bc3c619d934bf526aa67fd511220c0e002350909",
"size": "1755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "timestack/backends.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48889"
},
{
"name": "JavaScript",
"bytes": "130064"
},
{
"name": "Python",
"bytes": "4067543"
}
],
"symlink_target": ""
} |
from repo import Repo
from Phone import Phone
from exceptions import StoreException
import sys
class UI:
    """Console UI for the phone store: menu loop plus one handler per action."""

    def __init__(self):
        """
        Creates the UI class
        """
        self.repo = Repo()

    def _printPhoneTable(self, phones):
        """
        Prints phones as a fixed-width table.
        Extracted helper: this rendering code was duplicated verbatim in
        handleFindPhone and handleListPhones.
        Input: phones - iterable of Phone objects
        """
        print("-" * 49)
        print("|{: ^15}|{: ^15}|{: ^15}|".format("Manufacturer", "Model", "Price"))
        print("-" * 49)
        for phone in phones:
            print("|{: ^15}|{: ^15}|{: ^15}|".format(
                phone.getManufacturer(), phone.getModel(), phone.getPrice()))
        # Bottom border of the table.
        print("-" * 49)

    def handleAddNewPhone(self):
        """
        Adds a phone to the repository
        Input: None
        """
        manufacturer = input("Input manufacturer: ")
        model = input("Input model: ")
        price = input("Price: ")
        self.repo.addPhone(Phone(manufacturer=manufacturer, model=model, price=price))

    def handleFindPhone(self):
        """
        Finds phones by manufacturer and prints the matches
        Input: None
        """
        manufacturer = input("Input manufacturer: ")
        result = self.repo.findPhoneByManufacturer(manufacturer)
        self._printPhoneTable(result)

    def handleIncreasePhonePrice(self):
        """
        Increases price of a specific phone from the repository
        Input: None
        """
        manufacturer = input("Input manufacturer: ")
        model = input("Input model: ")
        value = input("Value: ")
        self.repo.increasePrice(manufacturer, model, value)

    def handleIncreasePhonesPrice(self):
        """
        Increase the price of all phones by a percent
        Input: None
        """
        value = input("Percent: ")
        self.repo.increaseByPercent(value)

    def handleListPhones(self):
        """
        Lists all phones
        Input: None
        """
        self._printPhoneTable(self.repo.getPhones())

    def run(self):
        """
        Entry point to UI class: shows the menu and dispatches until exit
        Input: None
        """
        print("Phone Store UI")
        strings = ["1. Add New Phone",
                   "2. Find phone",
                   "3. Increase price of a phone",
                   "4. Increase price of all phones",
                   "5. List all phones",
                   "0. Exit"]
        # Index in this table matches the menu number typed by the user.
        calltable = [lambda: sys.exit(), self.handleAddNewPhone, self.handleFindPhone,
                     self.handleIncreasePhonePrice,
                     self.handleIncreasePhonesPrice, self.handleListPhones]
        while True:
            for entry in strings:
                print(entry)
            option = input("> ")
            # Guard clauses instead of nested if/else.
            if not isInt(option):
                print("Option was not an integer")
                continue
            option = int(option)
            if option not in range(0, 6):
                print("Invalid option")
                continue
            try:
                calltable[option]()
            except StoreException as ex:
                print("[EXCEPTION]:", ex)
                input("Press any key to continue")
def isInt(value):
    """
    Checks if a value can be parsed as an integer
    Input: value - the string to be checked
    Output: True/False
    """
    try:
        int(value)
    except ValueError:
        return False
    return True
| {
"content_hash": "e484e6fa8c565d66ee8f241a35f5ba25",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 114,
"avg_line_length": 31.24561403508772,
"alnum_prop": 0.5042111173498035,
"repo_name": "Zephyrrus/ubb",
"id": "894599f425608fc4c76ca4cde3f1ea8b27b5bcdd",
"size": "3562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "YEAR 1/SEM1/FP/LAB/L7 - Partial/ui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "96"
},
{
"name": "Assembly",
"bytes": "24190"
},
{
"name": "Batchfile",
"bytes": "80"
},
{
"name": "C",
"bytes": "504974"
},
{
"name": "C#",
"bytes": "116117"
},
{
"name": "C++",
"bytes": "406145"
},
{
"name": "CMake",
"bytes": "116836"
},
{
"name": "CSS",
"bytes": "507511"
},
{
"name": "Common Lisp",
"bytes": "4926"
},
{
"name": "Dockerfile",
"bytes": "601"
},
{
"name": "HTML",
"bytes": "774629"
},
{
"name": "Hack",
"bytes": "1348"
},
{
"name": "Java",
"bytes": "225193"
},
{
"name": "JavaScript",
"bytes": "1323357"
},
{
"name": "Kotlin",
"bytes": "80576"
},
{
"name": "M",
"bytes": "812"
},
{
"name": "MATLAB",
"bytes": "14300"
},
{
"name": "Makefile",
"bytes": "62922"
},
{
"name": "PHP",
"bytes": "26576"
},
{
"name": "PLSQL",
"bytes": "3270"
},
{
"name": "PLpgSQL",
"bytes": "73862"
},
{
"name": "Perl 6",
"bytes": "324"
},
{
"name": "Prolog",
"bytes": "5214"
},
{
"name": "Python",
"bytes": "315759"
},
{
"name": "QMake",
"bytes": "5282"
},
{
"name": "Shell",
"bytes": "4089"
},
{
"name": "TSQL",
"bytes": "79222"
},
{
"name": "XSLT",
"bytes": "1953"
},
{
"name": "Yacc",
"bytes": "1718"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from past.builtins import basestring
from future.utils import text_to_native_str
import logging
import subprocess
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.config_schema import one_or_more
from flexget.utils.template import render_from_entry, render_from_task, RenderError
from flexget.utils.tools import io_encoding
log = logging.getLogger('exec')
class EscapingEntry(Entry):
    """Helper class, same as a Entry, but returns all string value with quotes escaped."""

    def __init__(self, entry):
        super(EscapingEntry, self).__init__(entry)

    def __getitem__(self, key):
        raw_value = super(EscapingEntry, self).__getitem__(key)
        # TODO: May need to be different depending on OS
        if not isinstance(raw_value, basestring):
            return raw_value
        return raw_value.replace('"', '\\"')
class PluginExec(object):
    """
    Execute commands

    Simple example, execute command for entries that reach output::

      exec: echo 'found {{title}} at {{url}}' > file

    Advanced Example::

      exec:
        on_start:
          phase: echo "Started"
        on_input:
          for_entries: echo 'got {{title}}'
        on_output:
          for_accepted: echo 'accepted {{title}} - {{url}}' > file

    You can use all (available) entry fields in the command.
    """

    NAME = 'exec'
    # Task phases this plugin hooks; handlers are synthesized in __getattr__.
    HANDLED_PHASES = ['start', 'input', 'filter', 'output', 'exit']

    # Config is either a bare command (string or list of strings) or a mapping
    # of on_<phase> sections; prepare_config normalizes the shorthand forms.
    schema = {
        'oneOf': [
            one_or_more({'type': 'string'}),
            {
                'type': 'object',
                'properties': {
                    'on_start': {'$ref': '#/definitions/phaseSettings'},
                    'on_input': {'$ref': '#/definitions/phaseSettings'},
                    'on_filter': {'$ref': '#/definitions/phaseSettings'},
                    'on_output': {'$ref': '#/definitions/phaseSettings'},
                    'on_exit': {'$ref': '#/definitions/phaseSettings'},
                    'fail_entries': {'type': 'boolean'},
                    'auto_escape': {'type': 'boolean'},
                    'encoding': {'type': 'string'},
                    'allow_background': {'type': 'boolean'},
                },
                'additionalProperties': False,
            },
        ],
        'definitions': {
            'phaseSettings': {
                'type': 'object',
                'properties': {
                    'phase': one_or_more({'type': 'string'}),
                    'for_entries': one_or_more({'type': 'string'}),
                    'for_accepted': one_or_more({'type': 'string'}),
                    'for_rejected': one_or_more({'type': 'string'}),
                    'for_undecided': one_or_more({'type': 'string'}),
                    'for_failed': one_or_more({'type': 'string'}),
                },
                'additionalProperties': False,
            }
        },
    }

    def prepare_config(self, config):
        """Expand shorthand config into the full nested-dict form.

        A bare string/list becomes ``{'on_output': {'for_accepted': [...]}}``;
        single command strings inside phase sections become one-element lists;
        the default encoding is filled in from the system io_encoding.
        """
        if isinstance(config, basestring):
            config = [config]
        if isinstance(config, list):
            config = {'on_output': {'for_accepted': config}}
        if not config.get('encoding'):
            config['encoding'] = io_encoding
        for phase_name in config:
            if phase_name.startswith('on_'):
                for items_name in config[phase_name]:
                    if isinstance(config[phase_name][items_name], basestring):
                        config[phase_name][items_name] = [config[phase_name][items_name]]
        return config

    def execute_cmd(self, cmd, allow_background, encoding):
        """Run *cmd* through the shell; return the exit status, or None when
        backgrounded.

        NOTE(review): the ``encoding`` parameter is unused — the module-level
        ``io_encoding`` is used both to encode the command line and to decode
        its output. Confirm whether config['encoding'] was intended here.
        """
        log.verbose('Executing: %s', cmd)
        p = subprocess.Popen(
            text_to_native_str(cmd, encoding=io_encoding),
            shell=True,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            close_fds=False,
        )
        if not allow_background:
            r, w = (p.stdout, p.stdin)
            response = r.read().decode(io_encoding)
            r.close()
            w.close()
            if response:
                log.info('Stdout: %s', response.rstrip())  # rstrip to get rid of newlines
            return p.wait()

    def execute(self, task, phase_name, config):
        """Render and run every command configured for *phase_name*.

        Per-entry sections (for_entries/for_accepted/...) run once per entry
        with entry-template rendering; the 'phase' section runs once with
        task-level rendering.
        """
        config = self.prepare_config(config)
        if phase_name not in config:
            log.debug('phase %s not configured' % phase_name)
            return
        # Maps config section name -> the entry collection it applies to.
        name_map = {
            'for_entries': task.entries,
            'for_accepted': task.accepted,
            'for_rejected': task.rejected,
            'for_undecided': task.undecided,
            'for_failed': task.failed,
        }
        allow_background = config.get('allow_background')
        for operation, entries in name_map.items():
            if operation not in config[phase_name]:
                continue
            log.debug(
                'running phase_name: %s operation: %s entries: %s'
                % (phase_name, operation, len(entries))
            )
            for entry in entries:
                for cmd in config[phase_name][operation]:
                    entrydict = EscapingEntry(entry) if config.get('auto_escape') else entry
                    # Do string replacement from entry, but make sure quotes get escaped
                    try:
                        cmd = render_from_entry(cmd, entrydict)
                    except RenderError as e:
                        log.error('Could not set exec command for %s: %s' % (entry['title'], e))
                        # fail the entry if configured to do so
                        if config.get('fail_entries'):
                            entry.fail(
                                'Entry `%s` does not have required fields for string replacement.'
                                % entry['title']
                            )
                        continue
                    log.debug(
                        'phase_name: %s operation: %s cmd: %s' % (phase_name, operation, cmd)
                    )
                    if task.options.test:
                        log.info('Would execute: %s' % cmd)
                    else:
                        # Make sure the command can be encoded into appropriate encoding, don't actually encode yet,
                        # so logging continues to work.
                        try:
                            cmd.encode(config['encoding'])
                        except UnicodeEncodeError:
                            log.error(
                                'Unable to encode cmd `%s` to %s' % (cmd, config['encoding'])
                            )
                            if config.get('fail_entries'):
                                entry.fail(
                                    'cmd `%s` could not be encoded to %s.'
                                    % (cmd, config['encoding'])
                                )
                            continue
                        # Run the command, fail entries with non-zero return code if configured to
                        if self.execute_cmd(
                            cmd, allow_background, config['encoding']
                        ) != 0 and config.get('fail_entries'):
                            entry.fail('exec return code was non-zero')
        # phase keyword in this
        if 'phase' in config[phase_name]:
            for cmd in config[phase_name]['phase']:
                try:
                    cmd = render_from_task(cmd, task)
                except RenderError as e:
                    log.error('Error rendering `%s`: %s' % (cmd, e))
                else:
                    log.debug('phase cmd: %s' % cmd)
                    if task.options.test:
                        log.info('Would execute: %s' % cmd)
                    else:
                        self.execute_cmd(cmd, allow_background, config['encoding'])

    def __getattr__(self, item):
        """Creates methods to handle task phases."""
        for phase in self.HANDLED_PHASES:
            if item == plugin.phase_methods[phase]:
                # A phase method we handle has been requested
                break
        else:
            # We don't handle this phase
            raise AttributeError(item)

        def phase_handler(task, config):
            self.execute(task, 'on_' + phase, config)

        # Make sure we run after other plugins so exec can use their output
        phase_handler.priority = 100
        return phase_handler
@event('plugin.register')
def register_plugin():
    """Register the exec plugin with FlexGet (api_ver=2 schema-based config)."""
    plugin.register(PluginExec, 'exec', api_ver=2)
| {
"content_hash": "20b515c37f1429011f321a0e5963a4a3",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 116,
"avg_line_length": 38.69911504424779,
"alnum_prop": 0.49702721243997255,
"repo_name": "tobinjt/Flexget",
"id": "ad20bb8661b716a0d1c33171eabba408d4397dcf",
"size": "8746",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "flexget/plugins/output/exec.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11875"
},
{
"name": "Dockerfile",
"bytes": "2338"
},
{
"name": "HTML",
"bytes": "79800"
},
{
"name": "JavaScript",
"bytes": "263723"
},
{
"name": "Python",
"bytes": "3492888"
},
{
"name": "SRecode Template",
"bytes": "3"
},
{
"name": "Shell",
"bytes": "1576"
}
],
"symlink_target": ""
} |
"""
Django settings for tournament_registration_project project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import join, dirname
from configurations import Configuration, values
BASE_DIR = dirname(dirname(__file__))
class Common(Configuration):
    """Base settings shared by every environment.

    Uses django-configurations: environment-specific classes subclass this
    and override values; ``values.*`` settings can be driven by env vars.
    """

    # APP CONFIGURATION
    DJANGO_APPS = (
        # Default Django apps:
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.sites',
        'django.contrib.messages',
        'django.contrib.staticfiles',

        # Useful template tags:
        # 'django.contrib.humanize',

        # Admin
        'django.contrib.admin',
        'django.contrib.admindocs',
    )
    THIRD_PARTY_APPS = (
        'crispy_forms',  # Form layouts
        'avatar',  # for user avatars
        'allauth',  # registration
        'allauth.account',  # registration
        'allauth.socialaccount',  # registration
    )

    # Apps specific for this project go here.
    LOCAL_APPS = (
        'users',  # custom users app
        'registration',
        'payment',
        'capitalism',
        # Your stuff: custom apps go here
        'django_extensions',
    )

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
    INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
    # END APP CONFIGURATION

    # MIDDLEWARE CONFIGURATION
    MIDDLEWARE_CLASSES = (
        # Make sure djangosecure.middleware.SecurityMiddleware is listed first
        'djangosecure.middleware.SecurityMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
    )
    # END MIDDLEWARE CONFIGURATION

    # MIGRATIONS CONFIGURATION
    MIGRATION_MODULES = {
        'sites': 'contrib.sites.migrations'
    }
    # END MIGRATIONS CONFIGURATION

    # DEBUG
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
    DEBUG = values.BooleanValue(False)

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
    TEMPLATE_DEBUG = DEBUG
    # END DEBUG

    # SECRET CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
    # Note: This key only used for development and testing.
    # In production, this is changed to a values.SecretValue() setting
    SECRET_KEY = "CHANGEME!!!"
    # END SECRET CONFIGURATION

    # FIXTURE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
    FIXTURE_DIRS = (
        join(BASE_DIR, 'fixtures'),
    )
    # END FIXTURE CONFIGURATION

    # EMAIL CONFIGURATION
    EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
    # END EMAIL CONFIGURATION

    # MANAGER CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
    ADMINS = (
        ('Andrés Saraos Luna', 'eldruz@outlook.com'),
    )

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
    MANAGERS = ADMINS
    # END MANAGER CONFIGURATION

    # DATABASE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
    DATABASES = values.DatabaseURLValue('postgres://localhost/tournament_registration')
    # END DATABASE CONFIGURATION

    # CACHING
    # Do this here because thanks to django-pylibmc-sasl and pylibmc
    # memcacheify (used on heroku) is painful to install on windows.
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': ''
        }
    }
    # END CACHING

    # SESSION
    # Use a cached persistent session
    SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
    # END SESSION CONFIGURATION

    # GENERAL CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
    TIME_ZONE = 'Europe/Paris'

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
    LANGUAGE_CODE = 'fr-fr'

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
    SITE_ID = 1

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
    USE_I18N = True

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
    USE_L10N = True

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
    USE_TZ = True
    # END GENERAL CONFIGURATION

    # TEMPLATE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
    TEMPLATE_CONTEXT_PROCESSORS = (
        'django.contrib.auth.context_processors.auth',
        "allauth.account.context_processors.account",
        "allauth.socialaccount.context_processors.socialaccount",
        'django.core.context_processors.debug',
        'django.core.context_processors.i18n',
        'django.core.context_processors.media',
        'django.core.context_processors.static',
        'django.core.context_processors.tz',
        'django.contrib.messages.context_processors.messages',
        'django.core.context_processors.request',
        # Your stuff: custom template context processers go here
    )

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
    TEMPLATE_DIRS = (
        join(BASE_DIR, 'templates'),
    )

    TEMPLATE_LOADERS = (
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    )

    # See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
    CRISPY_TEMPLATE_PACK = 'bootstrap3'
    # END TEMPLATE CONFIGURATION

    # STATIC FILE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
    STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
    STATIC_URL = '/static/'

    # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
    STATICFILES_DIRS = (
        join(BASE_DIR, 'static'),
    )

    # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
    STATICFILES_FINDERS = (
        'django.contrib.staticfiles.finders.FileSystemFinder',
        'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    )
    # END STATIC FILE CONFIGURATION

    # MEDIA CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
    MEDIA_ROOT = join(BASE_DIR, 'media')

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
    MEDIA_URL = '/media/'
    # END MEDIA CONFIGURATION

    # URL Configuration
    ROOT_URLCONF = 'urls'

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
    WSGI_APPLICATION = 'wsgi.application'
    # End URL Configuration

    # AUTHENTICATION CONFIGURATION
    AUTHENTICATION_BACKENDS = (
        "django.contrib.auth.backends.ModelBackend",
        "allauth.account.auth_backends.AuthenticationBackend",
    )

    # Some really nice defaults
    ACCOUNT_AUTHENTICATION_METHOD = "username"
    ACCOUNT_EMAIL_REQUIRED = True
    ACCOUNT_EMAIL_VERIFICATION = "mandatory"
    # END AUTHENTICATION CONFIGURATION

    # Custom user app defaults
    # Select the correct user model
    AUTH_USER_MODEL = "users.User"
    LOGIN_REDIRECT_URL = "users:redirect"
    LOGIN_URL = "account_login"
    # END Custom user app defaults

    # SLUGLIFIER
    AUTOSLUG_SLUGIFY_FUNCTION = "slugify.slugify"
    # END SLUGLIFIER

    # LOGGING CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
    # A sample logging configuration. The only tangible logging
    # performed by this configuration is to send an email to
    # the site admins on every HTTP 500 error when DEBUG=False.
    # See http://docs.djangoproject.com/en/dev/topics/logging for
    # more details on how to customize your logging configuration.
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'filters': {
            'require_debug_false': {
                '()': 'django.utils.log.RequireDebugFalse'
            }
        },
        'handlers': {
            'mail_admins': {
                'level': 'ERROR',
                'filters': ['require_debug_false'],
                'class': 'django.utils.log.AdminEmailHandler'
            }
        },
        'loggers': {
            'django.request': {
                'handlers': ['mail_admins'],
                'level': 'ERROR',
                'propagate': True,
            },
        }
    }
    # END LOGGING CONFIGURATION

    # Your common stuff: Below this line define 3rd party library settings
| {
"content_hash": "ef22c82c0bed7c4717b5441e679bf8bf",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 102,
"avg_line_length": 33.061371841155236,
"alnum_prop": 0.6583315134308801,
"repo_name": "eldruz/tournament_registration",
"id": "52f90fc131f6ead46803c77cf6cc4b8c3b130cd7",
"size": "9183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tournament_registration/config/common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1210"
},
{
"name": "JavaScript",
"bytes": "44"
},
{
"name": "Python",
"bytes": "100432"
},
{
"name": "Shell",
"bytes": "5129"
}
],
"symlink_target": ""
} |
from base64 import b64decode
import binascii
import codecs
from io import BytesIO
import struct
import re
from Crypto.Cipher import AES
from Crypto.Util import number
from Crypto.PublicKey import RSA
from .account import Account
from .chunk import Chunk
# OpenSSL constant
RSA_PKCS1_OAEP_PADDING = 4
# Secure note types that contain account-like information
ALLOWED_SECURE_NOTE_TYPES = [
b"Server",
b"Email Account",
b"Database",
b"Instant Messenger",
]
def extract_chunks(blob):
    """Splits the blob into chucks grouped by kind."""
    stream = BytesIO(blob.bytes)
    total = len(blob.bytes)
    chunks = []
    while stream.tell() < total:
        chunks.append(read_chunk(stream))
    return chunks
def parse_ACCT(chunk, encryption_key):
    """
    Parses an account chunk, decrypts and creates an Account object.

    May return None when the chunk does not represent an account.
    All secure notes are ACCTs but not all of them store account
    information.
    """
    # TODO: Make a test case that covers secure note account
    io = BytesIO(chunk.payload)
    # Field order below is fixed by the LastPass blob layout; skip_item
    # jumps over fields this parser does not use.
    id = read_item(io)
    name = decode_aes256_plain_auto(read_item(io), encryption_key)
    group = decode_aes256_plain_auto(read_item(io), encryption_key)
    url = decode_hex(read_item(io))
    notes = decode_aes256_plain_auto(read_item(io), encryption_key)
    skip_item(io, 2)
    username = decode_aes256_plain_auto(read_item(io), encryption_key)
    password = decode_aes256_plain_auto(read_item(io), encryption_key)
    skip_item(io, 2)
    secure_note = read_item(io)
    # Parse secure note
    if secure_note == b"1":
        skip_item(io, 17)
        secure_note_type = read_item(io)
        # Only "Server" secure note stores account information
        if secure_note_type not in ALLOWED_SECURE_NOTE_TYPES:
            return None
        # Secure-note accounts keep url/username/password in the notes body.
        url, username, password = parse_secure_note_server(notes)
    return Account(id, name, username, password, url, group)
def parse_PRIK(chunk, encryption_key):
    """Parse PRIK chunk which contains private RSA key"""
    # Payload is hex encoded and AES-CBC encrypted; the first 16 bytes of
    # the vault encryption key double as the IV.
    decrypted = decode_aes256('cbc',
                              encryption_key[:16],
                              decode_hex(chunk.payload),
                              encryption_key)
    # The plaintext wraps the hex key in LastPassPrivateKey<...> markers.
    hex_key = re.match(br'^LastPassPrivateKey<(?P<hex_key>.*)>LastPassPrivateKey$', decrypted).group('hex_key')
    rsa_key = RSA.importKey(decode_hex(hex_key))
    # Precompute the CRT components used for faster RSA decryption.
    rsa_key.dmp1 = rsa_key.d % (rsa_key.p - 1)
    rsa_key.dmq1 = rsa_key.d % (rsa_key.q - 1)
    rsa_key.iqmp = number.inverse(rsa_key.q, rsa_key.p)
    return rsa_key
def parse_SHAR(chunk, encryption_key, rsa_key):
    """Parses a shared-folder (SHAR) chunk into its id, name and folder key."""
    # TODO: Fake some data and make a test
    io = BytesIO(chunk.payload)
    id = read_item(io)
    encrypted_key = decode_hex(read_item(io))
    encrypted_name = read_item(io)
    skip_item(io, 2)
    key = read_item(io)
    # Shared folder encryption key might come already in pre-decrypted form,
    # where it's only AES encrypted with the regular encryption key.
    # When the key is blank, then there's a RSA encrypted key, which has to
    # be decrypted first before use.
    if not key:
        # TODO: rsa_key.private_decrypt(encrypted_key, RSA_PKCS1_OAEP_PADDING)
        key = decode_hex(rsa_key.decrypt(encrypted_key))
    else:
        key = decode_hex(decode_aes256_plain_auto(key, encryption_key))
    # The folder name is encrypted with the folder's own key, not the vault key.
    name = decode_aes256_base64_auto(encrypted_name, key)
    # TODO: Return an object, not a dict
    return {'id': id, 'name': name, 'encryption_key': key}
def parse_secure_note_server(notes):
    """Extracts [url, username, password] from "Server" secure-note text.

    Each line has the form ``Key:Value``. Values may themselves contain
    colons (e.g. ``Hostname:host:8080``), so only the first colon is treated
    as the separator; lines without a colon are ignored. Missing fields stay
    ``None``.
    """
    url = None
    username = None
    password = None
    for line in notes.split(b'\n'):
        if not line:  # blank line
            continue
        # partition splits on the FIRST ':' only -- the previous unbounded
        # split() raised ValueError for values containing a colon.
        key, sep, value = line.partition(b':')
        if not sep:
            continue
        if key == b'Hostname':
            url = value
        elif key == b'Username':
            username = value
        elif key == b'Password':
            password = value
    return [url, username, password]
def read_chunk(stream):
    """Reads one chunk from a stream and creates a Chunk object with the data read."""
    # LastPass blob chunk is made up of 4-byte ID,
    # big endian 4-byte size and payload of that size.
    #
    # Example:
    # 0000: "IDID"
    # 0004: 4
    # 0008: 0xDE 0xAD 0xBE 0xEF
    # 000C: --- Next chunk ---
    # Python evaluates arguments left to right, so the ID is read before
    # the size/payload — matching the on-disk order.
    return Chunk(read_id(stream), read_payload(stream, read_size(stream)))
def read_item(stream):
    """Reads an item from a stream and returns it as a string of bytes."""
    # An item in an itemized chunk is made up of the
    # big endian size and the payload of that size.
    #
    # Example:
    # 0000: 4
    # 0004: 0xDE 0xAD 0xBE 0xEF
    # 0008: --- Next item ---
    return read_payload(stream, read_size(stream))
def skip_item(stream, times=1):
    """Skips *times* consecutive items in a stream."""
    remaining = times
    while remaining > 0:
        read_item(stream)
        remaining -= 1
def read_id(stream):
    """Reads a chunk ID (4 raw bytes) from a stream."""
    return stream.read(4)
def read_size(stream):
    """Reads a chunk or an item size (big-endian uint32)."""
    return read_uint32(stream)
def read_payload(stream, size):
    """Reads a payload of a given size from a stream."""
    return stream.read(size)
def read_uint32(stream):
    """Reads a big-endian unsigned 32 bit integer from a stream."""
    (value,) = struct.unpack('>I', stream.read(4))
    return value
def decode_hex(data):
    """Decodes a hex encoded string into raw bytes.

    Raises TypeError on malformed hex (odd length or non-hex digits).
    """
    try:
        decoded = codecs.decode(data, 'hex_codec')
    except binascii.Error:
        raise TypeError()
    return decoded
def decode_base64(data):
    """Decodes a base64 encoded string into raw bytes."""
    return b64decode(data)
def decode_aes256_plain_auto(data, encryption_key):
    """Guesses AES cipher (ECB or CBC) from the shape of the plain data.

    CBC payloads start with '!' and are 1 byte + a whole number of
    16-byte blocks; everything else is treated as ECB.
    """
    assert isinstance(data, bytes)
    size = len(data)
    if not size:
        return b''
    if data.startswith(b'!') and size % 16 == 1 and size > 32:
        return decode_aes256_cbc_plain(data, encryption_key)
    return decode_aes256_ecb_plain(data, encryption_key)
def decode_aes256_base64_auto(data, encryption_key):
    """Guesses AES cipher (ECB or CBC) from the base64 encoded data.

    A leading '!' marks the CBC layout; otherwise ECB is assumed.
    """
    assert isinstance(data, bytes)
    if not data:
        return b''
    if data.startswith(b'!'):
        return decode_aes256_cbc_base64(data, encryption_key)
    return decode_aes256_ecb_base64(data, encryption_key)
def decode_aes256_ecb_plain(data, encryption_key):
    """Decrypts AES-256 ECB bytes; empty input yields empty bytes."""
    if data:
        return decode_aes256('ecb', '', data, encryption_key)
    return b''
def decode_aes256_ecb_base64(data, encryption_key):
    """Decrypts base64 encoded AES-256 ECB bytes."""
    return decode_aes256_ecb_plain(decode_base64(data), encryption_key)
def decode_aes256_cbc_plain(data, encryption_key):
    """Decrypts AES-256 CBC bytes.

    Layout: a '!' marker byte, then a 16-byte IV, then the encrypted
    payload.
    """
    if not data:
        return b''
    return decode_aes256('cbc', data[1:17], data[17:], encryption_key)
def decode_aes256_cbc_base64(data, encryption_key):
    """Decrypts base64 encoded AES-256 CBC bytes."""
    if not data:
        return b''
    else:
        # LastPass AES-256/CBC/base64 encryted string starts with an "!".
        # Next 24 bytes are the base64 encoded IV for the cipher.
        # Then comes the "|".
        # And the rest is the base64 encoded encrypted payload.
        return decode_aes256(
            'cbc',
            decode_base64(data[1:25]),
            decode_base64(data[26:]),
            encryption_key)
def decode_aes256(cipher, iv, data, encryption_key):
    """
    Decrypt AES-256 bytes.
    Allowed ciphers are: :ecb, :cbc.
    If for :ecb iv is not used and should be set to "".
    """
    if cipher == 'cbc':
        aes_mode = AES.MODE_CBC
    elif cipher == 'ecb':
        aes_mode = AES.MODE_ECB
    else:
        raise ValueError('Unknown AES mode')
    aes = AES.new(encryption_key, aes_mode, iv)
    d = aes.decrypt(data)
    # http://passingcuriosity.com/2009/aes-encryption-in-python-with-m2crypto/
    # PKCS#7-style unpad: the last byte gives the pad length.
    # NOTE(review): the padding is not validated, so corrupt ciphertext
    # silently yields truncated plaintext — confirm this is acceptable.
    unpad = lambda s: s[0:-ord(d[-1:])]
    return unpad(d)
| {
"content_hash": "d65d795988d71cfa5b4780772995b679",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 111,
"avg_line_length": 29.535211267605632,
"alnum_prop": 0.6325703385789223,
"repo_name": "dhercher/lastpass-python",
"id": "5020854f891dc36795567e1121251e599b9dcea9",
"size": "8404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lastpass/parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "135374"
}
],
"symlink_target": ""
} |
import json
import base64
from moto.core.responses import BaseResponse
from typing import Dict, List, Tuple, Union
from .models import acm_backends, AWSCertificateManagerBackend
from .exceptions import AWSValidationException
GENERIC_RESPONSE_TYPE = Union[str, Tuple[str, Dict[str, int]]]
class AWSCertificateManagerResponse(BaseResponse):
    def __init__(self) -> None:
        """Bind this response handler to the ACM service."""
        super().__init__(service_name="acm")
    @property
    def acm_backend(self) -> AWSCertificateManagerBackend:
        """The ACM backend for the current request's account and region."""
        return acm_backends[self.current_account][self.region]
def add_tags_to_certificate(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
tags = self._get_param("Tags")
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
self.acm_backend.add_tags_to_certificate(arn, tags)
return ""
def delete_certificate(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
self.acm_backend.delete_certificate(arn)
return ""
def describe_certificate(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
cert_bundle = self.acm_backend.get_certificate(arn)
return json.dumps(cert_bundle.describe())
def get_certificate(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
cert_bundle = self.acm_backend.get_certificate(arn)
result = {
"Certificate": cert_bundle.cert.decode(),
"CertificateChain": cert_bundle.chain.decode(),
}
return json.dumps(result)
def import_certificate(self) -> str:
"""
Returns errors on:
Certificate, PrivateKey or Chain not being properly formatted
Arn not existing if its provided
PrivateKey size > 2048
Certificate expired or is not yet in effect
Does not return errors on:
Checking Certificate is legit, or a selfsigned chain is provided
:return: str(JSON) for response
"""
certificate = self._get_param("Certificate")
private_key = self._get_param("PrivateKey")
chain = self._get_param("CertificateChain") # Optional
current_arn = self._get_param("CertificateArn") # Optional
tags = self._get_param("Tags") # Optional
# Simple parameter decoding. Rather do it here as its a data transport decision not part of the
# actual data
try:
certificate = base64.standard_b64decode(certificate)
except Exception:
raise AWSValidationException(
"The certificate is not PEM-encoded or is not valid."
)
try:
private_key = base64.standard_b64decode(private_key)
except Exception:
raise AWSValidationException(
"The private key is not PEM-encoded or is not valid."
)
if chain is not None:
try:
chain = base64.standard_b64decode(chain)
except Exception:
raise AWSValidationException(
"The certificate chain is not PEM-encoded or is not valid."
)
arn = self.acm_backend.import_cert(
certificate, private_key, chain=chain, arn=current_arn, tags=tags
)
return json.dumps({"CertificateArn": arn})
def list_certificates(self) -> str:
certs = []
statuses = self._get_param("CertificateStatuses")
for cert_bundle in self.acm_backend.get_certificates_list(statuses):
certs.append(
{
"CertificateArn": cert_bundle.arn,
"DomainName": cert_bundle.common_name,
}
)
result = {"CertificateSummaryList": certs}
return json.dumps(result)
def list_tags_for_certificate(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return json.dumps({"__type": "MissingParameter", "message": msg}), dict(
status=400
)
cert_bundle = self.acm_backend.get_certificate(arn)
result: Dict[str, List[Dict[str, str]]] = {"Tags": []}
# Tag "objects" can not contain the Value part
for key, value in cert_bundle.tags.items():
tag_dict = {"Key": key}
if value is not None:
tag_dict["Value"] = value
result["Tags"].append(tag_dict)
return json.dumps(result)
def remove_tags_from_certificate(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
tags = self._get_param("Tags")
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
self.acm_backend.remove_tags_from_certificate(arn, tags)
return ""
def request_certificate(self) -> GENERIC_RESPONSE_TYPE:
domain_name = self._get_param("DomainName")
idempotency_token = self._get_param("IdempotencyToken")
subject_alt_names = self._get_param("SubjectAlternativeNames")
tags = self._get_param("Tags") # Optional
if subject_alt_names is not None and len(subject_alt_names) > 10:
# There is initial AWS limit of 10
msg = (
"An ACM limit has been exceeded. Need to request SAN limit to be raised"
)
return (
json.dumps({"__type": "LimitExceededException", "message": msg}),
dict(status=400),
)
arn = self.acm_backend.request_certificate(
domain_name,
idempotency_token,
subject_alt_names,
tags,
)
return json.dumps({"CertificateArn": arn})
def resend_validation_email(self) -> GENERIC_RESPONSE_TYPE:
arn = self._get_param("CertificateArn")
domain = self._get_param("Domain")
# ValidationDomain not used yet.
# Contains domain which is equal to or a subset of Domain
# that AWS will send validation emails to
# https://docs.aws.amazon.com/acm/latest/APIReference/API_ResendValidationEmail.html
# validation_domain = self._get_param('ValidationDomain')
if arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
cert_bundle = self.acm_backend.get_certificate(arn)
if cert_bundle.common_name != domain:
msg = "Parameter Domain does not match certificate domain"
_type = "InvalidDomainValidationOptionsException"
return json.dumps({"__type": _type, "message": msg}), dict(status=400)
return ""
def export_certificate(self) -> GENERIC_RESPONSE_TYPE:
certificate_arn = self._get_param("CertificateArn")
passphrase = self._get_param("Passphrase")
if certificate_arn is None:
msg = "A required parameter for the specified action is not supplied."
return (
json.dumps({"__type": "MissingParameter", "message": msg}),
dict(status=400),
)
(
certificate,
certificate_chain,
private_key,
) = self.acm_backend.export_certificate(
certificate_arn=certificate_arn, passphrase=passphrase
)
return json.dumps(
dict(
Certificate=certificate,
CertificateChain=certificate_chain,
PrivateKey=private_key,
)
)
| {
"content_hash": "4bce64a089b9034c2efe9bffa6932a75",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 103,
"avg_line_length": 35.16600790513834,
"alnum_prop": 0.578397212543554,
"repo_name": "spulec/moto",
"id": "8194cd5a573a5ebd19f380fa487354497f2c96d3",
"size": "8897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moto/acm/responses.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "255"
},
{
"name": "HTML",
"bytes": "5983"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "1424"
},
{
"name": "Jinja",
"bytes": "2502"
},
{
"name": "Makefile",
"bytes": "2284"
},
{
"name": "Python",
"bytes": "14737868"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Scala",
"bytes": "782"
},
{
"name": "Shell",
"bytes": "5515"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add Message.import_job and Session.status; alter Message.session FK."""

    dependencies = [
        ('warehouse', '0006_message_message_id'),
    ]

    operations = [
        # Link each message to the import job that produced it (nullable for
        # pre-existing rows).
        migrations.AddField(
            model_name='message',
            name='import_job',
            field=models.ForeignKey(to='warehouse.ImportJob', null=True),
            preserve_default=True,
        ),
        # Track the lifecycle of a session: started / completed / failed.
        migrations.AddField(
            model_name='session',
            name='status',
            field=models.CharField(max_length=255, null=True, verbose_name=b'Session status', choices=[(b'started', b'Started'), (b'completed', b'Completed'), (b'failed', b'Failed')]),
            preserve_default=True,
        ),
        # Redefine the session FK (now declared with null=True).
        migrations.AlterField(
            model_name='message',
            name='session',
            field=models.ForeignKey(to='warehouse.Session', null=True),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "e55de6c91ae4f10776a57a6e0af11b99",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 184,
"avg_line_length": 31.387096774193548,
"alnum_prop": 0.5775950668036999,
"repo_name": "smn/goggles",
"id": "b2c3a441c0ecf6ad68748994a27f0508319828b3",
"size": "997",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "goggles/warehouse/migrations/0007_auto_20141222_1156.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "75682"
},
{
"name": "Shell",
"bytes": "256"
}
],
"symlink_target": ""
} |
from os import path
from pkg_resources import Requirement, ResolutionError, parse_version, require
from pyinfra import __version__, state
from .exceptions import PyinfraError
class ConfigDefaults:
    """Default values for every pyinfra config option.

    Only the upper-case attributes are treated as options (see
    ``config_defaults`` below).  The duplicated DOAS/DOAS_USER block that was
    here twice has been collapsed to a single definition.
    """

    # % of hosts which have to fail for all operations to stop
    FAIL_PERCENT = None

    # Seconds to timeout SSH connections
    CONNECT_TIMEOUT = 10

    # Temporary directory (on the remote side) to use for caching any files/downloads
    TEMP_DIR = "/tmp"

    # Gevent pool size (defaults to #of target hosts)
    PARALLEL = 0

    # Specify the required pyinfra version (using PEP 440 setuptools specifier)
    REQUIRE_PYINFRA_VERSION = None

    # Specify any required packages (either using PEP 440 or a requirements file)
    # Note: this can also include pyinfra potentially replacing REQUIRE_PYINFRA_VERSION
    REQUIRE_PACKAGES = None

    # All these can be overridden inside individual operation calls:

    # Switch to this user (from ssh_user) using su before executing operations
    SU_USER = None
    USE_SU_LOGIN = False
    SU_SHELL = None
    PRESERVE_SU_ENV = False

    # Use sudo and optional user
    SUDO = False
    SUDO_USER = None
    PRESERVE_SUDO_ENV = False
    USE_SUDO_LOGIN = False
    USE_SUDO_PASSWORD = False

    # Use doas and optional user
    DOAS = False
    DOAS_USER = None

    # Only show errors but don't count as failure
    IGNORE_ERRORS = False

    # Shell to use to execute commands
    SHELL = "sh"
config_defaults = {key: value for key, value in ConfigDefaults.__dict__.items() if key.isupper()}
def check_pyinfra_version(version: str):
    """Raise ``PyinfraError`` unless the running pyinfra satisfies *version*.

    *version* is a PEP 440 specifier suffix (e.g. ``>=2,<3``); a falsy value
    means there is no requirement to check.
    """
    if not version:
        return

    requirement = Requirement.parse(
        "pyinfra{0}".format(version),
    )

    if parse_version(__version__) not in requirement:
        raise PyinfraError(
            ("pyinfra version requirement not met " "(requires {0}, running {1})").format(
                version,
                __version__,
            ),
        )
def check_require_packages(requirements_config):
    """Raise ``PyinfraError`` if the deploy's required packages are missing.

    *requirements_config* is either a list/tuple of PEP 440 requirement
    strings, or a path (relative to ``state.cwd``) to a requirements file.
    """
    if not requirements_config:
        return

    if isinstance(requirements_config, (list, tuple)):
        requirements = requirements_config
    else:
        requirements_file = path.join(state.cwd, requirements_config)
        with open(requirements_file, encoding="utf-8") as f:
            # Strip any "...#egg=name" prefix down to the package name
            requirements = [line.split("#egg=")[-1] for line in f.read().splitlines()]

    try:
        require(requirements)
    except ResolutionError as e:
        raise PyinfraError(
            "Deploy requirements ({0}) not met: {1}".format(
                requirements_config,
                e,
            ),
        )
# Validators run whenever the corresponding Config attribute is assigned
# (see Config.__setattr__ below in this module).
config_checkers = {
    "REQUIRE_PYINFRA_VERSION": check_pyinfra_version,
    "REQUIRE_PACKAGES": check_require_packages,
}
class Config(ConfigDefaults):
    """
    The default/base configuration options for a pyinfra deploy.
    """

    def __init__(self, **kwargs):
        # ENV is always applied, even when not passed explicitly
        self.ENV = kwargs.pop("ENV", {})

        options = dict(config_defaults)
        options.update(kwargs)

        for name, value in options.items():
            setattr(self, name, value)

    def __setattr__(self, key, value):
        super().__setattr__(key, value)
        # Some options (version/package requirements) validate on assignment
        checker = config_checkers.get(key)
        if checker is not None:
            checker(value)

    def get_current_state(self):
        """Return the current option values as ``(key, value)`` pairs."""
        return [(name, getattr(self, name)) for name in config_defaults.keys()]

    def set_current_state(self, config_state):
        """Apply a list of ``(key, value)`` pairs produced by get_current_state."""
        for name, value in config_state:
            setattr(self, name, value)

    def lock_current_state(self):
        # Remember the current values so they can be restored later
        self._locked_config = self.get_current_state()

    def reset_locked_state(self):
        self.set_current_state(self._locked_config)

    def copy(self) -> "Config":
        return Config(**dict(self.get_current_state()))
| {
"content_hash": "e7f065eec0a4fb3de05f1d55a9470889",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 97,
"avg_line_length": 29.285714285714285,
"alnum_prop": 0.634403080872914,
"repo_name": "Fizzadar/pyinfra",
"id": "cd221bd0a259d90f439247e51bdb998ded1f8ae0",
"size": "3895",
"binary": false,
"copies": "1",
"ref": "refs/heads/2.x",
"path": "pyinfra/api/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "57"
},
{
"name": "Python",
"bytes": "861601"
},
{
"name": "Shell",
"bytes": "3448"
}
],
"symlink_target": ""
} |
"""Provide configuration end points for Groups."""
from homeassistant.components.group import DOMAIN, GROUP_SCHEMA
from homeassistant.config import GROUP_CONFIG_PATH
from homeassistant.const import SERVICE_RELOAD
import homeassistant.helpers.config_validation as cv
from . import EditKeyBasedConfigView
async def async_setup(hass):
    """Set up the Group config API."""

    async def hook(hass):
        """post_write_hook for Config View that reloads groups."""
        await hass.services.async_call(DOMAIN, SERVICE_RELOAD)

    view = EditKeyBasedConfigView(
        "group",
        "config",
        GROUP_CONFIG_PATH,
        cv.slug,
        GROUP_SCHEMA,
        post_write_hook=hook,
    )
    hass.http.register_view(view)
    return True
| {
"content_hash": "0ef1d90d06a93b7b640415b354591bcd",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 66,
"avg_line_length": 28.88888888888889,
"alnum_prop": 0.6615384615384615,
"repo_name": "leppa/home-assistant",
"id": "d95891af6556c16f6b44a398497a4872ea4878a3",
"size": "780",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/config/group.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18957740"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
import sys
sys.path.append('../../')
from proximal.utils.utils import *
from proximal.halide.halide import *
from proximal.lin_ops import *
import numpy as np
from scipy import signal
from scipy.misc import face
import matplotlib.pyplot as plt
############################################################
# Demo: apply the mul_color (YUV) linear operator to a test image and
# visualise each resulting channel next to the original.
# Load image
np_img = get_test_image(512, color=True)
plt.figure()
plt.subplot(221)
plt.imshow(np_img, interpolation="nearest", clim=(0.0, 1.0), cmap='gray')
plt.title('Numpy')
# Time the forward color transform (tic/toc come from proximal.utils)
tic()
fn = mul_color(Variable(np_img.shape), mode='yuv')
# Fortran ordering to match the Halide-backed operator's layout
output = np.empty(np_img.shape, dtype=np.float32, order='F')
fn.forward([np_img], [output])
print('Running color transform took: {0:.1f}ms'.format(toc()))
mi = np.amin(output)
ma = np.amax(output)
print('Y colorspace Min/Max are: [{:0.1f}, {:0.1f}]'.format(mi, ma))
# Channel 0: luma (clamped at 0 for display)
plt.subplot(222)
plt.imshow(np.maximum(output[..., 0], 0.0),
           interpolation="nearest",
           clim=(0.0, 1.0),
           cmap='gray')
plt.colorbar()
plt.title('Y colorspace')
# Channel 1: U chroma
plt.subplot(223)
plt.imshow(output[..., 1],
           interpolation="nearest",
           cmap='gray')
plt.colorbar()
plt.title('U colorspace')
# Channel 2: V chroma
plt.subplot(224)
plt.imshow(output[..., 2],
           interpolation="nearest",
           cmap='gray')
plt.colorbar()
plt.title('V colorspace')
plt.show()
| {
"content_hash": "9e36a174ac2ef490fd0c0442a4692f4f",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 73,
"avg_line_length": 24.185185185185187,
"alnum_prop": 0.6232771822358346,
"repo_name": "comp-imaging/ProxImaL",
"id": "3bc9057a1fa240499f945dda4d883a7b57cb1e06",
"size": "1317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "proximal/examples/test_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5500"
},
{
"name": "C++",
"bytes": "135520"
},
{
"name": "Jinja",
"bytes": "3865"
},
{
"name": "Meson",
"bytes": "7090"
},
{
"name": "Python",
"bytes": "397574"
},
{
"name": "Shell",
"bytes": "689"
}
],
"symlink_target": ""
} |
import json, sublime, webbrowser
from ..completions import CompletionList
from ..inline_documentation import Documentation
from .. import utils
# Colors merged into every documentation dict (see get_documentation below).
DOC_STYLES = {
    "side_color": "#336B81",
    "header_color": "#379AC1",
    "header_bg_color": "#E1E1E1",
    "text_color": "#585B31"
}
# Module-level cache: filled once by load() with completion tuples and docs.
testbox = {"completions": {}, "documentation": {}}
def plugin_loaded():
    """Sublime Text entry point: load the TestBox data off the main thread."""
    sublime.set_timeout_async(load)
def load():
    """Populate the module-level `testbox` cache with completions and docs."""
    global testbox

    completions_data = load_json_data("completions")
    for key in completions_data:
        entries = completions_data[key]
        cfc = key.split(".")[-1].capitalize()
        if cfc == "Basespec":
            cfc = "BaseSpec"

        sorted_keys = sorted(entries.keys())
        testbox["completions"][key] = [
            (comp_key + "\t" + "TestBox " + cfc, entries[comp_key])
            for comp_key in sorted_keys
        ]

        # Every expect() matcher can also be used with a "not" prefix
        if key == "expectation":
            testbox["completions"][key].extend(
                (negate_string(comp_key) + "\t" + cfc, negate_string(entries[comp_key]))
                for comp_key in sorted_keys
            )

    testbox["documentation"] = load_json_data("documentation")
def load_json_data(filename):
    """Parse a bundled JSON resource from this package's json directory."""
    resource = "Packages/" + utils.get_plugin_name() + "/src/testbox/json/" + filename + ".json"
    return json.loads(sublime.load_resource(resource))
def negate_string(string_to_negate):
    """Return the string prefixed with "not" and its first letter upper-cased."""
    first, rest = string_to_negate[0], string_to_negate[1:]
    return "not" + first.upper() + rest
def get_setting(view, setting_key):
    """Look up a setting, preferring the window's project data over the package settings."""
    project_data = view.window().project_data()
    if setting_key in project_data:
        return project_data[setting_key]
    return sublime.load_settings("lucee_package.sublime-settings").get(setting_key)
def extends_testbox(view):
    """True if the component's extends attribute is testbox.system.BaseSpec."""
    regions = view.find_by_selector("entity.other.inherited-class.cfml")
    if not regions:
        return False
    return view.substr(regions[0]).lower() == "testbox.system.basespec"
def is_testbox_file(view):
    """True if the view extends BaseSpec or lives inside a configured test folder."""
    if extends_testbox(view):
        return True
    file_path = view.file_name().replace("\\", "/").lower()
    return any(
        "/" + folder in file_path
        for folder in get_setting(view, "testbox_folders")
    )
def get_dot_completions(view, prefix, position, info):
    """Return expect()/assert member completions after a dot, when applicable."""
    if not get_setting(view, "testbox_enabled"):
        return None

    dot_context = info["dot_context"]
    if dot_context and is_testbox_file(view):
        # expect(...). -> expectation matchers
        if dot_context[-1].name == "expect":
            return CompletionList(testbox["completions"]["expectation"], 1, False)
        # assert. -> assertion methods (only directly on `assert` itself)
        if len(dot_context) == 1 and dot_context[-1].name == "assert":
            return CompletionList(testbox["completions"]["assertion"], 1, False)

    return None
def get_script_completions(view, prefix, position, info):
    """Offer BaseSpec method completions inside curly braces of TestBox files."""
    if not get_setting(view, "testbox_enabled"):
        return None

    if is_testbox_file(view) and view.match_selector(position, "meta.group.braces.curly"):
        return CompletionList(testbox["completions"]["basespec"], 1, False)

    return None
def get_inline_documentation(view, position):
    """Return inline Documentation for the TestBox call under the cursor, or None."""
    if not get_setting(view, "testbox_enabled") or not is_testbox_file(view):
        return None
    # Method calls: expect(...).matcher() / assert.assertion()
    if view.match_selector(position, "meta.function-call.method.cfml"):
        function_name, function_name_region, function_args_region = utils.get_function_call(view, position)
        if view.substr(function_name_region.begin() - 1) == ".":
            dot_context = utils.get_dot_context(view, function_name_region.begin() - 1)
            if dot_context[-1].name == "expect":
                # Direct matcher lookup first, then the "notFoo" -> "foo" negated form
                if function_name in testbox["documentation"]["expectation"]:
                    return Documentation(get_documentation(function_name, testbox["documentation"]["expectation"][function_name]), None, 2)
                if len(function_name) > 3 and function_name[:3] == "not" and function_name[3:] in testbox["documentation"]["expectation"]:
                    return Documentation(get_documentation(function_name, testbox["documentation"]["expectation"][function_name[3:]], True), None, 2)
            if dot_context[-1].name == "assert" and function_name in testbox["documentation"]["assertion"]:
                return Documentation(get_documentation(function_name, testbox["documentation"]["assertion"][function_name]), None, 2)
    # Bare function calls: BaseSpec methods (describe, it, beforeEach, ...)
    if view.match_selector(position, "meta.function-call.cfml"):
        function_name, function_name_region, function_args_region = utils.get_function_call(view, position)
        if function_name in testbox["documentation"]["basespec"]:
            return Documentation(get_documentation(function_name, testbox["documentation"]["basespec"][function_name]), None, 2)
    return None
def get_documentation(key, metadata, negated=False):
    """Build the documentation dict for a TestBox member from its metadata.

    Starts from the shared DOC_STYLES colors and copies in the member's
    header/description/body/links; negated matchers get a marked header.
    """
    doc = dict(DOC_STYLES)
    for field in ("header", "description", "body", "links"):
        doc[field] = metadata[field]
    if negated:
        doc["header"] += " (negated)"
    return doc
"content_hash": "98eb7ce975b0fd072fc6b27d3b9ea1e2",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 166,
"avg_line_length": 39.84033613445378,
"alnum_prop": 0.7144062434085636,
"repo_name": "jcberquist/SublimeText-Lucee",
"id": "ecc2e15a94f3164bfe24bdc1225692fd824ec4f1",
"size": "4741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/testbox/testbox.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ColdFusion",
"bytes": "18785"
},
{
"name": "HTML",
"bytes": "1072"
},
{
"name": "JavaScript",
"bytes": "14513"
},
{
"name": "Python",
"bytes": "76044"
}
],
"symlink_target": ""
} |
from mldb import mldb, ResponseException
import unittest
class myTest(unittest.TestCase):
    """Checks the error messages raised by builtin SQL functions."""

    def test_sequence(self):
        # exp() with a non-numeric argument must report a conversion error
        with self.assertRaisesRegex(
            ResponseException,
            "Executing builtin function exp: Can't convert value 'a' of type 'ASCII_STRING' to double",
        ):
            mldb.query("SELECT exp('a')")

        # sqrt() takes exactly one argument
        with self.assertRaisesRegex(
            ResponseException,
            "Binding builtin function sqrt: expected 1 argument, got 3",
        ):
            mldb.query("SELECT sqrt(1,2,3)")
mldb.run_tests()
| {
"content_hash": "0a8e6833c125f55d21895beca65dea7d",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 153,
"avg_line_length": 37.06666666666667,
"alnum_prop": 0.6762589928057554,
"repo_name": "mldbai/mldb",
"id": "415b70d378805e64b32a5e52c13a3977b36bc3dd",
"size": "636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testing/MLDB-1336-builtin-checks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "643"
},
{
"name": "C",
"bytes": "11754639"
},
{
"name": "C++",
"bytes": "14072572"
},
{
"name": "CMake",
"bytes": "2737"
},
{
"name": "CSS",
"bytes": "17037"
},
{
"name": "Dockerfile",
"bytes": "1591"
},
{
"name": "Fortran",
"bytes": "16349"
},
{
"name": "HTML",
"bytes": "311171"
},
{
"name": "JavaScript",
"bytes": "2209253"
},
{
"name": "Jupyter Notebook",
"bytes": "7661154"
},
{
"name": "Makefile",
"bytes": "290745"
},
{
"name": "Perl",
"bytes": "3890"
},
{
"name": "Python",
"bytes": "1422764"
},
{
"name": "Shell",
"bytes": "32489"
},
{
"name": "Smarty",
"bytes": "2938"
},
{
"name": "SourcePawn",
"bytes": "52752"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import sys
import getopt
import pywintypes
from win32cred import *
# Win32 error codes not exposed by win32cred itself.
ERROR_NOT_FOUND = 0x490
ERROR_NO_SUCH_LOGON_SESSION = 0x520
ERROR_INVALID_FLAGS = 0x3ec
# Credential type/flag values missing from older pywin32 releases.
CRED_TYPE_DOMAIN_EXTENDED = 0x6
CRED_ENUMERATE_ALL_CREDENTIALS = 0x1
# Command-line name -> CRED_PERSIST_* value for the -P option.
CredPersist = {
    "no": 0,
    "session": CRED_PERSIST_SESSION,
    "local": CRED_PERSIST_LOCAL_MACHINE,
    "enterprise": CRED_PERSIST_ENTERPRISE,
}
# Command-line name -> CRED_TYPE_* value for the -t option.
CredType = {
    "generic": CRED_TYPE_GENERIC,
    "domain": CRED_TYPE_DOMAIN_PASSWORD,
    "domcert": CRED_TYPE_DOMAIN_CERTIFICATE,
    "passport": CRED_TYPE_DOMAIN_VISIBLE_PASSWORD,
    "domext": CRED_TYPE_DOMAIN_EXTENDED,
}
# Command-line name -> CRED_FLAGS_* value for the -f option.
CredFlags = {
    "username-target": CRED_FLAGS_USERNAME_TARGET
}
def handleWinError(e):
    """Print a pywintypes.error (args = (code, funcname, message)) to stderr.

    Unpacks ``e.args`` rather than the exception object itself: direct tuple
    unpacking of an exception (``(code, func, error) = e``) only worked on
    Python 2 and raises TypeError on Python 3.
    """
    (code, func, error) = e.args
    print("error: %s: [%d] %s" % (func, code, error), file=sys.stderr)
def findkey(haystack, needle, default=None):
    """Return the first key in *haystack* whose value equals *needle*."""
    for key, value in haystack.items():
        if value == needle:
            return key
    return default
def display(cred, full=True):
    """Print one credential dict, either verbosely (full) or as a table row."""
    # Map raw numeric values back to their command-line names for display
    cType = findkey(CredType, cred["Type"], "unknown")
    cPersist = findkey(CredPersist, cred["Persist"], "unknown")
    cFlags = [flag for flag, value in CredFlags.items() if cred["Flags"] & value]
    cBlob = cred["CredentialBlob"]
    if full:
        fmt = "%12s %s"
        print(fmt % ("target:", cred["TargetName"]))
        if cred["TargetAlias"] is not None:
            for a in cred["TargetAlias"]:
                print(fmt % ("alias:", a))
        print(fmt % ("type:", cType))
        print(fmt % ("user:", cred["UserName"]))
        print(fmt % ("comment:", cred["Comment"]))
        print(fmt % ("persist:", cPersist))
        for flag in cFlags:
            print(fmt % ("flags:", flag))
        # assumes each attribute is a mapping with Keyword/Value keys — TODO confirm
        for attr in cred["Attributes"]:
            text = "%(Keyword)s=%(Value)s" % attr
            print(fmt % ("attribute:", text))
        if cBlob:
            # Only the blob length is shown; never print secret material
            print(fmt % ("blob:", "<%d bytes>" % len(cBlob)))
            #print(fmt % ("", repr(cBlob)))
        print()
    else:
        # Compact one-line table row; long fields are ellipsized to 30 chars
        trim = lambda string, n: string[:n-3]+"..." if len(string) > n else string
        print("%(TargetName)-30s %(UserName)-30s %(Type)-8s %(Persist)-3s" % {
            "TargetName": trim(cred["TargetName"], 30),
            "UserName": trim(cred["UserName"], 30),
            "Type": cType,
            "Persist": cPersist[:7],
        })
# Template credential, filled in by the command-line options below.
cred = {
    "TargetName": None,
    "UserName": None,
    "Persist": CredPersist["local"],
    "Type": CredType["generic"],
    "Comment": None,
    "Attributes": {},
    "Flags": 0,
}
# Requirements collected from -r options (admin/cert/nocert/sc).
require = set()
# First positional argument selects the action; print usage otherwise.
try:
    action = sys.argv[1]
except IndexError:
    print("Usage:")
    print("  cred {ls | ll} [targetprefix]")
    print("  cred {new | rm | read | readdom | targetinfo} <target> [-t type] [-r require]")
    sys.exit(2)
# Parse the remaining arguments; `rest` keeps the positional ones.
options, rest = getopt.gnu_getopt(sys.argv[2:], "a:c:f:P:r:t:u:")
for opt, arg in options:
    if opt == "-a":
        # -a key=value adds a credential attribute
        key, value = arg.split("=", 1)
        cred["Attributes"][key] = value
    elif opt == "-c":
        cred["Comment"] = arg
    elif opt == "-f":
        if arg in CredFlags:
            cred["Flags"] |= CredFlags[arg]
        else:
            raise ValueError("Unknown flag %r" % arg)
    elif opt == "-P":
        if arg in CredPersist:
            cred["Persist"] = CredPersist[arg]
        else:
            raise ValueError("Invalid persist value %r" % arg)
    elif opt == "-r":
        # NOTE(review): unknown -r values are silently ignored (no error)
        if arg in ("admin", "nocert", "cert", "sc"):
            require.add(arg)
    elif opt == "-t":
        if arg in CredType:
            cred["Type"] = CredType[arg]
        else:
            raise ValueError("Invalid type %r" % arg)
    elif opt == "-u":
        cred["UserName"] = arg
# Dispatch on the requested action.
if action in ("ls", "ll"):
    # ls = compact table, ll = verbose listing; optional prefix filter
    full = (action == "ll")
    try:
        filter = rest.pop(0)+"*"
    except IndexError:
        filter = None
    flags = 0
    try:
        if full:
            for cred in CredEnumerate(filter, flags):
                display(cred, True)
            # NOTE(review): bare "print" is a no-op with print_function
            # imported — this was presumably meant to be print()
            print
        else:
            print("%-30s %-30s %-8s %-3s" % ("Target", "User", "Type", "Persist"))
            print("-"*79)
            for cred in CredEnumerate(filter, flags):
                display(cred, False)
    except pywintypes.error as e:
        # NOTE(review): e[0] indexing is a Python 2 idiom; on Python 3 use
        # e.winerror / e.args — confirm the targeted interpreter
        if e[0] == ERROR_NOT_FOUND:
            print("No credentials stored.")
        else:
            handleWinError(e)
elif action == "new":
    # Prompt interactively for a credential and store it
    cred["TargetName"] = rest.pop(0)
    flags = 0
    flags |= CREDUI_FLAGS_DO_NOT_PERSIST
    # Base UI flags depend on the credential type
    if cred["Type"] == CRED_TYPE_GENERIC:
        flags |= CREDUI_FLAGS_GENERIC_CREDENTIALS
        flags |= CREDUI_FLAGS_ALWAYS_SHOW_UI
    elif cred["Type"] == CRED_TYPE_DOMAIN_PASSWORD:
        flags |= CREDUI_FLAGS_EXCLUDE_CERTIFICATES
    elif cred["Type"] == CRED_TYPE_DOMAIN_CERTIFICATE:
        flags |= CREDUI_FLAGS_REQUIRE_CERTIFICATE
    if cred["Flags"] & CRED_FLAGS_USERNAME_TARGET:
        flags |= CREDUI_FLAGS_USERNAME_TARGET_CREDENTIALS
        cred["UserName"] = cred["TargetName"]
    # -r requirements refine the prompt behaviour
    if "cert" in require:
        flags |= CREDUI_FLAGS_REQUIRE_CERTIFICATE
    if "sc" in require:
        flags |= CREDUI_FLAGS_REQUIRE_SMARTCARD
    if "nocert" in require:
        flags |= CREDUI_FLAGS_EXCLUDE_CERTIFICATES
    if "admin" in require:
        flags |= CREDUI_FLAGS_REQUEST_ADMINISTRATOR
    try:
        user, blob, persist = CredUIPromptForCredentials(
            cred["TargetName"], 0, cred["UserName"], None, False, flags)
        cred["UserName"], cred["CredentialBlob"] = user, blob
        CredWrite(cred)
    except pywintypes.error as e:
        handleWinError(e)
    else:
        # Read back and show what was actually stored
        cred = CredRead(cred["TargetName"], cred["Type"])
        display(cred)
elif action == "add":
    # Store the credential built purely from command-line options
    cred["TargetName"] = rest.pop(0)
    CredWrite(cred)
elif action == "rm":
    cred["TargetName"] = rest.pop(0)
    try:
        CredDelete(cred["TargetName"], cred["Type"])
    except pywintypes.error as e:
        handleWinError(e)
elif action == "read":
    cred["TargetName"] = rest.pop(0)
    try:
        cred = CredRead(cred["TargetName"], cred["Type"])
        display(cred)
    except pywintypes.error as e:
        handleWinError(e)
elif action == "readdom":
    # Target is given as "<kind>:<name>", e.g. "server:host.example.com"
    ttype, tname = rest.pop(0).split(":", 1)
    keys = {
        "target": "TargetName",
        "nbserver": "NetbiosServerName",
        "nbdomain": "NetbiosDomainName",
        "server": "DnsServerName",
        "domain": "DnsDomainName",
        "tree": "DnsTreeName",
    }
    key = keys.get(ttype, keys["target"])
    try:
        for cred in CredReadDomainCredentials({key: tname}):
            display(cred)
    except pywintypes.error as e:
        handleWinError(e)
elif action == "targetinfo":
    for target in rest:
        info = CredGetTargetInfo(target)
        # Show TargetName first, the remaining keys sorted
        keys = info.keys()
        keys.sort()
        keys.remove("TargetName")
        keys.insert(0, "TargetName")
        for key in keys:
            value = info[key]
            if key == "CredTypes":
                value = ", ".join(findkey(CredType, i, str(i))
                                for i in value) if value else None
            elif key == "Flags":
                flags = set()
                if value & CRED_ALLOW_NAME_RESOLUTION:
                    flags.add("allow name resolution")
                value = ", ".join(flags) if flags else None
            print("%18s: %s" % (key, value or ""))
        print()
else:
    print("Error: Unknown action %r" % action)
| {
"content_hash": "a68437b3fc08ec44a994df8b143d9940",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 92,
"avg_line_length": 33.23076923076923,
"alnum_prop": 0.5299639917695473,
"repo_name": "grawity/hacks",
"id": "c0b08562b730ea41d11d078b187ace4317416a1c",
"size": "7837",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "security/win32-cred.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "496"
},
{
"name": "Batchfile",
"bytes": "1420"
},
{
"name": "C",
"bytes": "40900"
},
{
"name": "C#",
"bytes": "1753"
},
{
"name": "Haskell",
"bytes": "1027"
},
{
"name": "Makefile",
"bytes": "2519"
},
{
"name": "PHP",
"bytes": "38504"
},
{
"name": "Perl",
"bytes": "180846"
},
{
"name": "PowerShell",
"bytes": "759"
},
{
"name": "Python",
"bytes": "263007"
},
{
"name": "Ruby",
"bytes": "14323"
},
{
"name": "Shell",
"bytes": "53554"
}
],
"symlink_target": ""
} |
import json
import requests
from requests.auth import HTTPBasicAuth
from db import app
class apiApp:
    # Minimal GitHub API client that mirrors a repository's issues into MongoDB.
    def __init__(self,url,user,token):
        self.url = url
        self.user = user
        self.token = token
        self.response = None
        self.appDB = app() # db-type object that helps us connect to the DB
    # Performs the GET request and keeps the raw response on self.response
    def getResponse(self):
        self.response = requests.get(url=self.url,auth=HTTPBasicAuth(self.user,self.token))
    # Builds and saves in Mongo a list of all the repository's issues
    def listIssues(self):
        self.appDB.setCollection('issues')
        self.getResponse()
        r = json.loads(self.response.content)
        # assumes the first element is the newest issue, so its number equals
        # the total issue count — TODO confirm against the GitHub API response
        numIssues = r[0]['number'] # total number of issues
        form = {}
        self.appDB.eliminar() # drop the collection's existing records first
        # Fetch every issue individually (one request per issue number)
        for i in range(numIssues):
            self.url = f"https://api.github.com/repos/pallets/flask/issues/{i+1}"
            self.getResponse()
            r = json.loads(self.response.content)
            form['_id'] = (i+1)
            for key,value in r.items():
                if key == 'number':
                    continue
                form[key] = value
            self.appDB.insertar(form) # now insert the dict's contents into the DB
        print(f"Registros agregados a la colección ")
#MAIN
if __name__ == '__main__':
    # URL the GET request will be made against
    url = 'https://api.github.com/repos/pallets/flask/issues'
    user = 'JuanAlejandroCR'
    # NOTE(review): personal access token hard-coded and committed to source —
    # it should be revoked and read from the environment instead.
    token = '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'
    # API client object
    newApi = apiApp(url, user, token)
    newApi.listIssues()
"content_hash": "ff5905afbd2e2c6f63d8e3830093e909",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 115,
"avg_line_length": 32.833333333333336,
"alnum_prop": 0.5764241398759166,
"repo_name": "AnhellO/DAS_Sistemas",
"id": "e1b8ff53126ff81cfe55a3a33f2487f7f97f4bf2",
"size": "1779",
"binary": false,
"copies": "1",
"ref": "refs/heads/ene-jun-2022",
"path": "Ene-Jun-2022/juan-alejandro-calzoncit-rodriguez/Segundo Parcial/procesarIssues/script_issues.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "8515"
},
{
"name": "Go",
"bytes": "25845"
},
{
"name": "HTML",
"bytes": "36671"
},
{
"name": "Python",
"bytes": "716604"
}
],
"symlink_target": ""
} |
import pytest
from fixture.application import Application
# Lazily-created shared Application instance (one per test session).
fixture = None


@pytest.fixture(scope="session")
def app(request):
    """Session-scoped Application fixture, recreated when the browser died.

    The two previous branches (first creation vs. invalid session) ran the
    same create-and-login code; they are collapsed into one condition.
    """
    global fixture
    if fixture is None or not fixture.is_valid():
        fixture = Application()
        fixture.session.login(username="admin", password="secret")
    return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
    """Log out and destroy the shared Application at session teardown."""
    def teardown():
        fixture.session.logout()
        fixture.destroy()

    request.addfinalizer(teardown)
    return fixture
| {
"content_hash": "a6b4144d31d81abb9976d26b591702f7",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 70,
"avg_line_length": 24.26923076923077,
"alnum_prop": 0.6608557844690967,
"repo_name": "AndriiL/python_training",
"id": "8de36042ac71a00f6d6a7db80201ad0ea2555800",
"size": "631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17101"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from simpbot import control
from hashlib import new
from . import capab
class admins(control.control):
    """A bot administrator (global or network-local) with hashed credentials.

    The password is stored only as a hexdigest (``hash_algorithm``, md5 by
    default); ``save()`` persists the admin's section into the config file.
    """

    def __init__(self, network, user, hash, algm, logins, maxlogins, ison, conf,
        path, timeout, verbose, account):
        super(admins, self).__init__('.'.join(['simpbot-admin',
        'global' if network is None else network, user]))
        self.network = network
        self.user = user
        self.__password = hash  # already-hashed password digest
        self.logins = logins
        self.maxlogins = maxlogins
        self.timeout = timeout
        self.verbose = verbose
        self.capab = []
        self.hash_algorithm = algm
        self.conf = conf
        self.ison = ison
        self.account = account
        self.confpath = path

    def __repr__(self):
        if self.network:
            network = 'network=%s' % self.network
        else:
            network = 'global'
        return '<admin %s %s>' % (network, self.user)

    def __str__(self):
        # Doubles as the config-file section name (see save()).
        network = 'local ' + self.network if self.network else 'global'
        return '%s %s' % (network, self.user)

    def isglobal(self):
        """True when this admin applies to all networks."""
        return self.network is None

    def checkpass(self, password):
        """True if *password* hashes to the stored digest."""
        return self.hash(password) == self.__password

    def hash(self, text):
        """Hexdigest of *text* (UTF-8) using this admin's hash algorithm."""
        return new(self.hash_algorithm, text.encode('utf-8')).hexdigest()

    def has_maxlogin(self):
        """True if the concurrent-login limit has been reached (0 = unlimited)."""
        if self.maxlogins == 0:
            return False
        else:
            return self.logins == self.maxlogins

    def logged(self):
        return self.logins > 0

    def update_password(self, new_pass):
        """Replace the stored digest with the hash of *new_pass* and persist."""
        self.__password = self.hash(new_pass)
        self.save()

    def set_capab(self, capability):
        # Only known capabilities are accepted; duplicates are ignored.
        if capab.exists(capability) and not self.has_capab(capability):
            self.capab.append(capability)

    def del_capab(self, capability):
        if self.has_capab(capability):
            self.capab.remove(capability)

    def has_capab(self, capability):
        return capability in self.capab

    def save(self):
        """Write this admin's settings into its config section and file.

        No-op when no config object/path was supplied.
        """
        if self.conf is None or self.confpath is None:
            return
        equal = lambda boolean: 'yes' if boolean else 'no'
        admin = self.__str__()
        if not self.conf.has_section(admin):
            self.conf.add_section(admin)
        self.conf.set(admin, 'password', self.__password)
        self.conf.set(admin, 'timeout', str(self.timeout))
        self.conf.set(admin, 'maxlogins', str(self.maxlogins))
        self.conf.set(admin, 'verbose', equal(self.verbose))
        self.conf.set(admin, 'capability', ','.join(self.capab))
        self.conf.set(admin, 'isonick', ','.join(self.ison))
        if self.logins > 0:
            self.conf.set(admin, 'logins', str(self.logins))
        if self.hash_algorithm != 'md5':
            self.conf.set(admin, 'hash_algorithm', self.hash_algorithm)
        # open() instead of the py2-only file() builtin (alias of open on
        # Python 2, removed in Python 3)
        with open(self.confpath, 'w') as cfg:
            self.conf.write(cfg)
"content_hash": "fb418e5e37bed2b8610616c57155ec12",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 80,
"avg_line_length": 31.414893617021278,
"alnum_prop": 0.5895699288858788,
"repo_name": "IsmaelRLG/simpbot",
"id": "0105558b52a200587fbb67cf96bafc1697a62e82",
"size": "3044",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simpbot/admins/admins.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "310649"
}
],
"symlink_target": ""
} |
""" Standard plug-in to make dill module work for compiled stuff.
"""
from nuitka.plugins.PluginBase import NuitkaPluginBase
class NuitkaPluginDillWorkarounds(NuitkaPluginBase):
    """This is to make dill module work with compiled methods."""
    plugin_name = "dill-compat"
    @staticmethod
    def isAlwaysEnabled():
        """Opt-in plugin: never enabled automatically."""
        return False
    @staticmethod
    def createPostModuleLoadCode(module):
        """Return source code to execute right after "dill" is imported.

        The emitted code registers picklers for Nuitka's compiled_method
        and compiled_function types with dill, plus the matching
        reconstruction helpers for Python 2 and Python 3.
        """
        full_name = module.getFullName()
        if full_name == "dill":
            # NOTE: the raw string below is injected verbatim into the
            # compiled program; names such as compiled_method and
            # compiled_function_tables only exist in that runtime.
            code = r"""\
import dill._dill
# Compiled methods need to be created.
@dill.register(compiled_method)
def save_compiled_method(pickler, obj):
    if str is not bytes:
        pickler.save_reduce(compiled_method, (obj.__func__, obj.__self__), obj=obj)
    else:
        pickler.save_reduce(compiled_method, (obj.im_func, obj.im_self, obj.im_class), obj=obj)
def _create_compiled_function2(module_name, func_values, func_dict, func_defaults):
    if module_name not in compiled_function_tables:
        __import__(module_name)
    func = compiled_function_tables[module_name][1](*func_values)
    if func_dict:
        for key, value in func_dict.items():
            func[key] = value
    func.__defaults__ = func_defaults
    return func
def _create_compiled_function3(module_name, func_values, func_dict, func_defaults, func_kwdefaults):
    if module_name not in compiled_function_tables:
        __import__(module_name)
    func = compiled_function_tables[module_name][1](*func_values)
    if func_dict:
        for key, value in func_dict.items():
            func[key] = value
    func.__defaults__ = func_defaults
    func.__kwdefaults__ = func_kwdefaults
    return func
# Compiled methods might have to be created or not.
@dill.register(compiled_function)
def save_compiled_function(pickler, obj):
    if not dill._dill._locate_function(obj):
        stack = dill._dill.stack
        if getattr(pickler, '_recurse', False):
            from dill.detect import globalvars
            globs = globalvars(obj, recurse=True, builtin=True)
            if id(obj) in stack:
                globs = obj.__globals__ if str is not bytes else obj.func_globals
        else:
            globs = obj.__globals__ if str is not bytes else obj.func_globals
        _byref = getattr(pickler, '_byref', None)
        _recurse = getattr(pickler, '_recurse', None)
        _memo = (id(obj) in stack) and (_recurse is not None)
        stack[id(obj)] = len(stack), obj
        if str is not bytes:
            # Python3
            _super = ('super' in getattr(obj.__code__,'co_names',())) and (_byref is not None)
            if _super: pickler._byref = True
            if _memo: pickler._recurse = False
            pickler.save_reduce(
                _create_compiled_function3,
                (
                    obj.__module__,
                    compiled_function_tables[obj.__module__][0](obj),
                    obj.__dict__,
                    obj.__defaults__,
                    obj.__kwdefaults__
                )
            )
        else:
            # Python2
            _super = ('super' in getattr(obj.__code__,'co_names',())) and (_byref is not None) and getattr(pickler, '_recurse', False)
            if _super: pickler._byref = True
            if _memo: pickler._recurse = False
            pickler.save_reduce(
                _create_compiled_function2,
                (
                    obj.__module__,
                    compiled_function_tables[obj.__module__][0](obj),
                    obj.__dict__,
                    obj.__defaults__
                )
            )
        if _super: pickler._byref = _byref
        if _memo: pickler._recurse = _recurse
    else:
        dill._dill.StockPickler.save_global(pickler, obj)
"""
            return (
                code,
                """\
Extending "dill" for compiled types to be pickable as well.""",
            )
    @staticmethod
    def getPreprocessorSymbols():
        """C preprocessor defines set while this plugin is active."""
        return {"_NUITKA_PLUGIN_DILL_ENABLED": "1"}
    def getExtraCodeFiles(self):
        """Extra C source file compiled into the result binary."""
        return {"DillPlugin.c": extra_code}
# C helper: records each module's (reduce, create) C function pair under
# builtins.compiled_function_tables, creating that dict on first use.
extra_code = r"""
#include "nuitka/prelude.h"
void registerDillPluginTables(char const *module_name, PyMethodDef *reduce_compiled_function, PyMethodDef *create_compiled_function) {
    PyObject *function_tables = PyObject_GetAttrString((PyObject *)builtin_module, "compiled_function_tables");
    if (function_tables == NULL) {
        DROP_ERROR_OCCURRED();
        function_tables = PyDict_New();
        PyObject_SetAttrString((PyObject *)builtin_module, "compiled_function_tables", function_tables);
    }
    PyObject *funcs = PyTuple_New(2);
    PyTuple_SET_ITEM(funcs, 0, PyCFunction_New(reduce_compiled_function, NULL));
    PyTuple_SET_ITEM(funcs, 1, PyCFunction_New(create_compiled_function, NULL));
    PyDict_SetItemString(function_tables, module_name, funcs);
}
"""
| {
"content_hash": "5fb4b99937fb393ad32f5703764bbea2",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 134,
"avg_line_length": 32.6,
"alnum_prop": 0.5932515337423313,
"repo_name": "kayhayen/Nuitka",
"id": "8e94e2d4daf24d697129bf1d1a4b2ea9f48b6fa0",
"size": "5670",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "nuitka/plugins/standard/DillPlugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1868"
},
{
"name": "C",
"bytes": "617681"
},
{
"name": "C++",
"bytes": "149777"
},
{
"name": "Python",
"bytes": "6603718"
},
{
"name": "Shell",
"bytes": "1088"
}
],
"symlink_target": ""
} |
# Read a word and report whether it is a palindrome.
palavra = input('Digite uma palavra: ')
# A palindrome reads the same forwards and backwards.
eh_palindromo = palavra == ''.join(reversed(palavra))
if eh_palindromo:
    print('A palavra "%s" é palindrome' % palavra)
else:
    print('A palavra "%s" nao e palindrome' % palavra)
| {
"content_hash": "26f514130a5b60e90d1266c78d70ae91",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 75,
"avg_line_length": 35.666666666666664,
"alnum_prop": 0.6401869158878505,
"repo_name": "kidchenko/playground",
"id": "be4204be804b36afc2b33723ccf5337c023f9ac1",
"size": "214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python-para-zumbis/slice_01.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ABAP",
"bytes": "8568"
},
{
"name": "ASP.NET",
"bytes": "98"
},
{
"name": "Ada",
"bytes": "7270"
},
{
"name": "Batchfile",
"bytes": "12383"
},
{
"name": "C",
"bytes": "9517"
},
{
"name": "C#",
"bytes": "1171050"
},
{
"name": "C++",
"bytes": "728997"
},
{
"name": "CMake",
"bytes": "9116"
},
{
"name": "COBOL",
"bytes": "4067"
},
{
"name": "CSS",
"bytes": "66432"
},
{
"name": "Common Lisp",
"bytes": "11582"
},
{
"name": "D",
"bytes": "4258"
},
{
"name": "Dart",
"bytes": "2148"
},
{
"name": "Dockerfile",
"bytes": "13639"
},
{
"name": "Elixir",
"bytes": "4570"
},
{
"name": "Elm",
"bytes": "2363"
},
{
"name": "Erlang",
"bytes": "6106"
},
{
"name": "F#",
"bytes": "8361"
},
{
"name": "Fortran",
"bytes": "5005"
},
{
"name": "Gherkin",
"bytes": "203"
},
{
"name": "Go",
"bytes": "5312"
},
{
"name": "Groovy",
"bytes": "4478"
},
{
"name": "HTML",
"bytes": "358425"
},
{
"name": "Haskell",
"bytes": "3880"
},
{
"name": "Java",
"bytes": "615231"
},
{
"name": "JavaScript",
"bytes": "6232832"
},
{
"name": "Kotlin",
"bytes": "86837"
},
{
"name": "LFE",
"bytes": "7144"
},
{
"name": "Makefile",
"bytes": "3689"
},
{
"name": "OCaml",
"bytes": "343"
},
{
"name": "PHP",
"bytes": "5219"
},
{
"name": "PLSQL",
"bytes": "3927"
},
{
"name": "PLpgSQL",
"bytes": "11008"
},
{
"name": "Pascal",
"bytes": "16226"
},
{
"name": "Perl",
"bytes": "4384"
},
{
"name": "PowerShell",
"bytes": "133352"
},
{
"name": "Python",
"bytes": "12507"
},
{
"name": "R",
"bytes": "3160"
},
{
"name": "Raku",
"bytes": "3281"
},
{
"name": "Ruby",
"bytes": "3370"
},
{
"name": "Rust",
"bytes": "3871"
},
{
"name": "SCSS",
"bytes": "8375"
},
{
"name": "Scala",
"bytes": "3454"
},
{
"name": "Scheme",
"bytes": "5368"
},
{
"name": "Shell",
"bytes": "13402"
},
{
"name": "Smalltalk",
"bytes": "8897"
},
{
"name": "Smarty",
"bytes": "3440"
},
{
"name": "Standard ML",
"bytes": "191"
},
{
"name": "Swift",
"bytes": "4487"
},
{
"name": "TypeScript",
"bytes": "202420"
},
{
"name": "Visual Basic .NET",
"bytes": "5245"
},
{
"name": "Vue",
"bytes": "1985"
},
{
"name": "XSLT",
"bytes": "19568"
}
],
"symlink_target": ""
} |
'''
Test bundled filters
'''
import unittest, tempfile, shutil
from ansible import playbook, inventory, callbacks
INVENTORY = inventory.Inventory(['localhost'])
BOOK = '''
- hosts: localhost
vars:
var: { a: [1,2,3] }
tasks:
- template: src=%s dest=%s
'''
SRC = '''
-
{{ var|to_json }}
-
{{ var|to_nice_json }}
-
{{ var|to_yaml }}
-
{{ var|to_nice_yaml }}
'''
DEST = '''
-
{"a": [1, 2, 3]}
-
{
"a": [
1,
2,
3
]
}
-
a: [1, 2, 3]
-
a:
- 1
- 2
- 3
'''
class TestFilters(unittest.TestCase):
    """Tests for the bundled template filters (to_json, to_nice_json, ...).

    The playbook-driven test itself is disabled below; only the temp-file
    helpers remain active.
    """

    def setUp(self):
        # Fresh scratch directory for each test.
        self.tmpdir = tempfile.mkdtemp(dir='/tmp')

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def temp(self, name, data=''):
        '''write a temporary file and return the name'''
        name = self.tmpdir + '/' + name
        with open(name, 'w') as f:
            f.write(data)
        return name

    #def test_filters(self):
        # this test is pretty low level using a playbook, hence I am disabling it for now -- MPD.
        #return
        #src = self.temp('src.j2', SRC)
        #dest = self.temp('dest.txt')
        #book = self.temp('book', BOOK % (src, dest))
        #playbook.PlayBook(
        #    playbook = book,
        #    inventory = INVENTORY,
        #    transport = 'local',
        #    callbacks = callbacks.PlaybookCallbacks(),
        #    runner_callbacks = callbacks.DefaultRunnerCallbacks(),
        #    stats = callbacks.AggregateStats(),
        #).run()
        # BUG FIX: the two lines below were left uncommented when the test
        # was disabled; they sat unreachable after `return name` in temp()
        # and referenced the undefined name `dest`. Commented out with the
        # rest of the disabled test.
        #out = open(dest).read()
        #self.assertEqual(DEST, out)
| {
"content_hash": "56d0f90da8e8b78f73aa112f5504ed82",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 97,
"avg_line_length": 18.270588235294117,
"alnum_prop": 0.5325177076625885,
"repo_name": "dlab-berkeley/collaboratool-archive",
"id": "4bd5b32fa8dcefe0a345080dfccc1ca211883a7a",
"size": "1553",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "bsd2/vagrant-ansible/ansible/test/TestFilters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18154"
},
{
"name": "JavaScript",
"bytes": "30509"
},
{
"name": "Perl",
"bytes": "23315"
},
{
"name": "Puppet",
"bytes": "2252"
},
{
"name": "Python",
"bytes": "684123"
},
{
"name": "Ruby",
"bytes": "11103"
},
{
"name": "Shell",
"bytes": "6980"
}
],
"symlink_target": ""
} |
"""Support for Apple HomeKit."""
import ipaddress
import logging
from zlib import adler32
import voluptuous as vol
from homeassistant.components import cover
from homeassistant.components.media_player import DEVICE_CLASS_TV
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
ATTR_UNIT_OF_MEASUREMENT,
CONF_IP_ADDRESS,
CONF_NAME,
CONF_PORT,
CONF_TYPE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import FILTER_SCHEMA
from homeassistant.util import get_local_ip
from homeassistant.util.decorator import Registry
from .const import (
BRIDGE_NAME,
CONF_ADVERTISE_IP,
CONF_AUTO_START,
CONF_ENTITY_CONFIG,
CONF_FEATURE_LIST,
CONF_FILTER,
CONF_SAFE_MODE,
DEFAULT_AUTO_START,
DEFAULT_PORT,
DEFAULT_SAFE_MODE,
DEVICE_CLASS_CO,
DEVICE_CLASS_CO2,
DEVICE_CLASS_PM25,
DOMAIN,
HOMEKIT_FILE,
SERVICE_HOMEKIT_START,
SERVICE_HOMEKIT_RESET_ACCESSORY,
TYPE_FAUCET,
TYPE_OUTLET,
TYPE_SHOWER,
TYPE_SPRINKLER,
TYPE_SWITCH,
TYPE_VALVE,
)
from .util import (
show_setup_message,
validate_entity_config,
validate_media_player_features,
)
_LOGGER = logging.getLogger(__name__)
# HomeKit bridges handle a limited accessory count; start() warns past this.
MAX_DEVICES = 100
# Registry mapping accessory type names to classes; populated by the
# type_* modules and consumed by get_accessory().
TYPES = Registry()
# #### Driver Status ####
STATUS_READY = 0
STATUS_RUNNING = 1
STATUS_STOPPED = 2
STATUS_WAIT = 3
# Maps the configured switch "type" option to a HomeKit accessory type name.
SWITCH_TYPES = {
    TYPE_FAUCET: "Valve",
    TYPE_OUTLET: "Outlet",
    TYPE_SHOWER: "Valve",
    TYPE_SPRINKLER: "Valve",
    TYPE_SWITCH: "Switch",
    TYPE_VALVE: "Valve",
}
# configuration.yaml schema for the homekit: block.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.All(
            {
                vol.Optional(CONF_NAME, default=BRIDGE_NAME): vol.All(
                    cv.string, vol.Length(min=3, max=25)
                ),
                vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
                vol.Optional(CONF_IP_ADDRESS): vol.All(ipaddress.ip_address, cv.string),
                vol.Optional(CONF_ADVERTISE_IP): vol.All(
                    ipaddress.ip_address, cv.string
                ),
                vol.Optional(CONF_AUTO_START, default=DEFAULT_AUTO_START): cv.boolean,
                vol.Optional(CONF_SAFE_MODE, default=DEFAULT_SAFE_MODE): cv.boolean,
                vol.Optional(CONF_FILTER, default={}): FILTER_SCHEMA,
                vol.Optional(CONF_ENTITY_CONFIG, default={}): validate_entity_config,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
# Schema for the homekit.reset_accessory service call.
RESET_ACCESSORY_SERVICE_SCHEMA = vol.Schema(
    {vol.Required(ATTR_ENTITY_ID): cv.entity_ids}
)
async def async_setup(hass, config):
    """Set up the HomeKit component."""
    _LOGGER.debug("Begin setup HomeKit")
    conf = config[DOMAIN]
    name = conf[CONF_NAME]
    port = conf[CONF_PORT]
    ip_address = conf.get(CONF_IP_ADDRESS)
    advertise_ip = conf.get(CONF_ADVERTISE_IP)
    auto_start = conf[CONF_AUTO_START]
    safe_mode = conf[CONF_SAFE_MODE]
    entity_filter = conf[CONF_FILTER]
    entity_config = conf[CONF_ENTITY_CONFIG]
    homekit = HomeKit(
        hass,
        name,
        port,
        ip_address,
        entity_filter,
        entity_config,
        safe_mode,
        advertise_ip,
    )
    # setup() does blocking work (driver/bridge creation), so keep it off
    # the event loop.
    await hass.async_add_executor_job(homekit.setup)
    def handle_homekit_reset_accessory(service):
        """Handle the homekit.reset_accessory service call."""
        if homekit.status != STATUS_RUNNING:
            _LOGGER.warning(
                "HomeKit is not running. Either it is waiting to be "
                "started or has been stopped."
            )
            return
        entity_ids = service.data.get("entity_id")
        homekit.reset_accessories(entity_ids)
    hass.services.async_register(
        DOMAIN,
        SERVICE_HOMEKIT_RESET_ACCESSORY,
        handle_homekit_reset_accessory,
        schema=RESET_ACCESSORY_SERVICE_SCHEMA,
    )
    if auto_start:
        # Auto-start mode: the bridge starts with Home Assistant, so the
        # manual start service below is not registered.
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, homekit.start)
        return True
    def handle_homekit_service_start(service):
        """Handle start HomeKit service call."""
        if homekit.status != STATUS_READY:
            _LOGGER.warning(
                "HomeKit is not ready. Either it is already running or has "
                "been stopped."
            )
            return
        homekit.start()
    hass.services.async_register(
        DOMAIN, SERVICE_HOMEKIT_START, handle_homekit_service_start
    )
    return True
def get_accessory(hass, driver, state, aid, config):
    """Take state and return an accessory object if supported."""
    if not aid:
        # aid is None/0 when generate_aid() rejected the entity_id.
        _LOGGER.warning(
            'The entity "%s" is not supported, since it '
            "generates an invalid aid, please change it.",
            state.entity_id,
        )
        return None
    a_type = None
    name = config.get(CONF_NAME, state.name)
    # Map the entity's domain (plus device class / supported features where
    # relevant) onto the name of a registered accessory type.
    if state.domain == "alarm_control_panel":
        a_type = "SecuritySystem"
    elif state.domain in ("binary_sensor", "device_tracker", "person"):
        a_type = "BinarySensor"
    elif state.domain == "climate":
        a_type = "Thermostat"
    elif state.domain == "cover":
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        if device_class == "garage" and features & (
            cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE
        ):
            a_type = "GarageDoorOpener"
        elif features & cover.SUPPORT_SET_POSITION:
            a_type = "WindowCovering"
        elif features & (cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE):
            a_type = "WindowCoveringBasic"
    elif state.domain == "fan":
        a_type = "Fan"
    elif state.domain == "light":
        a_type = "Light"
    elif state.domain == "lock":
        a_type = "Lock"
    elif state.domain == "media_player":
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        feature_list = config.get(CONF_FEATURE_LIST)
        if device_class == DEVICE_CLASS_TV:
            a_type = "TelevisionMediaPlayer"
        else:
            if feature_list and validate_media_player_features(state, feature_list):
                a_type = "MediaPlayer"
    elif state.domain == "sensor":
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        # Sensors are classified by device class first, falling back to the
        # unit of measurement or a substring of the entity_id.
        if device_class == DEVICE_CLASS_TEMPERATURE or unit in (
            TEMP_CELSIUS,
            TEMP_FAHRENHEIT,
        ):
            a_type = "TemperatureSensor"
        elif device_class == DEVICE_CLASS_HUMIDITY and unit == "%":
            a_type = "HumiditySensor"
        elif device_class == DEVICE_CLASS_PM25 or DEVICE_CLASS_PM25 in state.entity_id:
            a_type = "AirQualitySensor"
        elif device_class == DEVICE_CLASS_CO:
            a_type = "CarbonMonoxideSensor"
        elif device_class == DEVICE_CLASS_CO2 or DEVICE_CLASS_CO2 in state.entity_id:
            a_type = "CarbonDioxideSensor"
        elif device_class == DEVICE_CLASS_ILLUMINANCE or unit in ("lm", "lx"):
            a_type = "LightSensor"
    elif state.domain == "switch":
        switch_type = config.get(CONF_TYPE, TYPE_SWITCH)
        a_type = SWITCH_TYPES[switch_type]
    elif state.domain in ("automation", "input_boolean", "remote", "scene", "script"):
        a_type = "Switch"
    elif state.domain == "water_heater":
        a_type = "WaterHeater"
    if a_type is None:
        return None
    _LOGGER.debug('Add "%s" as "%s"', state.entity_id, a_type)
    # Instantiate the accessory class registered under a_type.
    return TYPES[a_type](hass, driver, name, state.entity_id, aid, config)
def generate_aid(entity_id):
    """Derive a deterministic accessory aid from the entity id (adler32)."""
    checksum = adler32(entity_id.encode("utf-8"))
    # Checksums of 0 or 1 are rejected; callers treat None as "unsupported".
    return None if checksum in (0, 1) else checksum
class HomeKit:
    """Class to handle all actions between HomeKit and Home Assistant."""
    def __init__(
        self,
        hass,
        name,
        port,
        ip_address,
        entity_filter,
        entity_config,
        safe_mode,
        advertise_ip=None,
    ):
        """Initialize a HomeKit object."""
        self.hass = hass
        self._name = name
        self._port = port
        self._ip_address = ip_address
        self._filter = entity_filter
        self._config = entity_config
        self._safe_mode = safe_mode
        self._advertise_ip = advertise_ip
        # Lifecycle: READY -> WAIT -> RUNNING -> STOPPED (see start()/stop()).
        self.status = STATUS_READY
        self.bridge = None
        self.driver = None
    def setup(self):
        """Set up bridge and accessory driver."""
        # Imported here rather than at module level to defer loading the
        # accessory machinery until setup actually runs.
        from .accessories import HomeBridge, HomeDriver
        self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.stop)
        ip_addr = self._ip_address or get_local_ip()
        path = self.hass.config.path(HOMEKIT_FILE)
        self.driver = HomeDriver(
            self.hass,
            address=ip_addr,
            port=self._port,
            persist_file=path,
            advertised_address=self._advertise_ip,
        )
        self.bridge = HomeBridge(self.hass, self.driver, self._name)
        if self._safe_mode:
            _LOGGER.debug("Safe_mode selected")
            self.driver.safe_mode = True
    def reset_accessories(self, entity_ids):
        """Reset the accessory to load the latest configuration."""
        removed = []
        for entity_id in entity_ids:
            aid = generate_aid(entity_id)
            if aid not in self.bridge.accessories:
                _LOGGER.warning(
                    "Could not reset accessory. entity_id " "not found %s", entity_id
                )
                continue
            acc = self.remove_bridge_accessory(aid)
            removed.append(acc)
        # NOTE(review): config_changed() appears to publish the accessory-set
        # change to paired clients; it is called once after removal and once
        # after re-adding — confirm against the HAP driver documentation.
        self.driver.config_changed()
        for acc in removed:
            self.bridge.add_accessory(acc)
        self.driver.config_changed()
    def add_bridge_accessory(self, state):
        """Try adding accessory to bridge if configured beforehand."""
        if not state or not self._filter(state.entity_id):
            return
        aid = generate_aid(state.entity_id)
        # pop() so per-entity config is consumed at most once.
        conf = self._config.pop(state.entity_id, {})
        acc = get_accessory(self.hass, self.driver, state, aid, conf)
        if acc is not None:
            self.bridge.add_accessory(acc)
    def remove_bridge_accessory(self, aid):
        """Remove and return the accessory with the given aid, if present."""
        acc = None
        if aid in self.bridge.accessories:
            acc = self.bridge.accessories.pop(aid)
        return acc
    def start(self, *args):
        """Start the accessory driver."""
        if self.status != STATUS_READY:
            return
        self.status = STATUS_WAIT
        # pylint: disable=unused-import
        # Importing the type_* modules populates the TYPES registry that
        # get_accessory() draws from.
        from . import (  # noqa: F401
            type_covers,
            type_fans,
            type_lights,
            type_locks,
            type_media_players,
            type_security_systems,
            type_sensors,
            type_switches,
            type_thermostats,
        )
        for state in self.hass.states.all():
            self.add_bridge_accessory(state)
        self.driver.add_accessory(self.bridge)
        if not self.driver.state.paired:
            # Not yet paired with any HomeKit controller: show the pin code.
            show_setup_message(self.hass, self.driver.state.pincode)
        if len(self.bridge.accessories) > MAX_DEVICES:
            _LOGGER.warning(
                "You have exceeded the device limit, which might "
                "cause issues. Consider using the filter option."
            )
        _LOGGER.debug("Driver start")
        self.hass.add_job(self.driver.start)
        self.status = STATUS_RUNNING
    def stop(self, *args):
        """Stop the accessory driver."""
        if self.status != STATUS_RUNNING:
            return
        self.status = STATUS_STOPPED
        _LOGGER.debug("Driver stop")
        self.hass.add_job(self.driver.stop)
| {
"content_hash": "104be40a982774e80da91d3726be1857",
"timestamp": "",
"source": "github",
"line_count": 402,
"max_line_length": 88,
"avg_line_length": 30,
"alnum_prop": 0.5961028192371476,
"repo_name": "qedi-r/home-assistant",
"id": "bb525271cec3e77dc62eafc5c186ac67a650cd7c",
"size": "12060",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/homekit/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18564720"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
from phyllo.phyllo_logger import logger
# this works!
def getBooks(soup):
    """Collect absolute URLs of the works linked from a collection page.

    Site-navigation links (index/classics/misc) are filtered out before
    returning the list.
    """
    siteURL = 'http://www.thelatinlibrary.com'
    # Navigation pages that are not texts.
    excluded = {
        "http://www.thelatinlibrary.com/index.html",
        "http://www.thelatinlibrary.com/classics.html",
        "http://www.thelatinlibrary.com/misc.html",
    }
    # BUG FIX: the original while/remove loop only triggered when index.html
    # was present and raised ValueError when classics.html or misc.html was
    # absent; filtering during collection handles every combination.
    textsURL = []
    for a in soup.find_all('a', href=True):
        url = "{}/{}".format(siteURL, a['href'])
        if url not in excluded:
            textsURL.append(url)
    logger.info("\n".join(textsURL))
    return textsURL
def main():
    """Scrape Hyginus' works from The Latin Library into the texts SQLite DB.

    Each work is fetched, split into chapter/verse rows depending on the
    work's layout, and inserted into the shared `texts` table.
    """
    # The collection URL below.
    collURL = 'http://thelatinlibrary.com/hyginus.html'
    collOpen = urllib.request.urlopen(collURL)
    collSOUP = BeautifulSoup(collOpen, 'html5lib')
    author = collSOUP.title.string.strip()
    colltitle = 'GAIVS JULIVS HYGINVS'
    # Strip parentheses and normalize the en dash in the date span.
    date = collSOUP.span.string.strip().replace('(', '').replace(')', '').replace(u"\u2013", '-')
    textsURL = getBooks(collSOUP)
    with sqlite3.connect('texts.db') as db:
        c = db.cursor()
        c.execute(
            'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
            ' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
            ' link TEXT, documentType TEXT)')
        # Re-scrape from scratch: drop any previous rows for this author.
        c.execute("DELETE FROM texts WHERE author = 'Hyginus'")
        for url in textsURL:
            openurl = urllib.request.urlopen(url)
            textsoup = BeautifulSoup(openurl, 'html5lib')
            title = textsoup.title.string.split(':')[1].strip()
            chapter = -1
            verse = 0
            # Branch 1: de Astronomia — chapter numbers sit in <b> tags and
            # the verse text follows the tag as a sibling node.
            if title.startswith("de Astronomia"):
                getp = textsoup.find_all('p')
                for p in getp:
                    try:
                        if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallboarder', 'margin',
                                                     'internal_navigation']:  # these are not part of the main text
                            continue
                    except:
                        # Paragraph has no class attribute; treat as body text.
                        pass
                    verses = []
                    chapter_f = p.find('b')
                    if chapter_f is not None:
                        chapter = chapter_f.string.strip()
                        verse = 0
                    if p.find('b') is not None:
                        try:
                            text = p.find('b').next_sibling.next_sibling.strip()
                        except:
                            text = p.find('b').next_sibling.strip()
                        verses.append(text)
                    else:
                        text = p.get_text()
                        text = text.strip()
                        verses.append(text)
                    for v in verses:
                        if v.startswith('Hyginus'):
                            continue
                        # verse number assignment.
                        verse += 1
                        c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
                                  (None, colltitle, title, 'Latin', author, date, chapter,
                                   verse, v.strip(), url, 'prose'))
            # Branch 2: Fabulae — <b> tags carry chapter headers only; the
            # header paragraph itself is skipped.
            elif title.startswith("Fabulae"):
                getp = textsoup.find_all('p')
                for p in getp:
                    try:
                        if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallboarder', 'margin',
                                                     'internal_navigation']:  # these are not part of the main text
                            continue
                    except:
                        pass
                    verses = []
                    chapter_f = p.find('b')
                    if chapter_f is not None:
                        chapter = chapter_f.string.strip()
                        verse = 0
                        continue
                    else:
                        text = p.get_text()
                        text = text.strip()
                        verses.append(text)
                    for v in verses:
                        if v.startswith('Hyginus'):
                            continue
                        if v is None or v == '' or v.isspace():
                            continue
                        # verse number assignment.
                        verse += 1
                        c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
                                  (None, colltitle, title, 'Latin', author, date, chapter,
                                   verse, v.strip(), url, 'prose'))
            # Branch 3: everything else — each paragraph becomes one verse.
            else:
                getp = textsoup.find_all('p')
                for p in getp:
                    try:
                        if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallboarder', 'margin',
                                                     'internal_navigation']:  # these are not part of the main text
                            continue
                    except:
                        pass
                    verses = []
                    text = p.get_text()
                    try:
                        # NOTE(review): str.split does not interpret this as a
                        # regex, so the literal pattern is almost never found
                        # and this raises IndexError; the except branch runs
                        # instead. re.split was likely intended — confirm
                        # before changing, as it would alter stored passages.
                        text = text.split(r"[0-9]+\.")[1].strip()
                    except:
                        text = text.strip()
                    verses.append(text)
                    for v in verses:
                        if v.startswith('Hyginus'):
                            continue
                        # verse number assignment.
                        verse += 1
                        c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
                                  (None, colltitle, title, 'Latin', author, date, chapter,
                                   verse, v.strip(), url, 'prose'))
if __name__ == '__main__':
    main()
| {
"content_hash": "5be851e4e6ea0917f9ed3a5bcae7f664",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 115,
"avg_line_length": 40.97986577181208,
"alnum_prop": 0.43137897150343923,
"repo_name": "oudalab/phyllo",
"id": "ee2c75a4076da86e85de5b70478ecaf9da0caddf",
"size": "6106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phyllo/extractors/hyginusDB.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "919"
},
{
"name": "HTML",
"bytes": "3428"
},
{
"name": "Python",
"bytes": "1253920"
},
{
"name": "Shell",
"bytes": "1077"
}
],
"symlink_target": ""
} |
"""idx requests by status, completed
Revision ID: 7f4a3b8c55d
Revises: 4b96dd9974bb
Create Date: 2014-03-31 17:39:40.858182
"""
# revision identifiers, used by Alembic.
revision = '7f4a3b8c55d'
down_revision = '4b96dd9974bb'
from alembic import op
def upgrade():
    """Create a composite index on requests(status_code, completed_date)."""
    op.create_index('idx_status_complete', 'requests', ['status_code', 'completed_date'])
def downgrade():
    """Drop the idx_status_complete index from the requests table."""
    op.drop_index('idx_status_complete', 'requests')
| {
"content_hash": "5915099df62e1600836f5e23d264b401",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 89,
"avg_line_length": 20.523809523809526,
"alnum_prop": 0.7215777262180975,
"repo_name": "holmes-app/holmes-api",
"id": "8dba792a09be4e60c427faccb75b0363a19b447f",
"size": "431",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "holmes/migrations/versions/7f4a3b8c55d_idx_requests_by_status_completed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "212454"
},
{
"name": "Makefile",
"bytes": "11334"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "809395"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
import datetime
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
# User overrides taken from Django settings (None when not configured).
USER_SETTINGS = getattr(settings, "REST_FRAMEWORK_SSO", None)
# Default values for every supported setting; entries listed in
# IMPORT_STRINGS below are dotted paths resolved to callables by APISettings.
DEFAULTS = {
    "CREATE_SESSION_PAYLOAD": "rest_framework_sso.utils.create_session_payload",
    "CREATE_AUTHORIZATION_PAYLOAD": "rest_framework_sso.utils.create_authorization_payload",
    "ENCODE_JWT_TOKEN": "rest_framework_sso.utils.encode_jwt_token",
    "DECODE_JWT_TOKEN": "rest_framework_sso.utils.decode_jwt_token",
    "AUTHENTICATE_PAYLOAD": "rest_framework_sso.utils.authenticate_payload",
    "ENCODE_ALGORITHM": "RS256",
    "DECODE_ALGORITHMS": None,
    "VERIFY_SIGNATURE": True,
    "VERIFY_EXPIRATION": True,
    "VERIFY_ISSUER": True,
    "VERIFY_AUDIENCE": True,
    "VERIFY_SESSION_TOKEN": True,
    "EXPIRATION_LEEWAY": 0,
    "SESSION_EXPIRATION": None,
    "AUTHORIZATION_EXPIRATION": datetime.timedelta(seconds=300),
    "IDENTITY": None,
    "SESSION_AUDIENCE": None,
    "AUTHORIZATION_AUDIENCE": None,
    "ACCEPTED_ISSUERS": None,
    "KEY_STORE_ROOT": None,
    "PUBLIC_KEYS": {},
    "PRIVATE_KEYS": {},
    "AUTHENTICATE_HEADER": "JWT",
}
# List of settings that may be in string import notation.
IMPORT_STRINGS = (
    "CREATE_SESSION_PAYLOAD",
    "CREATE_AUTHORIZATION_PAYLOAD",
    "ENCODE_JWT_TOKEN",
    "DECODE_JWT_TOKEN",
    "AUTHENTICATE_PAYLOAD",
)
# Module-level settings object; rebuilt by reload_api_settings on change.
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
def reload_api_settings(*args, **kwargs):
    """Rebuild ``api_settings`` when the REST_FRAMEWORK_SSO setting changes.

    Connected to Django's ``setting_changed`` signal so that overridden
    settings (e.g. in tests) take effect immediately.
    """
    global api_settings
    changed_setting = kwargs["setting"]
    new_value = kwargs["value"]
    if changed_setting == "REST_FRAMEWORK_SSO":
        api_settings = APISettings(new_value, DEFAULTS, IMPORT_STRINGS)
setting_changed.connect(reload_api_settings)
| {
"content_hash": "37820b7b136c097473d0fbd9adade7ee",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 92,
"avg_line_length": 31.75438596491228,
"alnum_prop": 0.7027624309392265,
"repo_name": "namespace-ee/django-rest-framework-sso",
"id": "6c3710eca70397e9a93611600e0ac0724f1d3307",
"size": "1826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest_framework_sso/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32649"
},
{
"name": "Shell",
"bytes": "73"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from collections import namedtuple
from pants.backend.jvm.subsystems.jvm_tool_mixin import JvmToolMixin
from pants.backend.jvm.subsystems.zinc_language_mixin import ZincLanguageMixin
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.build_graph.address import Address
from pants.java.jar.jar_dependency import JarDependency
from pants.subsystem.subsystem import Subsystem
# full_version - the full scala version to use.
major_version_info = namedtuple('major_version_info', ['full_version'])
# Note that the compiler has two roles here: as a tool (invoked by the compile task), and as a
# runtime library (when compiling plugins, which require the compiler library as a dependency).
# Supported major versions mapped to the exact point release pinned for each.
scala_build_info = {
  '2.10': major_version_info(full_version='2.10.6'),
  '2.11': major_version_info(full_version='2.11.8'),
  '2.12': major_version_info(full_version='2.12.0'),
}
# Because scalastyle inspects only the sources, it needn't match the platform version.
scala_style_jar = JarDependency('org.scalastyle', 'scalastyle_2.11', '0.8.0')
class ScalaPlatform(JvmToolMixin, ZincLanguageMixin, Subsystem):
"""A scala platform.
:API: public
"""
options_scope = 'scala-platform'
@classmethod
def _create_jardep(cls, name, version):
return JarDependency(org='org.scala-lang',
name=name,
rev=scala_build_info[version].full_version)
  @classmethod
  def _create_runtime_jardep(cls, version):
    """Jar dependency for the scala runtime library (scala-library)."""
    return cls._create_jardep('scala-library', version)
  @classmethod
  def _create_compiler_jardep(cls, version):
    """Jar dependency for the scala compiler (scala-compiler)."""
    return cls._create_jardep('scala-compiler', version)
@classmethod
def _key_for_tool_version(cls, tool, version):
if version == 'custom':
return tool
else:
return '{}_{}'.format(tool, version.replace('.', '_'))
  @classmethod
  def register_options(cls, register):
    """Register the --version/--suffix-version options plus the scalac,
    scala-repl and scalastyle JVM tools for every supported version."""
    def register_scala_compiler_tool(version):
      # One scalac tool registration per fixed scala version.
      cls.register_jvm_tool(register,
                            cls._key_for_tool_version('scalac', version),
                            classpath=[cls._create_compiler_jardep(version)])
    def register_scala_repl_tool(version, with_jline=False):
      classpath = [cls._create_compiler_jardep(version)]  # Note: the REPL is in the compiler jar.
      if with_jline:
        jline_dep = JarDependency(
          org = 'org.scala-lang',
          name = 'jline',
          rev = scala_build_info[version].full_version
        )
        classpath.append(jline_dep)
      cls.register_jvm_tool(register,
                            cls._key_for_tool_version('scala-repl', version),
                            classpath=classpath)
    def register_style_tool(version):
      cls.register_jvm_tool(register,
                            cls._key_for_tool_version('scalastyle', version),
                            classpath=[scala_style_jar])
    super(ScalaPlatform, cls).register_options(register)
    register('--version', advanced=True, default='2.12',
             choices=['2.10', '2.11', '2.12', 'custom'], fingerprint=True,
             help='The scala platform version. If --version=custom, the targets '
                  '//:scala-library, //:scalac, //:scala-repl and //:scalastyle will be used, '
                  'and must exist. Otherwise, defaults for the specified version will be used.')
    register('--suffix-version', advanced=True, default=None,
             help='Scala suffix to be used in `scala_jar` definitions. For example, specifying '
                  '`2.11` or `2.12.0-RC1` would cause `scala_jar` lookups for artifacts with '
                  'those suffixes.')
    # Register the fixed version tools.
    register_scala_compiler_tool('2.10')
    register_scala_repl_tool('2.10', with_jline=True)  # 2.10 repl requires jline.
    register_style_tool('2.10')
    register_scala_compiler_tool('2.11')
    register_scala_repl_tool('2.11')
    register_style_tool('2.11')
    register_scala_compiler_tool('2.12')
    register_scala_repl_tool('2.12')
    register_style_tool('2.12')
    # Register the custom tools. We provide a dummy classpath, so that register_jvm_tool won't
    # require that a target with the given spec actually exist (not everyone will define custom
    # scala platforms). However if the custom tool is actually resolved, we want that to
    # fail with a useful error, hence the dummy jardep with rev=None.
    def register_custom_tool(key):
      dummy_jardep = JarDependency('missing spec', ' //:{}'.format(key))
      cls.register_jvm_tool(register, cls._key_for_tool_version(key, 'custom'),
                            classpath=[dummy_jardep])
    register_custom_tool('scalac')
    register_custom_tool('scala-repl')
    register_custom_tool('scalastyle')
def _tool_classpath(self, tool, products):
"""Return the proper classpath based on products and scala version."""
return self.tool_classpath_from_products(products,
self._key_for_tool_version(tool, self.version),
scope=self.options_scope)
def compiler_classpath(self, products):
return self._tool_classpath('scalac', products)
def style_classpath(self, products):
return self._tool_classpath('scalastyle', products)
@property
def version(self):
return self.get_options().version
def suffix_version(self, name):
"""Appends the platform version to the given artifact name.
Also validates that the name doesn't already end with the version.
"""
if self.version == 'custom':
suffix = self.get_options().suffix_version
if suffix:
return '{0}_{1}'.format(name, suffix)
else:
raise RuntimeError('Suffix version must be specified if using a custom scala version.'
'Suffix version is used for bootstrapping jars. If a custom '
'scala version is not specified, then the version specified in '
'--scala-platform-suffix-version is used. For example for Scala '
'2.10.7 you would use the suffix version "2.10".')
elif name.endswith(self.version):
raise ValueError('The name "{0}" should not be suffixed with the scala platform version '
'({1}): it will be added automatically.'.format(name, self.version))
return '{0}_{1}'.format(name, self.version)
@property
def repl(self):
"""Return the repl tool key."""
return self._key_for_tool_version('scala-repl', self.version)
@classmethod
def compiler_library_target_spec(cls, buildgraph):
"""Returns a target spec for the scala compiler library.
Synthesizes one into the buildgraph if necessary.
:param pants.build_graph.build_graph.BuildGraph buildgraph: buildgraph object.
:return a target spec:
"""
return cls.global_instance()._library_target_spec(buildgraph, 'scalac',
cls._create_compiler_jardep)
@classmethod
def runtime_library_target_spec(cls, buildgraph):
"""Returns a target spec for the scala runtime library.
Synthesizes one into the buildgraph if necessary.
:param pants.build_graph.build_graph.BuildGraph buildgraph: buildgraph object.
:return a target spec:
"""
return cls.global_instance()._library_target_spec(buildgraph, 'scala-library',
cls._create_runtime_jardep)
def _library_target_spec(self, buildgraph, key, create_jardep_func):
if self.version == 'custom':
return '//:{}'.format(key)
else:
synthetic_address = Address.parse('//:{}-synthetic'.format(key))
if not buildgraph.contains_address(synthetic_address):
jars = [create_jardep_func(self.version)]
buildgraph.inject_synthetic_target(synthetic_address, JarLibrary, jars=jars, scope='forced')
elif not buildgraph.get_target(synthetic_address).is_synthetic:
raise buildgraph.ManualSyntheticTargetError(synthetic_address)
return buildgraph.get_target(synthetic_address).address.spec
| {
"content_hash": "b6538acc75adb1dff87ecc75692a68e3",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 100,
"avg_line_length": 42.055837563451774,
"alnum_prop": 0.6484007242003621,
"repo_name": "peiyuwang/pants",
"id": "b4103d9ba34e25c43bc2b3a2e3c57e0bc332bdee",
"size": "8432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/jvm/subsystems/scala_platform.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "1746"
},
{
"name": "HTML",
"bytes": "78744"
},
{
"name": "Java",
"bytes": "463179"
},
{
"name": "JavaScript",
"bytes": "30784"
},
{
"name": "Protocol Buffer",
"bytes": "4749"
},
{
"name": "Python",
"bytes": "5586816"
},
{
"name": "Rust",
"bytes": "168825"
},
{
"name": "Scala",
"bytes": "79707"
},
{
"name": "Shell",
"bytes": "64292"
},
{
"name": "Thrift",
"bytes": "2183"
}
],
"symlink_target": ""
} |
from django import forms
class SupportForm(forms.Form):
    """Support/contact form.

    Authenticated users always submit under their account email; anonymous
    users must provide an email address explicitly.
    """

    def __init__(self, user, *args, **kwargs):
        # Keep the requesting user so clean_email can prefer the account email.
        self.user = user
        super(SupportForm, self).__init__(*args, **kwargs)

    # Optional at the field level; enforced conditionally in clean_email.
    email = forms.EmailField(
        widget=forms.TextInput(attrs={'placeholder': 'Required'}),
        required=False)

    subject = forms.CharField(
        initial='New Support Message',
        widget=forms.TextInput(attrs={'placeholder': 'Required'}))

    message = forms.CharField(
        widget=forms.Textarea(attrs={'placeholder': 'Required'}))

    def clean_email(self):
        """Return the effective email, requiring one for anonymous users."""
        email = self.cleaned_data['email']
        # If the user is authenticated, use their account email address.
        if self.user.is_authenticated():
            return self.user.email
        # The field is required for non-authenticated users.
        if not email:
            # Fixed: replaced the Python-2-only `raise Class, arg` statement
            # with the call form, which is valid on both Python 2 and 3.
            raise forms.ValidationError('This field is required.')
        return email
| {
"content_hash": "3fa0236996a356221f10a358af69729e",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 67,
"avg_line_length": 30.766666666666666,
"alnum_prop": 0.6273022751895991,
"repo_name": "chop-dbhi/varify",
"id": "a3e86665a36af91e25796f06c08f47a51fdc640d",
"size": "923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "varify/support/forms.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "80972"
},
{
"name": "JavaScript",
"bytes": "2399168"
},
{
"name": "Puppet",
"bytes": "14585"
},
{
"name": "Python",
"bytes": "210110"
},
{
"name": "Ruby",
"bytes": "1186"
},
{
"name": "Shell",
"bytes": "37"
}
],
"symlink_target": ""
} |
"""
.. module:: pytfa
:platform: Unix, Windows
:synopsis: Thermodynamics-based Flux Analysis
.. moduleauthor:: pyTFA team
JSON serialization
"""
import json
import numpy
from .dict import model_from_dict, model_to_dict
class MyEncoder(json.JSONEncoder):
    """
    JSON encoder that understands numpy scalar and array types, which the
    stock json encoder cannot serialize on its own.
    """

    def default(self, obj):
        # Map each numpy type family to the native Python conversion.
        conversions = (
            (numpy.integer, int),
            (numpy.floating, float),
            (numpy.ndarray, lambda arr: arr.tolist()),
        )
        for np_type, convert in conversions:
            if isinstance(obj, np_type):
                return convert(obj)
        # Defer to the base class, which raises TypeError for unknown types.
        return super(MyEncoder, self).default(obj)
def check_json_extension(filepath):
    """Return *filepath* with a '.json' suffix appended if it is missing."""
    return filepath if filepath.endswith('.json') else filepath + '.json'
def save_json_model(model, filepath):
    """Serialize *model* and write it to *filepath* ('.json' suffix added if missing)."""
    target = check_json_extension(filepath)
    serialized = model_to_dict(model)
    with open(target, 'w') as handle:
        json.dump(serialized, handle, cls=MyEncoder)
def load_json_model(filepath):
    """Read a JSON model file ('.json' suffix added if missing) and rebuild the model."""
    target = check_json_extension(filepath)
    with open(target) as handle:
        payload = json.load(handle)
    return model_from_dict(payload)
def json_dumps_model(model):
    """
    Returns a JSON dump as a string

    :param model: the model to serialize
    :return: the JSON string
    """
    return json.dumps(model_to_dict(model), cls=MyEncoder)
def json_loads_model(s):
    """
    Loads a model from a string JSON dump

    :param s: JSON string, as produced by json_dumps_model
    :return: the reconstructed model object
    """
    # Parse the JSON text before handing the dict to the model builder.
    obj = json.loads(s)
return model_from_dict(obj) | {
"content_hash": "a02d217174f01b92f988854f8089e215",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 77,
"avg_line_length": 21.443037974683545,
"alnum_prop": 0.6038961038961039,
"repo_name": "EPFL-LCSB/pytfa",
"id": "d863ba67cc492687ec1a384bbceb0e515df35bed",
"size": "1719",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytfa/io/json.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "135"
},
{
"name": "Dockerfile",
"bytes": "1568"
},
{
"name": "Python",
"bytes": "297754"
},
{
"name": "Shell",
"bytes": "7910"
}
],
"symlink_target": ""
} |
from hashlib import md5
import time
#from sys import stdout
# Timestamp taken at module import; used as the baseline for elapsed-time reporting.
start = time.time()
def part1(word='ffykfhsq', prefix='00000'):
    """Advent of Code 2016 day 5, part 1.

    Mine MD5 digests of ``word + counter``; every digest whose hex form starts
    with ``prefix`` contributes the character right after the prefix to the
    password, until the password is 8 characters long.

    :param word: door id to hash (defaults to the original puzzle input).
    :param prefix: required hex prefix (generalized from the hard-coded '00000').
    :return: the 8-character password string.
    """
    start = time.time()  # local timer instead of the module-load global
    index = 0
    password = ""
    # Fixed: the original `len(password) is not 8` identity-compares an int,
    # which only works by CPython's small-int caching accident; use != instead.
    while len(password) != 8:
        # Hexadecimal digest of our word plus the running counter.
        digest = md5((word + str(index)).encode('utf-8')).hexdigest()
        if digest.startswith(prefix):
            print(time.time() - start)
            # The character immediately after the prefix joins the password.
            password += digest[len(prefix)]
        index += 1
    return password
def part2(word='ffykfhsq', prefix='00000'):
    """Advent of Code 2016 day 5, part 2.

    Like part1, but the character right after the prefix selects the password
    position (it must be '0'..'7' and that slot must still be empty), and the
    following character is the value stored there.

    :param word: door id to hash (defaults to the original puzzle input).
    :param prefix: required hex prefix (generalized from the hard-coded '00000').
    :return: the password as a list of 8 hex characters.
    """
    start = time.time()  # local timer instead of the module-load global
    index = 0
    found = 0
    pos = len(prefix)  # index of the slot digit within the digest
    password = [None] * 8
    while True:
        digest = md5((word + str(index)).encode('utf-8')).hexdigest()
        # Slot digit must be 0-7; chained comparison replaces the ord() checks.
        if digest.startswith(prefix) and '0' <= digest[pos] <= '7':
            slot = int(digest[pos])
            if password[slot] is None:  # first qualifying hash wins the slot
                password[slot] = digest[pos + 1]
                found += 1
                print("Found # {0} t={1:.3f} PASSWORD IS NOW: {2} added {3}".format(
                    found, (time.time() - start), password, digest[pos + 1]))
                if None not in password:  # all 8 slots filled
                    break
        index += 1
    return password
"content_hash": "e25eb1c255ec14428408fbceaaf69a5b",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 130,
"avg_line_length": 35.11538461538461,
"alnum_prop": 0.572289156626506,
"repo_name": "ChbShoot/advent-of-code",
"id": "d164023b17a36ad4c5945c7278302052cdfcbbaf",
"size": "1960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2016/day5.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14202"
}
],
"symlink_target": ""
} |
import sys
import os
import random
import getpass
import subprocess
# Absolute path to the repository root (this script lives one directory below it).
REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Repository directory name; used as the default project name.
REPO_NAME = os.path.basename(REPO_ROOT)
def _replace_in_file(prj_root, file_name, dict):
fpath = prj_root+"/"+file_name
with open(fpath) as f:
s = f.read()
for key in dict.keys():
s = s.replace(key, dict[key])
with open(fpath, 'w') as f:
f.write(s)
if __name__ == '__main__':
    # sys.argv[0] is always present (the script path), so a real argument
    # means len(sys.argv) >= 2. The original `< 1` check could never fire
    # and sys.argv[1] below would raise IndexError when no label was given.
    if len(sys.argv) < 2:
        print("Must give environment label")
        sys.exit(1)
    PRJ_ENV = sys.argv[1]
    PRJ_NAME = REPO_NAME
    PRJ_ROOT = REPO_ROOT.replace(REPO_NAME, PRJ_NAME)

    # Interactive configuration: each value falls back to the previous answer.
    PRJ_DB_NAME = raw_input(u"db name for the project local db ? (defaults to project name) \n")
    if len(PRJ_DB_NAME.strip()) == 0:
        PRJ_DB_NAME = PRJ_NAME
    PRJ_DB_USER = raw_input(u"username for the project local db ? (defaults to db name) \n")
    if len(PRJ_DB_USER.strip()) == 0:
        PRJ_DB_USER = PRJ_DB_NAME
    PRJ_DB_PASSWORD = getpass.getpass(u"password for the project local db ? (defaults to username) \n")
    if len(PRJ_DB_PASSWORD.strip()) == 0:
        PRJ_DB_PASSWORD = PRJ_DB_USER

    CREATE_DB = raw_input(u"you want the db created locally by me ?\n(if you plan to use vagrant say no, we'll create it on the guest later)\n[y/n]\n")
    if CREATE_DB in ('y', 'yes', 'Y', 'YES'):
        process = subprocess.Popen('export PGPASSWORD=%s && createdb -U %s -h localhost %s' % (PRJ_DB_PASSWORD, PRJ_DB_USER, PRJ_DB_NAME,),
                                   shell=True, executable="/bin/bash")
        # Block until createdb finishes so later steps see the database.
        process.wait()

    # Stamp the project name into the config templates.
    _replace_in_file(PRJ_ROOT, 'etc/gunicorn.sh', {'%%PRJ_NAME%%': PRJ_NAME})
    _replace_in_file(PRJ_ROOT, 'etc/nginx.conf', {'%%PRJ_NAME%%': PRJ_NAME})
    _replace_in_file(PRJ_ROOT, 'etc/supervisor.conf', {'%%PRJ_NAME%%': PRJ_NAME})
    _replace_in_file(PRJ_ROOT, 'fabfile.py', {'%%PRJ_NAME%%': PRJ_NAME})
    _replace_in_file(PRJ_ROOT, 'website/settings/base.py', {'%%PRJ_NAME%%': PRJ_NAME})

    env_file_lines = [
        'export PRJ_ENV=%s' % PRJ_ENV,
        '\nexport PRJ_DB_NAME=%s' % PRJ_DB_NAME,
        '\nexport PRJ_DB_USER=%s' % PRJ_DB_USER,
        '\nexport PRJ_DB_PASSWORD=%s' % PRJ_DB_PASSWORD,
        '\nexport PRJ_SECRET_KEY="%s"' % "".join([random.choice(
            "abcdefghijklmnopqrstuvwxyz0123456789!@#%^&*(-_+)") for i in range(50)]),
    ]
    # for plugged_app_label in sys.argv[2:]:
    #     env_file_lines.append('\nexport PRJ_IS_%s=TRUE' % plugged_app_label.upper())

    INIT_GIT = raw_input(u"you just cloned the template project ? (I will remove current git config and create it from scratch in that case) \n[y/n]\n")
    if INIT_GIT in ('y', 'yes', 'Y', 'YES'):
        # Popen.wait() replaces the original busy-wait
        # `while process.poll() == None: pass` loops (same blocking effect,
        # without spinning the CPU).
        subprocess.Popen('cd %s && rm -f .hgignore' % PRJ_ROOT, shell=True, executable="/bin/bash").wait()
        subprocess.Popen('cd %s && rm -fr .hg' % PRJ_ROOT, shell=True, executable="/bin/bash").wait()
        subprocess.Popen('cd %s && rm -fr .git' % PRJ_ROOT, shell=True, executable="/bin/bash").wait()
        subprocess.Popen('cd %s && git init' % PRJ_ROOT, shell=True, executable="/bin/bash").wait()
    PRJ_GIT_REPO = raw_input(u"repo url for the project ? (can be left empty and configured in git later) \n")
    _replace_in_file(PRJ_ROOT, 'Vagrantfile', {'PRJ_GIT_REPO': PRJ_GIT_REPO})
    if PRJ_GIT_REPO:
        subprocess.Popen('cd %s && git remote add origin %s' % (PRJ_ROOT, PRJ_GIT_REPO),
                        shell=True, executable="/bin/bash").wait()
        subprocess.Popen('cd %s && git add .' % PRJ_ROOT, shell=True, executable="/bin/bash")
        env_file_lines.append('\nexport PRJ_GIT_REPO=%s' % PRJ_GIT_REPO)
    with open(os.path.join(REPO_ROOT, '.env'), 'w') as env_file:
        env_file.writelines(env_file_lines)
| {
"content_hash": "5f4f09f8426a8d1fb5624b4d5c142e8c",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 152,
"avg_line_length": 47.188235294117646,
"alnum_prop": 0.5991024682124159,
"repo_name": "b-dev/b-light-base-project",
"id": "f39dee8369760552ba7c7bdc040a5b2c0afcf291",
"size": "4011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/bootstrap.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "39"
},
{
"name": "JavaScript",
"bytes": "45"
},
{
"name": "Python",
"bytes": "33609"
},
{
"name": "Shell",
"bytes": "7682"
}
],
"symlink_target": ""
} |
"""
Sensor for checking the status of Hue sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.hue/
"""
import asyncio
import async_timeout
import logging
import threading
from datetime import timedelta
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
# Requires the core hue integration to be set up first.
DEPENDENCIES = ["hue"]

__version__ = "1.0.5"

_LOGGER = logging.getLogger(__name__)

# Poll interval for async_update_info.
# NOTE(review): 0.1 s is a very aggressive poll rate — confirm it is intended.
SCAN_INTERVAL = timedelta(seconds=0.1)

# Hue sensor type that is filtered out before parsing (never made an entity here).
TYPE_GEOFENCE = "Geofence"

# Frontend icons keyed by the 3-character model-id prefix.
ICONS = {"SML": "mdi:run", "RWL": "mdi:remote", "ZGP": "mdi:remote"}
# Device classes keyed by model prefix (only SML motion sensors have one).
DEVICE_CLASSES = {"SML": "motion"}
# Attribute names exposed per model via device_state_attributes.
ATTRS = {
    "SML": [
        "light_level",
        "battery",
        "last_updated",
        "lx",
        "dark",
        "daylight",
        "temperature",
        "on",
        "reachable",
        "sensitivity",
        "threshold",
    ],
    "RWL": ["last_updated", "battery", "on", "reachable"],
    "ZGP": ["last_updated"],
}
def parse_hue_api_response(sensors):
    """Take in the Hue API json response."""
    data_dict = {}  # The list of sensors, referenced by their hue_id.
    for sensor in sensors:
        model = sensor["modelid"][:3]
        if model not in ("RWL", "SML", "ZGP"):
            continue
        key = model + "_" + sensor["uniqueid"][:-5]
        # NOTE: SML entries pass the filter but have no parser here, so they
        # produce no data (same as the original behavior).
        if model == "RWL":
            data_dict[key] = parse_rwl(sensor)
        elif model == "ZGP":
            data_dict[key] = parse_zgp(sensor)
    return data_dict
def parse_zgp(response):
    """Parse the json response for a ZGPSWITCH Hue Tap."""
    # Button-event codes reported by the Tap, mapped to friendly states.
    button_codes = {34: "1_click", 16: "2_click", 17: "3_click", 18: "4_click"}
    press = response["state"]["buttonevent"]
    button = "No data" if press is None else button_codes[press]
    return {
        "model": "ZGP",
        "name": response["name"],
        "state": button,
        # Split the ISO timestamp into [date, time].
        "last_updated": response["state"]["lastupdated"].split("T"),
    }
def parse_rwl(response):
    """Parse the json response for a RWL Hue remote."""
    # I know it should be _released not _up
    # but _hold_up is too good to miss isn't it
    suffixes = {"0": "_click", "1": "_hold", "2": "_click_up", "3": "_hold_up"}
    button = None
    press = response["state"]["buttonevent"]
    if press:
        # First digit = button number, last digit = press type.
        code = str(press)
        button = code[0] + suffixes[code[-1]]
    return {
        "model": "RWL",
        "name": response["name"],
        "state": button,
        "battery": response["config"]["battery"],
        "on": response["config"]["on"],
        "reachable": response["config"]["reachable"],
        "last_updated": response["state"]["lastupdated"].split("T"),
    }
def get_bridges(hass):
    """Return the configured Hue bridges that have an active API connection."""
    from homeassistant.components import hue
    from homeassistant.components.hue.bridge import HueBridge

    bridges = []
    for entry in hass.data[hue.DOMAIN].values():
        if isinstance(entry, HueBridge) and entry.api:
            bridges.append(entry)
    return bridges
async def update_api(api):
    """Refresh one aiohue endpoint.

    Returns True on success, False when the call times out or aiohue
    reports an error.
    """
    import aiohue

    succeeded = True
    try:
        with async_timeout.timeout(10):
            await api.update()
    except (asyncio.TimeoutError, aiohue.AiohueException) as err:
        _LOGGER.debug("Failed to fetch sensors: %s", err)
        succeeded = False
    return succeeded
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Initialise Hue Bridge connection."""
    manager = HueSensorData(hass, async_add_entities)
    # Prime the sensor data once, then keep polling on a fixed interval.
    await manager.async_update_info()
    async_track_time_interval(hass, manager.async_update_info, SCAN_INTERVAL)
class HueSensorData(object):
    """Get the latest sensor data."""

    def __init__(self, hass, async_add_entities):
        """Initialize the data object."""
        self.hass = hass
        # Non-blocking lock that prevents overlapping async_update_info runs.
        self.lock = threading.Lock()
        # hue_id -> parsed sensor dict; shared by reference with every HueSensor.
        self.data = {}
        # hue_id -> HueSensor entity created for it.
        self.sensors = {}
        self.async_add_entities = async_add_entities

    async def update_bridge(self, bridge):
        # Poll one bridge; refresh self.data, create entities for new sensors
        # and schedule a state update for changed ones.
        available = await update_api(bridge.api.sensors)
        if not available:
            return

        data = parse_hue_api_response(
            sensor.raw
            for sensor in bridge.api.sensors.values()
            if sensor.type != TYPE_GEOFENCE
        )

        # Keys present in this poll but never seen before.
        new_sensors = data.keys() - self.data.keys()
        updated_sensors = []
        for key, new in data.items():
            # Assume changed by default; may be cleared below.
            new['changed'] = True
            old = self.data.get(key)
            # Skip brand-new sensors (handled via new_sensors) and entries
            # equal to the stored one.
            if not old or old == new:
                continue
            updated_sensors.append(key)
            # Same timestamp and state means only secondary attributes moved;
            # flag it so HueSensor.state suppresses the value.
            if (
                old["last_updated"] == new["last_updated"]
                and old["state"] == new["state"]
            ):
                new['changed'] = False
        self.data.update(data)

        new_entities = {
            entity_id: HueSensor(entity_id, self) for entity_id in new_sensors
        }
        if new_entities:
            _LOGGER.debug("Created %s", ", ".join(new_entities.keys()))
            self.sensors.update(new_entities)
            self.async_add_entities(new_entities.values(), True)
        # Push a state refresh for every sensor that changed this cycle.
        for entity_id in updated_sensors:
            self.sensors[entity_id].async_schedule_update_ha_state()

    async def async_update_info(self, now=None):
        """Get the bridge info."""
        # Bail out instead of blocking if a previous run is still in flight.
        locked = self.lock.acquire(False)
        if not locked:
            return
        try:
            bridges = get_bridges(self.hass)
            if not bridges:
                if now:
                    # periodic task
                    await asyncio.sleep(5)
                return
            # Update all bridges concurrently.
            await asyncio.wait(
                [self.update_bridge(bridge) for bridge in bridges], loop=self.hass.loop
            )
        finally:
            self.lock.release()
class HueSensor(Entity):
    """Representation of a single Hue remote/sensor entity."""

    # Fallback icon used when the model has no entry in ICONS.
    ICON = "mdi:run-fast"

    def __init__(self, hue_id, data):
        """Store the sensor key and a reference to the shared data dict."""
        self._hue_id = hue_id
        self._data = data.data  # data is in .data

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the sensor."""
        entry = self._data.get(self._hue_id)
        return entry["name"] if entry else None

    @property
    def state(self):
        """Return the state of the sensor."""
        entry = self._data.get(self._hue_id)
        if entry and entry["changed"]:
            return entry["state"]
        return None

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        entry = self._data.get(self._hue_id)
        if not entry:
            return self.ICON
        return ICONS.get(entry["model"]) or self.ICON

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        entry = self._data.get(self._hue_id)
        if not entry:
            return None
        return DEVICE_CLASSES.get(entry["model"])

    @property
    def device_state_attributes(self):
        """Extra attributes, filtered by the model's entry in ATTRS."""
        entry = self._data.get(self._hue_id)
        if not entry:
            return None
        return {attr: entry.get(attr) for attr in ATTRS.get(entry["model"], [])}
| {
"content_hash": "c0b49422963501c4545efbf4f1ea14ac",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 87,
"avg_line_length": 29.07421875,
"alnum_prop": 0.5641542388821712,
"repo_name": "shire210/Shire-HA",
"id": "3f369fddac93fbd89e2691efca81ba76b9cba033",
"size": "7443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "custom_components/hue_custom/sensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3159"
},
{
"name": "HTML",
"bytes": "35689"
},
{
"name": "JavaScript",
"bytes": "15159"
},
{
"name": "Python",
"bytes": "91361"
}
],
"symlink_target": ""
} |
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class V1PodSpec(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
V1PodSpec - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'volumes': 'list[V1Volume]',
'containers': 'list[V1Container]',
'restart_policy': 'str',
'termination_grace_period_seconds': 'int',
'active_deadline_seconds': 'int',
'dns_policy': 'str',
'node_selector': 'str',
'service_account_name': 'str',
'service_account': 'str',
'node_name': 'str',
'host_network': 'bool',
'host_pid': 'bool',
'host_ipc': 'bool',
'security_context': 'V1PodSecurityContext',
'image_pull_secrets': 'list[V1LocalObjectReference]'
}
self.attribute_map = {
'volumes': 'volumes',
'containers': 'containers',
'restart_policy': 'restartPolicy',
'termination_grace_period_seconds': 'terminationGracePeriodSeconds',
'active_deadline_seconds': 'activeDeadlineSeconds',
'dns_policy': 'dnsPolicy',
'node_selector': 'nodeSelector',
'service_account_name': 'serviceAccountName',
'service_account': 'serviceAccount',
'node_name': 'nodeName',
'host_network': 'hostNetwork',
'host_pid': 'hostPID',
'host_ipc': 'hostIPC',
'security_context': 'securityContext',
'image_pull_secrets': 'imagePullSecrets'
}
self._volumes = None
self._containers = None
self._restart_policy = None
self._termination_grace_period_seconds = None
self._active_deadline_seconds = None
self._dns_policy = None
self._node_selector = None
self._service_account_name = None
self._service_account = None
self._node_name = None
self._host_network = None
self._host_pid = None
self._host_ipc = None
self._security_context = None
self._image_pull_secrets = None
@property
def volumes(self):
"""
Gets the volumes of this V1PodSpec.
List of volumes that can be mounted by containers belonging to the pod. More info: http://releases.k8s.io/HEAD/docs/user-guide/volumes.md
:return: The volumes of this V1PodSpec.
:rtype: list[V1Volume]
"""
return self._volumes
@volumes.setter
def volumes(self, volumes):
"""
Sets the volumes of this V1PodSpec.
List of volumes that can be mounted by containers belonging to the pod. More info: http://releases.k8s.io/HEAD/docs/user-guide/volumes.md
:param volumes: The volumes of this V1PodSpec.
:type: list[V1Volume]
"""
self._volumes = volumes
@property
def containers(self):
"""
Gets the containers of this V1PodSpec.
List of containers belonging to the pod. Containers cannot currently be added or removed. There must be at least one container in a Pod. Cannot be updated. More info: http://releases.k8s.io/HEAD/docs/user-guide/containers.md
:return: The containers of this V1PodSpec.
:rtype: list[V1Container]
"""
return self._containers
@containers.setter
def containers(self, containers):
"""
Sets the containers of this V1PodSpec.
List of containers belonging to the pod. Containers cannot currently be added or removed. There must be at least one container in a Pod. Cannot be updated. More info: http://releases.k8s.io/HEAD/docs/user-guide/containers.md
:param containers: The containers of this V1PodSpec.
:type: list[V1Container]
"""
self._containers = containers
@property
def restart_policy(self):
"""
Gets the restart_policy of this V1PodSpec.
Restart policy for all containers within the pod. One of Always, OnFailure, Never. Default to Always. More info: http://releases.k8s.io/HEAD/docs/user-guide/pod-states.md#restartpolicy
:return: The restart_policy of this V1PodSpec.
:rtype: str
"""
return self._restart_policy
@restart_policy.setter
def restart_policy(self, restart_policy):
"""
Sets the restart_policy of this V1PodSpec.
Restart policy for all containers within the pod. One of Always, OnFailure, Never. Default to Always. More info: http://releases.k8s.io/HEAD/docs/user-guide/pod-states.md#restartpolicy
:param restart_policy: The restart_policy of this V1PodSpec.
:type: str
"""
self._restart_policy = restart_policy
@property
def termination_grace_period_seconds(self):
"""
Gets the termination_grace_period_seconds of this V1PodSpec.
Optional duration in seconds the pod needs to terminate gracefully. May be decreased in delete request. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period will be used instead. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. Defaults to 30 seconds.
:return: The termination_grace_period_seconds of this V1PodSpec.
:rtype: int
"""
return self._termination_grace_period_seconds
@termination_grace_period_seconds.setter
def termination_grace_period_seconds(self, termination_grace_period_seconds):
"""
Sets the termination_grace_period_seconds of this V1PodSpec.
Optional duration in seconds the pod needs to terminate gracefully. May be decreased in delete request. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period will be used instead. The grace period is the duration in seconds after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. Set this value longer than the expected cleanup time for your process. Defaults to 30 seconds.
:param termination_grace_period_seconds: The termination_grace_period_seconds of this V1PodSpec.
:type: int
"""
self._termination_grace_period_seconds = termination_grace_period_seconds
@property
def active_deadline_seconds(self):
"""
Gets the active_deadline_seconds of this V1PodSpec.
Optional duration in seconds the pod may be active on the node relative to StartTime before the system will actively try to mark it failed and kill associated containers. Value must be a positive integer.
:return: The active_deadline_seconds of this V1PodSpec.
:rtype: int
"""
return self._active_deadline_seconds
@active_deadline_seconds.setter
def active_deadline_seconds(self, active_deadline_seconds):
"""
Sets the active_deadline_seconds of this V1PodSpec.
Optional duration in seconds the pod may be active on the node relative to StartTime before the system will actively try to mark it failed and kill associated containers. Value must be a positive integer.
:param active_deadline_seconds: The active_deadline_seconds of this V1PodSpec.
:type: int
"""
self._active_deadline_seconds = active_deadline_seconds
@property
def dns_policy(self):
"""
Gets the dns_policy of this V1PodSpec.
Set DNS policy for containers within the pod. One of 'ClusterFirst' or 'Default'. Defaults to \"ClusterFirst\".
:return: The dns_policy of this V1PodSpec.
:rtype: str
"""
return self._dns_policy
@dns_policy.setter
def dns_policy(self, dns_policy):
"""
Sets the dns_policy of this V1PodSpec.
Set DNS policy for containers within the pod. One of 'ClusterFirst' or 'Default'. Defaults to \"ClusterFirst\".
:param dns_policy: The dns_policy of this V1PodSpec.
:type: str
"""
self._dns_policy = dns_policy
@property
def node_selector(self):
"""
Gets the node_selector of this V1PodSpec.
NodeSelector is a selector which must be true for the pod to fit on a node. Selector which must match a node's labels for the pod to be scheduled on that node. More info: http://releases.k8s.io/HEAD/docs/user-guide/node-selection/README.md
:return: The node_selector of this V1PodSpec.
:rtype: str
"""
return self._node_selector
@node_selector.setter
def node_selector(self, node_selector):
"""
Sets the node_selector of this V1PodSpec.
NodeSelector is a selector which must be true for the pod to fit on a node. Selector which must match a node's labels for the pod to be scheduled on that node. More info: http://releases.k8s.io/HEAD/docs/user-guide/node-selection/README.md
:param node_selector: The node_selector of this V1PodSpec.
:type: str
"""
self._node_selector = node_selector
@property
def service_account_name(self):
"""
Gets the service_account_name of this V1PodSpec.
ServiceAccountName is the name of the ServiceAccount to use to run this pod. More info: http://releases.k8s.io/HEAD/docs/design/service_accounts.md
:return: The service_account_name of this V1PodSpec.
:rtype: str
"""
return self._service_account_name
@service_account_name.setter
def service_account_name(self, service_account_name):
"""
Sets the service_account_name of this V1PodSpec.
ServiceAccountName is the name of the ServiceAccount to use to run this pod. More info: http://releases.k8s.io/HEAD/docs/design/service_accounts.md
:param service_account_name: The service_account_name of this V1PodSpec.
:type: str
"""
self._service_account_name = service_account_name
@property
def service_account(self):
"""
Gets the service_account of this V1PodSpec.
DeprecatedServiceAccount is a depreciated alias for ServiceAccountName. Deprecated: Use serviceAccountName instead.
:return: The service_account of this V1PodSpec.
:rtype: str
"""
return self._service_account
@service_account.setter
def service_account(self, service_account):
"""
Sets the service_account of this V1PodSpec.
DeprecatedServiceAccount is a depreciated alias for ServiceAccountName. Deprecated: Use serviceAccountName instead.
:param service_account: The service_account of this V1PodSpec.
:type: str
"""
self._service_account = service_account
    # Plain stored-attribute accessor pair; no validation or conversion is applied.
    @property
    def node_name(self):
        """
        Gets the node_name of this V1PodSpec.
        NodeName is a request to schedule this pod onto a specific node. If it is non-empty, the scheduler simply schedules this pod onto that node, assuming that it fits resource requirements.
        :return: The node_name of this V1PodSpec.
        :rtype: str
        """
        return self._node_name
    @node_name.setter
    def node_name(self, node_name):
        """
        Sets the node_name of this V1PodSpec.
        NodeName is a request to schedule this pod onto a specific node. If it is non-empty, the scheduler simply schedules this pod onto that node, assuming that it fits resource requirements.
        :param node_name: The node_name of this V1PodSpec.
        :type: str
        """
        self._node_name = node_name
    # Plain stored-attribute accessor pair; no validation or conversion is applied.
    @property
    def host_network(self):
        """
        Gets the host_network of this V1PodSpec.
        Host networking requested for this pod. Use the host's network namespace. If this option is set, the ports that will be used must be specified. Default to false.
        :return: The host_network of this V1PodSpec.
        :rtype: bool
        """
        return self._host_network
    @host_network.setter
    def host_network(self, host_network):
        """
        Sets the host_network of this V1PodSpec.
        Host networking requested for this pod. Use the host's network namespace. If this option is set, the ports that will be used must be specified. Default to false.
        :param host_network: The host_network of this V1PodSpec.
        :type: bool
        """
        self._host_network = host_network
    # Plain stored-attribute accessor pair; no validation or conversion is applied.
    @property
    def host_pid(self):
        """
        Gets the host_pid of this V1PodSpec.
        Use the host's pid namespace. Optional: Default to false.
        :return: The host_pid of this V1PodSpec.
        :rtype: bool
        """
        return self._host_pid
    @host_pid.setter
    def host_pid(self, host_pid):
        """
        Sets the host_pid of this V1PodSpec.
        Use the host's pid namespace. Optional: Default to false.
        :param host_pid: The host_pid of this V1PodSpec.
        :type: bool
        """
        self._host_pid = host_pid
    # Plain stored-attribute accessor pair; no validation or conversion is applied.
    @property
    def host_ipc(self):
        """
        Gets the host_ipc of this V1PodSpec.
        Use the host's ipc namespace. Optional: Default to false.
        :return: The host_ipc of this V1PodSpec.
        :rtype: bool
        """
        return self._host_ipc
    @host_ipc.setter
    def host_ipc(self, host_ipc):
        """
        Sets the host_ipc of this V1PodSpec.
        Use the host's ipc namespace. Optional: Default to false.
        :param host_ipc: The host_ipc of this V1PodSpec.
        :type: bool
        """
        self._host_ipc = host_ipc
    # Accessor pair for the nested V1PodSecurityContext model object.
    @property
    def security_context(self):
        """
        Gets the security_context of this V1PodSpec.
        SecurityContext holds pod-level security attributes and common container settings. Optional: Defaults to empty. See type description for default values of each field.
        :return: The security_context of this V1PodSpec.
        :rtype: V1PodSecurityContext
        """
        return self._security_context
    @security_context.setter
    def security_context(self, security_context):
        """
        Sets the security_context of this V1PodSpec.
        SecurityContext holds pod-level security attributes and common container settings. Optional: Defaults to empty. See type description for default values of each field.
        :param security_context: The security_context of this V1PodSpec.
        :type: V1PodSecurityContext
        """
        self._security_context = security_context
    # Accessor pair for a list of V1LocalObjectReference model objects.
    @property
    def image_pull_secrets(self):
        """
        Gets the image_pull_secrets of this V1PodSpec.
        ImagePullSecrets is an optional list of references to secrets in the same namespace to use for pulling any of the images used by this PodSpec. If specified, these secrets will be passed to individual puller implementations for them to use. For example, in the case of docker, only DockerConfig type secrets are honored. More info: http://releases.k8s.io/HEAD/docs/user-guide/images.md#specifying-imagepullsecrets-on-a-pod
        :return: The image_pull_secrets of this V1PodSpec.
        :rtype: list[V1LocalObjectReference]
        """
        return self._image_pull_secrets
    @image_pull_secrets.setter
    def image_pull_secrets(self, image_pull_secrets):
        """
        Sets the image_pull_secrets of this V1PodSpec.
        ImagePullSecrets is an optional list of references to secrets in the same namespace to use for pulling any of the images used by this PodSpec. If specified, these secrets will be passed to individual puller implementations for them to use. For example, in the case of docker, only DockerConfig type secrets are honored. More info: http://releases.k8s.io/HEAD/docs/user-guide/images.md#specifying-imagepullsecrets-on-a-pod
        :param image_pull_secrets: The image_pull_secrets of this V1PodSpec.
        :type: list[V1LocalObjectReference]
        """
        self._image_pull_secrets = image_pull_secrets
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        # Pretty-printed dict form via pprint.pformat (imported at module level).
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        # Delegates to __eq__ via the == operator so both stay consistent.
        return not self == other
| {
"content_hash": "532dbd23c3a7255fa2318faf1c8cf1a5",
"timestamp": "",
"source": "github",
"line_count": 462,
"max_line_length": 541,
"avg_line_length": 39.85497835497836,
"alnum_prop": 0.6440558301200239,
"repo_name": "danielfrg/jupyterhub-kubernetes_spawner",
"id": "7e19de8117f26369e5306b0f5ab04fa274f6c3b9",
"size": "18430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes_spawner/swagger_client/models/v1_pod_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1759061"
},
{
"name": "Shell",
"bytes": "133"
}
],
"symlink_target": ""
} |
"""Python binding of SPI wrapper of LetMeCreate library."""
import ctypes
# Handle to the native LetMeCreate core library; all calls go through it.
_LIB = ctypes.CDLL('libletmecreate_core.so')
# SPI_SPEED: the seven clock speeds supported by the SPI driver, in Hz.
# Pass one of these to set_speed(); other values are rounded down by the driver.
SPI_680K = 680000
SPI_1M36 = 1360000
SPI_2M73 = 2730000
SPI_5M46 = 5460000
SPI_10M93 = 10930000
SPI_21M87 = 21870000
SPI_43M75 = 43750000
def init():
    """Initialise SPI on all mikrobus buses.

    Every SPI bus is configured as: 8 bits per word, 2.73MHz, mode 3.
    The current SPI bus is set to MIKROBUS_1.

    Raises an exception if the native call reports an error.
    """
    if _LIB.spi_init() < 0:
        raise Exception("spi init failed")
def set_mode(mikrobus_index, mode):
    """Set the SPI mode of the selected bus.

    The SPI bus must be initialised before calling this function.

    mikrobus_index: 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
    mode: one of 0, 1, 2, 3.

    Raises an exception if the mode cannot be set.
    """
    if _LIB.spi_set_mode(mikrobus_index, mode) < 0:
        raise Exception("spi set mode failed")
def set_speed(mikrobus_index, speed):
    """Set the clock speed of the selected SPI bus.

    The SPI bus must be initialised first.  The driver supports only the
    seven speeds listed in SPI_SPEED; any other value is rounded down to
    the closest supported speed (e.g. 3MHz becomes 2.73MHz).

    mikrobus_index: 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
    speed: clock speed in Hz.

    Raises an exception if the speed cannot be set.
    """
    if _LIB.spi_set_speed(mikrobus_index, speed) < 0:
        raise Exception("spi set speed failed")
def select_bus(mikrobus_index):
    """Select the SPI bus
    mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
    """
    # No return code is checked: the native call reports nothing back.
    _LIB.spi_select_bus(mikrobus_index)
def get_current_bus():
    """Returns the currently selected SPI bus."""
    return _LIB.spi_get_current_bus()
def transfer(tx_data):
    """Transfer data over the current SPI bus and return the received bytes.

    tx_data: a list of bytes to send; the same number of bytes is read back.

    Raises an exception if the native transfer call fails.
    """
    count = len(tx_data)
    # Marshal the payload into C byte arrays; rx is zero-initialised.
    tx_buffer = (ctypes.c_uint8 * count)(*tx_data)
    rx_buffer = (ctypes.c_uint8 * count)()
    if _LIB.spi_transfer(tx_buffer, rx_buffer, count) < 0:
        raise Exception("spi transfer failed")
    return list(rx_buffer)
def get_maximum_tranfer_length():
    """Returns maximum length of a transfer in bytes.
    Note: An exception is thrown if it fails to find the limit.
    """
    # NOTE(review): the "tranfer" spelling matches the native symbol
    # spi_get_maximum_tranfer_length called below; keep the Python name
    # unchanged for API compatibility.
    # The limit is written into a C uint32 passed by reference.
    transfer_length_limit = ctypes.c_uint32(0)
    ret = _LIB.spi_get_maximum_tranfer_length(ctypes.byref(transfer_length_limit))
    if ret < 0:
        raise Exception("spi get maximum tranfer length failed")
    return transfer_length_limit.value
def release():
    """Release all SPI buses.

    Raises an exception if the native release call fails.
    """
    if _LIB.spi_release() < 0:
        raise Exception("spi release failed")
| {
"content_hash": "c182c72e4b8f9c9e7091320be67e2e9b",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 82,
"avg_line_length": 27.3781512605042,
"alnum_prop": 0.6626764886433395,
"repo_name": "francois-berder/PyLetMeCreate",
"id": "decd39fbe4e08f9574f520b96ef6ee999c956491",
"size": "3281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "letmecreate/core/spi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "85404"
}
],
"symlink_target": ""
} |
"""Test for Nest events for the Smart Device Management API.
These tests fake out the subscriber/devicemanager, and are not using a real
pubsub subscriber.
"""
import datetime
from google_nest_sdm.device import Device
from google_nest_sdm.event import EventMessage
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.util.dt import utcnow
from .common import async_setup_sdm_platform
from tests.common import async_capture_events
# Integration domain and fixed identifiers shared by all tests below.
DOMAIN = "nest"
DEVICE_ID = "some-device-id"
PLATFORM = "camera"
NEST_EVENT = "nest_event"
# Canned session/event ids in the shape SDM pubsub payloads use.
EVENT_SESSION_ID = "CjY5Y3VKaTZwR3o4Y19YbTVfMF..."
EVENT_ID = "FWWVQVUdGNUlTU2V4MGV2aTNXV..."
async def async_setup_devices(hass, device_type, traits=None):
    """Set up the platform with a single fake device and return the subscriber.

    device_type: SDM device type string, e.g. "sdm.devices.types.DOORBELL".
    traits: optional traits dict for the device (default: no traits).
            Was a mutable default argument ``{}``; now None-guarded.
    """
    devices = {
        DEVICE_ID: Device.MakeDevice(
            {
                "name": DEVICE_ID,
                "type": device_type,
                "traits": traits if traits is not None else {},
            },
            auth=None,
        ),
    }
    return await async_setup_sdm_platform(hass, PLATFORM, devices=devices)
def create_device_traits(event_traits=None):
    """Create fake traits for a device.

    event_traits: optional list of extra trait names, each added with an
        empty trait body.  Was a mutable default argument ``[]``; now
        None-guarded (behaviour for all existing callers is unchanged).
    """
    result = {
        "sdm.devices.traits.Info": {
            "customName": "Front",
        },
        "sdm.devices.traits.CameraLiveStream": {
            "maxVideoResolution": {
                "width": 640,
                "height": 480,
            },
            "videoCodecs": ["H264"],
            "audioCodecs": ["AAC"],
        },
    }
    result.update({t: {} for t in event_traits or []})
    return result
def create_event(event_type, device_id=DEVICE_ID, timestamp=None):
    """Create an EventMessage for a single event type.

    Fix: ``timestamp`` was accepted but never forwarded, so messages were
    always stamped with ``utcnow()`` inside create_events — callers passing
    an explicit timestamp (and asserting on it) relied on the two clock
    reads landing in the same second.  Forward it through.
    """
    events = {
        event_type: {
            "eventSessionId": EVENT_SESSION_ID,
            "eventId": EVENT_ID,
        },
    }
    return create_events(events=events, device_id=device_id, timestamp=timestamp)
def create_events(events, device_id=DEVICE_ID, timestamp=None):
    """Build an EventMessage wrapping ``events`` for ``device_id``.

    When no timestamp is given, the current UTC time is used.  The
    timestamp is serialized at second resolution.
    """
    when = timestamp or utcnow()
    payload = {
        "eventId": "some-event-id",
        "timestamp": when.isoformat(timespec="seconds"),
        "resourceUpdate": {
            "name": device_id,
            "events": events,
        },
    }
    return EventMessage(payload, auth=None)
async def test_doorbell_chime_event(hass):
    """Test a pubsub message for a doorbell chime event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(["sdm.devices.traits.DoorbellChime"]),
    )
    # The configured doorbell should appear in both registries.
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    assert entry.unique_id == "some-device-id-camera"
    assert entry.original_name == "Front"
    assert entry.domain == "camera"
    device_registry = dr.async_get(hass)
    device = device_registry.async_get(entry.device_id)
    assert device.name == "Front"
    assert device.model == "Doorbell"
    assert device.identifiers == {("nest", DEVICE_ID)}
    # Deliver a chime event and verify it is republished as a NEST_EVENT.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.DoorbellChime.Chime", timestamp=timestamp)
    )
    await hass.async_block_till_done()
    # Message timestamps are serialized at second resolution.
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "doorbell_chime",
        "timestamp": event_time,
        "nest_event_id": EVENT_SESSION_ID,
    }
async def test_camera_motion_event(hass):
    """Test a pubsub message for a camera motion event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.CAMERA",
        create_device_traits(["sdm.devices.traits.CameraMotion"]),
    )
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    # Deliver a motion event and verify the republished NEST_EVENT payload.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.CameraMotion.Motion", timestamp=timestamp)
    )
    await hass.async_block_till_done()
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_motion",
        "timestamp": event_time,
        "nest_event_id": EVENT_SESSION_ID,
    }
async def test_camera_sound_event(hass):
    """Test a pubsub message for a camera sound event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.CAMERA",
        create_device_traits(["sdm.devices.traits.CameraSound"]),
    )
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    # Same flow as the motion test, for the CameraSound trait/event pair.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.CameraSound.Sound", timestamp=timestamp)
    )
    await hass.async_block_till_done()
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_sound",
        "timestamp": event_time,
        "nest_event_id": EVENT_SESSION_ID,
    }
async def test_camera_person_event(hass):
    """Test a pubsub message for a camera person event."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(["sdm.devices.traits.CameraPerson"]),
    )
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    # Same flow again, for the CameraPerson trait/event pair on a doorbell.
    timestamp = utcnow()
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.CameraPerson.Person", timestamp=timestamp)
    )
    await hass.async_block_till_done()
    event_time = timestamp.replace(microsecond=0)
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_person",
        "timestamp": event_time,
        "nest_event_id": EVENT_SESSION_ID,
    }
async def test_camera_multiple_event(hass):
    """Test one pubsub message carrying motion and person events together."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(
            ["sdm.devices.traits.CameraMotion", "sdm.devices.traits.CameraPerson"]
        ),
    )
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    # Two events share one session id inside a single pubsub message.
    event_map = {
        "sdm.devices.events.CameraMotion.Motion": {
            "eventSessionId": EVENT_SESSION_ID,
            "eventId": EVENT_ID,
        },
        "sdm.devices.events.CameraPerson.Person": {
            "eventSessionId": EVENT_SESSION_ID,
            "eventId": EVENT_ID,
        },
    }
    timestamp = utcnow()
    await subscriber.async_receive_event(create_events(event_map, timestamp=timestamp))
    await hass.async_block_till_done()
    event_time = timestamp.replace(microsecond=0)
    # Each embedded event is republished as its own NEST_EVENT.
    assert len(events) == 2
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_motion",
        "timestamp": event_time,
        "nest_event_id": EVENT_SESSION_ID,
    }
    assert events[1].data == {
        "device_id": entry.device_id,
        "type": "camera_person",
        "timestamp": event_time,
        "nest_event_id": EVENT_SESSION_ID,
    }
async def test_unknown_event(hass):
    """Test a pubsub message for an unknown event type."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(["sdm.devices.traits.DoorbellChime"]),
    )
    # An unrecognized event name must be dropped without publishing.
    await subscriber.async_receive_event(create_event("some-event-id"))
    await hass.async_block_till_done()
    assert len(events) == 0
async def test_unknown_device_id(hass):
    """Test a pubsub message addressed to an unknown device id."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(["sdm.devices.traits.DoorbellChime"]),
    )
    # A valid event for a device we don't know must be dropped.
    await subscriber.async_receive_event(
        create_event("sdm.devices.events.DoorbellChime.Chime", "invalid-device-id")
    )
    await hass.async_block_till_done()
    assert len(events) == 0
async def test_event_message_without_device_event(hass):
    """Test a pubsub message that carries no resource update at all."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(["sdm.devices.traits.DoorbellChime"]),
    )
    timestamp = utcnow()
    # Minimal message: eventId + timestamp only, no resourceUpdate section.
    event = EventMessage(
        {
            "eventId": "some-event-id",
            "timestamp": timestamp.isoformat(timespec="seconds"),
        },
        auth=None,
    )
    await subscriber.async_receive_event(event)
    await hass.async_block_till_done()
    assert len(events) == 0
async def test_doorbell_event_thread(hass):
    """Test a series of pubsub messages in the same thread."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(
            [
                "sdm.devices.traits.CameraClipPreview",
                "sdm.devices.traits.CameraPerson",
            ]
        ),
    )
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    # Shared payload for both thread messages: a motion event plus a clip
    # preview in the same event session.
    event_message_data = {
        "eventId": "some-event-id-ignored",
        "resourceUpdate": {
            "name": DEVICE_ID,
            "events": {
                "sdm.devices.events.CameraMotion.Motion": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "eventId": "n:1",
                },
                "sdm.devices.events.CameraClipPreview.ClipPreview": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "previewUrl": "image-url-1",
                },
            },
        },
        "eventThreadId": "CjY5Y3VKaTZwR3o4Y19YbTVfMF...",
        "resourcegroup": [DEVICE_ID],
    }
    # Publish message #1 that starts the event thread
    timestamp1 = utcnow()
    message_data_1 = event_message_data.copy()
    message_data_1.update(
        {
            "timestamp": timestamp1.isoformat(timespec="seconds"),
            "eventThreadState": "STARTED",
        }
    )
    await subscriber.async_receive_event(EventMessage(message_data_1, auth=None))
    # Publish message #2 that sends a no-op update to end the event thread
    timestamp2 = timestamp1 + datetime.timedelta(seconds=1)
    message_data_2 = event_message_data.copy()
    message_data_2.update(
        {
            "timestamp": timestamp2.isoformat(timespec="seconds"),
            "eventThreadState": "ENDED",
        }
    )
    await subscriber.async_receive_event(EventMessage(message_data_2, auth=None))
    await hass.async_block_till_done()
    # The event is only published once
    assert len(events) == 1
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_motion",
        "timestamp": timestamp1.replace(microsecond=0),
        "nest_event_id": EVENT_SESSION_ID,
    }
async def test_doorbell_event_session_update(hass):
    """Test a pubsub message with updates to an existing session."""
    events = async_capture_events(hass, NEST_EVENT)
    subscriber = await async_setup_devices(
        hass,
        "sdm.devices.types.DOORBELL",
        create_device_traits(
            [
                "sdm.devices.traits.CameraClipPreview",
                "sdm.devices.traits.CameraPerson",
                "sdm.devices.traits.CameraMotion",
            ]
        ),
    )
    registry = er.async_get(hass)
    entry = registry.async_get("camera.front")
    assert entry is not None
    # Message #1 has a motion event
    timestamp1 = utcnow()
    await subscriber.async_receive_event(
        create_events(
            {
                "sdm.devices.events.CameraMotion.Motion": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "eventId": "n:1",
                },
                "sdm.devices.events.CameraClipPreview.ClipPreview": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "previewUrl": "image-url-1",
                },
            },
            timestamp=timestamp1,
        )
    )
    # Message #2 has an extra person event
    timestamp2 = utcnow()
    await subscriber.async_receive_event(
        create_events(
            {
                "sdm.devices.events.CameraMotion.Motion": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "eventId": "n:1",
                },
                "sdm.devices.events.CameraPerson.Person": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "eventId": "n:2",
                },
                "sdm.devices.events.CameraClipPreview.ClipPreview": {
                    "eventSessionId": EVENT_SESSION_ID,
                    "previewUrl": "image-url-1",
                },
            },
            timestamp=timestamp2,
        )
    )
    await hass.async_block_till_done()
    # The repeated motion event (same id "n:1") is deduplicated; only the
    # original motion and the new person event are published.
    assert len(events) == 2
    assert events[0].data == {
        "device_id": entry.device_id,
        "type": "camera_motion",
        "timestamp": timestamp1.replace(microsecond=0),
        "nest_event_id": EVENT_SESSION_ID,
    }
    assert events[1].data == {
        "device_id": entry.device_id,
        "type": "camera_person",
        "timestamp": timestamp2.replace(microsecond=0),
        "nest_event_id": EVENT_SESSION_ID,
    }
| {
"content_hash": "76138f4593d9f597503c17ebcdbd3529",
"timestamp": "",
"source": "github",
"line_count": 448,
"max_line_length": 87,
"avg_line_length": 31.408482142857142,
"alnum_prop": 0.5967592921611826,
"repo_name": "home-assistant/home-assistant",
"id": "4a6259991554eab6ab28f6e0d95860f1a5c1524d",
"size": "14071",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/nest/test_events.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20557383"
},
{
"name": "Shell",
"bytes": "6671"
}
],
"symlink_target": ""
} |
# NOTE(review): Python 2 script (uses xrange); runs predictions over a range
# of polarizer memory time scales and appends summary distances to .dat files.
import sys
sys.path.append('/ebio/ag-neher/share/users/rneher/FluPrediction_code/flu/src')
sys.path.append('../../src')
import test_flu_prediction as test_flu
import predict_flu as flu
import tree_utils
import numpy as np
from scipy import stats
import glob,pickle,gzip,os,argparse
from datetime import date
analysis_folder = test_flu.flu_analysis_folder
# parse the commandline arguments
parser = test_flu.make_flu_parser()
params=parser.parse_args()
# '^' is used as a space placeholder on the command line; restore spaces.
params.pred = params.pred.replace('^',' ')
params.test =params.test.replace('^',' ')
params.subsample=0.7
params.diffusion = 1.0
# get run specific file names
fname_base, name_mod = test_flu.get_fname(params)
# allocate arrays to save the predictions
# Memory time scales: powers of two from 2^-6 to 2^3.
m_list = 2.0**np.arange(-6,4, 1)
# Columns: [average, minimal, L&L] + per-m external + per-m internal distances.
nuc_dist_array = np.zeros((params.nreps, 3+2*len(m_list)))
epi_dist_array = np.zeros_like(nuc_dist_array)
top_strains = []
top_strain_method = 'polarizer'
for ii in xrange(params.nreps):
    # set up the prediction and pass all parameters to the wrapper function
    prediction = test_flu.predict_params(['polarizer'],params)
    test_data, test_set = test_flu.make_test_set(prediction, params)
    # define the methodes for which the predictions are to be evaluated
    methods = [ ('polarizer', '_ext', prediction.terminals),
                ('polarizer', '_int', prediction.non_terminals)]
    for mi,mem_time_scale in enumerate(m_list):
        prediction.calculate_polarizers(mem = mem_time_scale)
        distances, distances_epi, test_data = test_flu.evaluate(prediction, methods, params, test_data = test_data, test_set = test_set)
        nuc_dist_array[ii,mi+3] = distances['polarizer_ext']
        epi_dist_array[ii,mi+3] = distances_epi['polarizer_ext']
        nuc_dist_array[ii,mi+3+len(m_list)] = distances['polarizer_int']
        epi_dist_array[ii,mi+3+len(m_list)] = distances_epi['polarizer_int']
    nuc_dist_array[ii,:3] = [distances['average'],distances['minimal'],distances['L&L']]
    epi_dist_array[ii,:3] = [distances_epi['average'],distances_epi['minimal'],distances_epi['L&L']]
    # memorize the strain predicted best
    top_strains.append(prediction.best_node(method = top_strain_method, nodes = prediction.terminals))
#if file does not exist, create and write header
fname_nuc = analysis_folder+'_'.join([fname_base, 'polarizer_nuc.dat'])
if not os.path.isfile(fname_nuc):
    with open(fname_nuc, 'w') as outfile:
        outfile.write('#average\tminimal\tL&L\t'+'\t'.join([m[0]+m[1] for m in methods])+'\n')
#append the results to existing file
with open(fname_nuc, 'a') as outfile:
    np.savetxt(outfile, nuc_dist_array)
#if file does not exist, create and write header
fname_epi =analysis_folder+'_'.join([fname_base, 'polarizer_epi.dat'])
if not os.path.isfile(fname_epi):
    with open(fname_epi, 'w') as outfile:
        outfile.write('#average\tminimal\tL&L\t'+'\t'.join([m[0]+m[1] for m in methods])+'\n')
#append the results to existing file
with open(fname_epi, 'a') as outfile:
    np.savetxt(outfile, epi_dist_array)
# write the best nodes to file:
#fname_strains = analysis_folder+'_'.join([fname_base, name_mod, top_strain_method, 'topstrains.dat'])
#with open(fname_strains, 'a') as outfile:
#    for strain in top_strains:
#        outfile.write(strain.name+'\t'+str(strain.date)+'\n')
| {
"content_hash": "b3eb2a289adf054a1762eb019e111fe9",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 136,
"avg_line_length": 43.86666666666667,
"alnum_prop": 0.6957446808510638,
"repo_name": "rneher/FitnessInference",
"id": "93d72ed0f187216af9b0aa9f2214f4e7a304e1d6",
"size": "4291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flu/src/polarizer_statistics.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5909"
},
{
"name": "Python",
"bytes": "255715"
},
{
"name": "Shell",
"bytes": "34"
}
],
"symlink_target": ""
} |
import sys
import zmq
import time
import pickle
import signal
import numpy as np
def xsrc(port):
    """Publish an incrementing counter on topic b'foo' once per second.

    Connects a zmq PUB socket to localhost:<port> and loops forever,
    sending (topic, pickled counter) multipart messages.
    """
    ctx = zmq.Context()
    pub = ctx.socket(zmq.PUB)
    pub.connect('tcp://localhost:{}'.format(port))
    print(pub)
    counter = 0
    while True:
        topic = b'foo'
        print((topic, counter))
        pub.send_multipart((topic, pickle.dumps(counter)))
        counter += 1
        time.sleep(1)
##########
def main():
    """Entry point: read the port from argv and run the publisher."""
    port = sys.argv[1]
    # Restore default SIGINT handling so Ctrl-C terminates the loop cleanly.
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    xsrc(port)
if __name__ == '__main__':
    main()
| {
"content_hash": "aaf4e787952d8516eeddcebc9f1a5bdf",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 57,
"avg_line_length": 18,
"alnum_prop": 0.5729166666666666,
"repo_name": "rxa254/MoodCube",
"id": "77bd0f85c3db3d3fd54916401ea9d65fa6d79f75",
"size": "600",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "synapse/sources/xsrc.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "13372"
},
{
"name": "Python",
"bytes": "94654"
},
{
"name": "Shell",
"bytes": "286"
}
],
"symlink_target": ""
} |
"""TF: Tensorflow frontend."""
from __future__ import absolute_import as _abs
from __future__ import print_function
import logging
import warnings
from collections import defaultdict
# Numpy support
import numpy as np
import tvm
from topi.util import get_const_tuple
from .. import analysis
from .. import transform as _transform
from .. import expr as _expr
from .. import op as _op
from ..expr_functor import ExprMutator
from .. import module as _module
__all__ = ['from_tensorflow']
def _infer_value(input_val, params):
    """Evaluate a Relay expression to a concrete value.

    Builds ``input_val`` (with its free variables bound from ``params``)
    at opt_level=0 for the local llvm target, runs it, and returns output 0
    as a tvm NDArray.
    """
    from tvm.contrib import graph_runtime
    # Check that all free variables have associated parameters.
    assert all(var.name_hint in params.keys() for var in analysis.free_vars(
        input_val)), "All inputs to infer must be available in params."
    func = _expr.Function(analysis.free_vars(input_val), input_val)
    with tvm.relay.build_config(opt_level=0):
        graph, lib, params = tvm.relay.build(func, target="llvm", params=params)
    ctx = tvm.context("llvm", 0)
    m = graph_runtime.create(graph, lib, ctx)
    m.set_input(**params)
    m.run()
    return m.get_output(0)
def _get_relay_op(op_name):
    """Look up a Relay operator by name.

    Searches the relay op namespaces in order: _op, _op.nn, _op.image.

    Fix: the original let the final getattr's AttributeError escape when the
    operator was missing from all three namespaces, so the intended
    OpNotImplemented error below was unreachable in that case.

    Raises
    ------
    tvm.error.OpNotImplemented
        If the operator is not found in any namespace.
    """
    for namespace in (_op, _op.nn, _op.image):
        op = getattr(namespace, op_name, None)
        if op is not None:
            return op
    raise tvm.error.OpNotImplemented(
        'Operator {} is not supported for frontend TensorFlow.'.format(op_name))
class AttrCvt(object):
    """Common attribute converter. An AttrConverter instance is a callable:
    ```
    attr_converter = AttrConverter(op_name, transforms={'a':'b', 'c':('d', 1)})
    new_op_name, new_attr = attr_converter(attrs)
    ```
    Parameters
    ----------
    op_name : str or callable
        If set as str, returned operator name is the str.
        If set as callable, returned operator is the str returned by calling:
        `op_name = func(attr)`
    transforms : dict of `new_name, or (new_name, default_value, transform function)`
        If only a new_name is provided, it's like renaming the attribute name.
        If default_value if provided, then the attribute is considered as optional.
        If transform function is provided, the original attribute value is handled
        by transform function.
    excludes : list
        A list of excluded attributes that should `NOT` appear.
        Raise NotImplementedError if occurred.
    disables : list
        A list of attributes that is disabled in relay. Log warnings.
    ignores : list
        A list of attributes that is ignored in relay. Debug level logging.
    extras : dict
        A series of additional attributes should be added anyway to the returned
        attribute dict.
    custom_check : callable
        A custom function takes attribute, and return True/False.
        Raise RuntimeError if not bool(True) returned.
    """
    # TensorFlow-internal attributes that are never meaningful to Relay.
    _DEFAULT_IGNORES = ('_output_shapes', '_input_shapes', 'T',
                        'use_cudnn_on_gpu', '_node_name', 'is_training',
                        '_target_layout')
    def __init__(self, op_name, transforms=None,
                 excludes=None, disables=None, ignores=None,
                 extras=None, custom_check=None):
        self._op_name = op_name
        self._transforms = transforms if transforms else {}
        self._excludes = excludes if excludes else []
        self._disables = disables if disables else []
        self._ignores = ignores if ignores else []
        self._extras = extras if extras else {}
        self._custom_check = custom_check
    def __call__(self, inputs, attrs, *args):
        """Convert ``attrs`` per the configured rules and invoke the Relay op."""
        # Fix: the original appended the default ignore names unconditionally
        # on every call, so self._ignores grew with duplicates when the same
        # converter instance was invoked more than once.  Append each once.
        for ignored in self._DEFAULT_IGNORES:
            if ignored not in self._ignores:
                self._ignores.append(ignored)
        # apply custom check
        if self._custom_check:
            func, msg = self._custom_check
            if not func(attrs):
                raise RuntimeError("Check failed: {}".format(msg))
        # get new op_name
        if isinstance(self._op_name, str):
            op_name = self._op_name
        else:
            assert callable(self._op_name), "op_name can either be string or callable"
            op_name = self._op_name(attrs)
        # convert attributes
        new_attrs = {}
        for k in attrs.keys():
            if k in self._excludes:
                raise tvm.error.OpAttributeUnimplemented(
                    'Attribute {} in operator {} is not supported.'.format(k, op_name))
            elif k in self._disables:
                logging.warning("Attribute %s is disabled in relay.%s", k, op_name)
            elif k in self._ignores:
                logging.debug("Attribute %s is ignored in relay.%s", k, op_name)
            elif k in self._transforms:
                new_name, defaults, transform = self._parse_default(self._transforms[k])
                if defaults is None:
                    # No default: the source attribute is required.
                    new_attr = self._required_attr(attrs, k)
                else:
                    new_attr = attrs.get(k, None)
                if new_attr is None:
                    new_attrs[new_name] = defaults
                else:
                    new_attrs[new_name] = transform(new_attr)
            else:
                # copy
                new_attrs[k] = attrs[k]
        # add extras
        new_attrs.update(self._extras)
        return _get_relay_op(op_name)(*inputs, **new_attrs)
    def _parse_default(self, target):
        """Helper function to parse default values.

        Returns a (new_name, default, transform) triple from a transform spec
        that may be a bare name, (name,), (name, default) or
        (name, default, transform).
        """
        if not isinstance(target, (list, tuple)):
            k, v, t = target, None, lambda x: x
        elif len(target) == 1:
            k, v, t = target[0], None, lambda x: x
        elif len(target) == 2:
            k, v, t = target[0], target[1], lambda x: x
        elif len(target) > 2:
            k, v, t = target[0], target[1], target[2]
        else:
            # Empty tuple: fall through to the isinstance check below, which
            # raises ValueError because None is not a str.
            k = None  # should raise
        if not isinstance(k, str):
            msg = "{} is not a valid target, (name, default) expected.".format(target)
            raise ValueError(msg)
        return k, v, t
    def _parse_bool(self, value):
        """Helper function to parse default boolean values."""
        if isinstance(value, str):
            return value.strip().lower() in ['true', '1', 't', 'y', 'yes']
        return bool(value)
    def _required_attr(self, attr, key):
        """Wrapper for getting required attributes."""
        assert isinstance(attr, dict)
        if key not in attr:
            raise tvm.error.OpAttributeRequired(
                'Attribute {} not found in operator {}'.format(key, self._op_name))
        return attr[key]
def _get_pad_pair(input1d, kernel1d, stride1d):
if input1d % stride1d == 0:
pad = max(kernel1d - stride1d, 0)
else:
pad = max(kernel1d - (input1d % stride1d), 0)
pad_before = pad // 2
pad_after = pad - pad_before
return [pad_before, pad_after]
def _get_name_hint(node):
name = ''
if hasattr(node, "name_hint"):
name = node.name_hint
return name
def _math_name_picker(surfix):
def _impl(attr):
return 'broadcast_' + surfix
return _impl
def _dimension_picker(prefix, surfix=''):
def _impl(attr):
kernel = attr['kernel_shape']
if len(kernel) == 2:
return prefix + '2d' + surfix
raise tvm.error.OpAttributeInvalid(
'Only 2D kernels are supported for operator {}'.format(prefix + '2d'))
return _impl
def _dimension_constraint():
def _dim_check(attrs):
if len(attrs['kernel_shape']) == 2:
return True
return False
return _dim_check, "Only 2d kernel supported."
def _infer_channels(node, params, transpose=False):
    """A hack for getting 'channels' or 'units' since tensorflow don't provide
    these attributes. We check the shape of weights provided to get the number.
    """
    out_shape = _infer_shape(node, params)
    return out_shape[1] if transpose else out_shape[0]
def _infer_out_shapes(inputs, params):
    """A method to get the output shape of intermediate nodes in the relay graph."""
    shape = _infer_shape(inputs, params)
    return [shape]
def _infer_type(node):
    """Run the InferType pass on *node* and return the typed expression."""
    mod = _transform.InferType()(_module.Module.from_expr(node))
    entry = mod["main"]
    if isinstance(node, _expr.Function):
        return entry
    return entry.body
def _infer_shape(node, params=None):
    """Return the static output shape of *node* as a tuple of ints."""
    typed = _infer_type(node)
    return get_const_tuple(typed.checked_type.shape)
def _get_param(params, input_node):
return params.pop(input_node.name_hint).asnumpy()
def _get_num_param(params, input_node):
    """Return the popped constant's first (scalar) element."""
    value = _get_param(params, input_node)
    return value[0]
def _get_list_param(params, input_node):
    """Return the popped constant as a plain Python list."""
    value = _get_param(params, input_node)
    return value.tolist()
def _get_tuple_param(params, input_node):
    """Return the popped constant as a tuple."""
    value = _get_param(params, input_node)
    return tuple(value)
def _rsqrt():
    """Convert Rsqrt as pow(x, -0.5)."""
    def _impl(inputs, attr, params):
        exponent = tvm.relay.const(-0.5, attr['T'].name)
        inputs.append(exponent)
        return AttrCvt(op_name="power")(inputs, attr)
    return _impl
def _argx(func, func_name):
    """Shared converter for ArgMin/ArgMax; `axis` comes from a constant input."""
    def _impl(inputs, attr, params):
        try:
            # TF passes `axis` as a scalar-constant tensor input, not an attr.
            axis_value = _get_num_param(params, inputs[1])
        except (IndexError, KeyError):
            raise TypeError(
                "Unsupported argument for `{}` : `axis` should be a constant".format(func_name))
        return func(inputs[0], axis=[axis_value], keepdims=False)
    return _impl
def _elemwise(name):
    """Generic converter for binary elementwise ops."""
    def _impl(inputs, attr, params):
        assert len(inputs) == 2, "{} take 2 inputs, {} given".format(name, len(inputs))
        op_func = _get_relay_op(name)
        return op_func(inputs[0], inputs[1])
    return _impl
def _pooling(name):
    """Convert TF MaxPool/AvgPool ('max_pool'/'avg_pool') to Relay 2-D pooling.

    Normalizes kernel/stride attrs for NHWC/NCHW, optionally transposes the
    data when the target layout is NCHW, and rewrites TF 'SAME'/'VALID'
    padding into explicit [top, left, bottom, right] pad amounts.
    """
    def _impl(inputs, attr, params):
        attr['data_format'] = attr['data_format'].decode("utf-8")
        flip_layout = False
        input_shape = attr['_input_shapes'][inputs[0]]
        if attr['data_format'] == 'NHWC':
            attr['kernel_shape'] = (attr['ksize'][1], attr['ksize'][2])
            attr['strides'] = (attr['strides'][1], attr['strides'][2])
        elif attr['data_format'] == 'NCHW':
            attr['kernel_shape'] = (attr['ksize'][2], attr['ksize'][3])
            attr['strides'] = (attr['strides'][2], attr['strides'][3])
        else:
            msg = 'Value {} of attribute "data_format" of operator Pooling ' \
                  'is not valid.'
            # BUG FIX: this previously read `attrs['data_format']` -- `attrs`
            # is undefined in this scope, so the intended OpAttributeInvalid
            # was masked by a NameError.
            raise tvm.error.OpAttributeInvalid(msg.format(attr['data_format']))
        if attr['_target_layout'] == "NCHW" and attr['data_format'] == "NHWC":
            tmp_shape = attr['_input_shapes'][inputs[0]]
            input_shape = [tmp_shape[ii] for ii in (0, 3, 1, 2)]
            inputs[0] = _op.transpose(inputs[0], axes=(0, 3, 1, 2))
            attr['data_format'] = "NCHW"
            flip_layout = True
        # Rewrite TF padding strings into explicit pad amounts.
        attr['padding'] = attr['padding'].decode("utf-8")
        if attr['padding'] == 'VALID':
            attr['padding'] = [0, 0]
        elif attr['padding'] == 'SAME':
            stride_h, stride_w = attr['strides']
            kernel_h, kernel_w = attr['kernel_shape']
            if attr['data_format'] == 'NHWC':
                in_h = input_shape[1]
                in_w = input_shape[2]
            else:
                in_h = input_shape[2]
                in_w = input_shape[3]
            pad_v = _get_pad_pair(in_h, kernel_h, stride_h)
            pad_h = _get_pad_pair(in_w, kernel_w, stride_w)
            attr['padding'] = [pad_v[0], pad_h[0], pad_v[1], pad_h[1]]
        else:
            msg = 'Value {} in attribute "padding" of operator Pooling is ' \
                  'not valid.'
            raise tvm.error.OpAttributeInvalid(msg.format(attr['padding']))
        if name == "avg_pool":
            # TF average pooling does not count padded elements.
            attr['count_include_pad'] = False
        out = AttrCvt(
            op_name=_dimension_picker(name),
            transforms={
                'kernel_shape': 'pool_size',
                'data_format': 'layout'},
            ignores=['ksize'],
            extras={'ceil_mode': False},
            custom_check=_dimension_constraint())(inputs, attr)
        if flip_layout:
            out = _op.transpose(out, axes=(0, 2, 3, 1))
        return out
    return _impl
def _conv(opname):
    """Convert TF Conv2D ('conv') / DepthwiseConv2dNative ('depthwise') to Relay.

    Handles NHWC<->NCHW layout flips, kernel-layout selection, explicit
    'SAME' padding via nn.pad, and an optional fused bias (third input).
    """
    def _impl(inputs, attr, params):
        attr['data_format'] = attr['data_format'].decode("utf-8")
        flip_layout = False
        # NCHW Layout require weights transpose
        if attr['data_format'] == 'NCHW':
            tmp_shape = attr['_input_shapes'][inputs[1]]
            tmp_shape = [tmp_shape[ii] for ii in (3, 2, 0, 1)]
            inputs[1] = _op.transpose(inputs[1], axes=(3, 2, 0, 1))
            attr['_input_shapes'][inputs[1]] = tmp_shape
        input_shape = attr['_input_shapes'][inputs[0]]
        weights_shape = attr['_input_shapes'][inputs[1]]
        # Graph is NHWC but the requested target layout is NCHW: transpose
        # data and weights now; the result is transposed back at the end.
        if attr['_target_layout'] == "NCHW" and attr['data_format'] == "NHWC":
            input_shape = [input_shape[ii] for ii in (0, 3, 1, 2)]
            inputs[0] = _op.transpose(inputs[0], axes=(0, 3, 1, 2))
            if opname == 'conv':
                weights_shape = [weights_shape[ii] for ii in (3, 2, 0, 1)]
                inputs[1] = _op.transpose(inputs[1], axes=(3, 2, 0, 1))
            else:
                weights_shape = [weights_shape[ii] for ii in (2, 3, 0, 1)]
                inputs[1] = _op.transpose(inputs[1], axes=(2, 3, 0, 1))
            attr['data_format'] = "NCHW"
            attr['strides'] = [attr['strides'][ii] for ii in (0, 3, 1, 2)]
            flip_layout = True
        # Derive kernel_shape / channels / dilations / strides per layout.
        if attr['data_format'] == 'NHWC':
            kernel_h, kernel_w, _, depth_mult = weights_shape
            attr['kernel_shape'] = (weights_shape[0], weights_shape[1])
            if opname == 'conv':
                attr['channels'] = weights_shape[3]
            else:
                attr['channels'] = input_shape[3] * depth_mult
            if 'dilations' in attr:
                attr['dilations'] = (attr['dilations'][1], attr['dilations'][2])
            attr['strides'] = (attr['strides'][1], attr['strides'][2])
        elif attr['data_format'] == 'NCHW':
            depth_mult, _, kernel_h, kernel_w = weights_shape
            attr['kernel_shape'] = (weights_shape[2], weights_shape[3])
            if opname == 'conv':
                attr['channels'] = weights_shape[0]
            else:
                attr['channels'] = input_shape[0] * depth_mult
                # NOTE(review): negative channel counts are flipped positive
                # here -- presumably a guard for unknown (-1) dims; confirm.
                if attr['channels'] < 0:
                    attr['channels'] *= -1
            if 'dilations' in attr:
                attr['dilations'] = (attr['dilations'][2], attr['dilations'][3])
            attr['strides'] = (attr['strides'][2], attr['strides'][3])
        else:
            msg = 'Value {} in attribute "data_format" of operator Conv is ' \
                  'not valid.'
            raise tvm.error.OpAttributeInvalid(msg.format(attr['data_format']))
        if opname == 'depthwise':
            attr['groups'] = attr['channels']
        # Fix padding
        attr['padding'] = attr['padding'].decode("utf-8")
        if attr['padding'] == 'VALID':
            attr['padding'] = [0, 0]
        elif attr['padding'] == 'SAME':
            stride_h, stride_w = attr['strides']
            kernel_h, kernel_w = attr['kernel_shape']
            if attr['data_format'] == 'NHWC':
                in_h = input_shape[1]
                in_w = input_shape[2]
            else:
                in_h = input_shape[2]
                in_w = input_shape[3]
            # 'SAME' padding is computed on the dilated kernel extent.
            dilation_h = attr['dilations'][0]
            dilation_w = attr['dilations'][1]
            dilated_kernel_h = (kernel_h - 1) * dilation_h + 1
            dilated_kernel_w = (kernel_w - 1) * dilation_w + 1
            pad_v = _get_pad_pair(in_h, dilated_kernel_h, stride_h)
            pad_h = _get_pad_pair(in_w, dilated_kernel_w, stride_w)
            if attr['data_format'] == 'NHWC':
                inputs[0] = _op.nn.pad(data=inputs[0],
                                       pad_width=((0, 0),
                                                  (pad_v[0], pad_v[1]),
                                                  (pad_h[0], pad_h[1]),
                                                  (0, 0)))
            else:
                inputs[0] = _op.nn.pad(data=inputs[0],
                                       pad_width=((0, 0),
                                                  (0, 0),
                                                  (pad_v[0], pad_v[1]),
                                                  (pad_h[0], pad_h[1])))
            # Padding was applied explicitly above, so conv itself pads 0.
            attr['padding'] = [0, 0]
        else:
            msg = 'Value {} in attribute "padding" of operator Conv is not ' \
                  'valid.'
            raise tvm.error.OpAttributeInvalid(msg.format(attr['padding']))
        if 'kernel_layout' not in attr:
            if opname == 'conv':
                attr['kernel_layout'] = 'HWIO' if attr['data_format'] == 'NHWC' else 'OIHW'
            else:
                attr['kernel_layout'] = 'HWOI' if attr['data_format'] == 'NHWC' else 'OIHW'
        use_bias = len(inputs) == 3
        channel_axis = 1 if attr['data_format'] == "NCHW" else 3
        out = AttrCvt(
            op_name=_dimension_picker('conv'),
            transforms={
                'kernel_shape': 'kernel_size',
                'data_format': 'data_layout',
                'dilations': ('dilation', (0, 0)),
                'group': ('groups', 1)},
            custom_check=_dimension_constraint())([inputs[0], inputs[1]], attr)
        if use_bias:
            out = _op.nn.bias_add(out, inputs[2], axis=channel_axis)
        if flip_layout:
            out = _op.transpose(out, axes=(0, 2, 3, 1))
        return out
    return _impl
def _decode_image():
def _impl(inputs, attr, params):
# Image decode wrapper: Expecting user to feed decoded input to next layer drop this layer.
warnings.warn("DecodeJpeg: It's a pass through, please handle preprocessing before input")
return inputs[0]
return _impl
def _crop_and_resize():
    """Convert TF CropAndResize: per-box strided_slice crop + bilinear resize.

    boxes/box_ind/crop_size must be constants (taken from params, or recovered
    by value inference). Only bilinear resizing is supported; method='nearest'
    raises OpAttributeUnimplemented.
    """
    def _impl(inputs, attr, params):
        # input image is a 4-D tensor of shape [batch, image_height, image_width, depth]
        # boxes is a 2-D tensor of shape [num_boxes, 4], 4 is for [y1, x1, y2, x2]
        try:
            boxes = params.pop(inputs[1].name_hint).asnumpy().tolist()
            box_ind = params.pop(inputs[2].name_hint).asnumpy().tolist()
            crop_size = params.pop(inputs[3].name_hint).asnumpy().tolist()
        except (IndexError, KeyError):
            boxes = _infer_value(inputs[1], params).asnumpy().tolist()
            box_ind = _infer_value(inputs[2], params).asnumpy().tolist()
            crop_size = _infer_value(inputs[3], params).asnumpy().tolist()
        data_shape = attr['_input_shapes'][inputs[0]]
        data_dim = len(data_shape)
        method = attr['method'].decode()
        attrs = {}
        attrs['size'] = crop_size
        attrs['layout'] = 'NHWC'
        if method.lower() == 'nearest':
            raise tvm.error.OpAttributeUnimplemented(
                'Attribute method=nearest is not supported')
        else:
            attrs['align_corners'] = True
            attrs['method'] = 'BILINEAR'
        out = None
        begin = [0] * data_dim
        size = data_shape[:]
        for idx in box_ind:
            # 1) Crop
            # y is mapped to the image coordinate at y * (image_height - 1)
            # x is mapped to the image coordinate at x * (image_width - 1)
            begin[0] = idx
            begin[1] = int(round(boxes[idx][0] * (data_shape[1] - 1)))
            begin[2] = int(round(boxes[idx][1] * (data_shape[2] - 1)))
            size[0] = idx + 1
            size[1] = int(round((data_shape[1] - 1) * boxes[idx][2])) + 1
            size[2] = int(round((data_shape[2] - 1) * boxes[idx][3])) + 1
            res_crop = _op.strided_slice(inputs[0], begin=begin, end=size)
            # 2) Resize
            res_resize = _get_relay_op('resize')(res_crop, **attrs)
            # BUG FIX: was `if out else ...`, relying on the truthiness of a
            # Relay expression; compare against None explicitly instead.
            if out is None:
                out = res_resize
            else:
                out = _op.concatenate([out, res_resize], axis=0)
        return out
    return _impl
def _cast():
def _impl(inputs, attr, params):
return inputs[0].astype(attr['DstT'].name)
return _impl
def _expand_dims():
    """ExpandDims: the axis comes from the popped second input."""
    def _impl(inputs, attr, params):
        axis_node = inputs.pop(1)
        axis = int(_get_num_param(params, axis_node))
        return AttrCvt(op_name="expand_dims", ignores=['Tdim', 'N'],
                       extras={'axis': axis, 'num_newaxis': 1})(inputs, attr)
    return _impl
def _resize_bilinear():
    """ResizeBilinear -> relay resize (NHWC layout, BILINEAR method)."""
    def _impl(inputs, attr, params):
        size = attr['_output_shapes'][0][1:3]
        # An unknown (-1) spatial dim must be recovered by evaluating the
        # constant size input.
        if -1 in size:
            size = _infer_value(inputs[1], params).asnumpy().reshape([-1]).tolist()
        inputs.pop(1)
        attr['size'] = size
        attr['layout'] = 'NHWC'
        return AttrCvt(op_name="resize",
                       ignores=['Tdim'],
                       extras={'method': "BILINEAR"})(inputs, attr)
    return _impl
def _resize_nearest_neighbor():
    """ResizeNearestNeighbor -> relay resize (NHWC, NEAREST_NEIGHBOR)."""
    def _impl(inputs, attr, params):
        size = attr['_output_shapes'][0][1:3]
        # Recover an unknown (-1) spatial dim from the constant size input.
        if -1 in size:
            size = _infer_value(inputs[1], params).asnumpy().reshape([-1]).tolist()
        inputs.pop(1)
        attr['size'] = size
        attr['layout'] = 'NHWC'
        return AttrCvt(op_name="resize",
                       ignores=['Tdim'],
                       extras={'method': "NEAREST_NEIGHBOR"})(inputs, attr)
    return _impl
def _check_numerics():
    """CheckNumerics is lowered to a plain copy; no runtime verification."""
    def _impl(inputs, attr, params):
        converter = AttrCvt(op_name="copy", ignores=['message'])
        return converter(inputs, attr)
    return _impl
def _matmul():
    """MatMul -> relay dense, honoring transpose_a / transpose_b."""
    def _impl(inputs, attr, params):
        units = _infer_channels(inputs[1], params, not attr['transpose_b'])
        if attr['transpose_a']:
            inputs[0] = _op.transpose(inputs[0], axes=(1, 0))
        # dense expects the weight pre-transposed, hence the inverted check.
        if not attr['transpose_b']:
            inputs[1] = _op.transpose(inputs[1], axes=(1, 0))
        converter = AttrCvt(op_name="dense",
                            extras={'units': units},
                            ignores=['transpose_a', 'transpose_b', 'T'])
        return converter(inputs, attr)
    return _impl
def _undef():
    # NOTE(review): `_sym` looks like a leftover from the NNVM frontend; if it
    # is not imported in this module, invoking this converter raises NameError.
    # Confirm against the file's import block.
    def _impl(inputs, attr, params):
        return _sym.__undef__()
    return _impl
def _identity():
def _impl(inputs, attr, params):
return inputs[0]
return _impl
def _concatV2():
    """ConcatV2: the axis is the last input; the values are the rest."""
    def _impl(inputs, attr, params):
        axis_node = inputs.pop(-1)
        axis = int(_get_num_param(params, axis_node))
        return AttrCvt(
            op_name="concatenate", ignores=['T', 'N', 'Tidx'],
            extras={'axis': axis})([inputs], attr)
    return _impl
def _concat():
    """Concat: the axis is the first input; the values follow it."""
    def _impl(inputs, attr, params):
        axis_node = inputs.pop(0)
        axis = int(_get_num_param(params, axis_node))
        return AttrCvt(
            op_name="concatenate", ignores=['N'],
            extras={'axis': axis})([inputs], attr)
    return _impl
def _pack():
    """Pack/Stack: expand each input at `axis`, then concatenate along it."""
    def _impl(inputs, attr, params):
        axis = int(attr["axis"])
        expanded = [_op.expand_dims(tensor, axis=axis, num_newaxis=1)
                    for tensor in inputs]
        return _op.concatenate(expanded, axis)
    return _impl
def _tile():
    """Tile: replication counts come from the (popped) last input."""
    def _impl(inputs, attr, params):
        reps = params[inputs.pop().name_hint].asnumpy()
        data = inputs.pop(0)
        return AttrCvt(
            op_name='tile',
            extras={'reps': tuple(reps)},
            ignores=['Tmultiples'])([data], attr)
    return _impl
def _slice():
    """Slice: convert TF's (begin, size) into strided_slice's (begin, end)."""
    def _impl(inputs, attr, params):
        begin = _get_list_param(params, inputs[1])
        size = _get_list_param(params, inputs[2])
        data_shape = attr['_input_shapes'][inputs[0]]
        data_dim = len(data_shape)
        end = size
        for i in range(data_dim):
            extent = size[i]
            # size == -1 means "to the end of that dimension".
            if extent == -1:
                end[i] = data_shape[i] - begin[i]
            else:
                end[i] = begin[i] + extent
        return _op.strided_slice(inputs[0], begin=begin, end=end)
    return _impl
def _reshape():
    """Reshape: target shape from a constant input, else by value inference."""
    def _impl(inputs, attr, params):
        shape_node = inputs.pop(1)
        try:
            shape_arg = _get_tuple_param(params, shape_node)
        except AttributeError:
            # The Shape op feeding us was already pruned; recover its value
            # with precompute-based inference.
            inferred = _infer_value(shape_node, params)
            shape_arg = tuple(inferred.asnumpy().astype('int64').flatten())
        return AttrCvt(
            op_name="reshape",
            extras={'newshape': shape_arg},
            ignores=['Tshape'])(inputs, attr)
    return _impl
def _depth_to_space():
    """Convert DepthToSpace via reshape -> transpose -> reshape: block_size**2
    channel groups are folded into the two spatial dimensions."""
    def _impl(inputs, attr, params):
        # Need to handle data layouts differently.
        input_shape = attr['_input_shapes'][inputs[0]]
        block_size = int(attr['block_size'])
        if attr['data_format'].decode("utf-8") == 'NHWC':
            in_n, in_h, in_w, in_c = input_shape
            new_c = int(in_c / (block_size * block_size))
            # First expand input to larger dimension.
            expanded = _op.reshape(
                inputs[0], newshape=(in_n, in_h, in_w, block_size, block_size, new_c))
            # Now reorder to expand spatial blocks.
            transposed = _op.transpose(expanded, axes=(0, 1, 3, 2, 4, 5))
            # Finally reshape to proper output.
            new_h = in_h * block_size
            new_w = in_w * block_size
            newshape = (in_n, new_h, new_w, new_c)
        else: # Handle NCHW layout
            in_n, in_c, in_h, in_w = input_shape
            new_c = int(in_c / (block_size * block_size))
            expanded = _op.reshape(
                inputs[0], newshape=(in_n, block_size, block_size, new_c, in_h, in_w))
            transposed = _op.transpose(expanded, axes=(0, 3, 4, 1, 5, 2))
            new_h = in_h * block_size
            new_w = in_w * block_size
            newshape = (in_n, new_c, new_h, new_w)
        return AttrCvt(
            op_name="reshape",
            extras={'newshape': newshape},
            ignores=['data_format', 'block_size'])([transposed], attr)
    return _impl
def _bias_add():
    """BiasAdd: broadcast-add the bias, reshaping it for NCHW layouts."""
    def _impl(inputs, attr, params):
        layout = attr['data_format'].decode("utf-8")
        bias = inputs[1]
        if layout == 'NCHW':
            # (C,) -> (1, C, 1, 1) so it broadcasts over the channel axis.
            bias = _op.reshape(bias, newshape=(1, -1, 1, 1))
        return _op.add(inputs[0], bias)
    return _impl
def _broadcast_to():
    """BroadcastTo: target shape from params or recovered by value inference."""
    def _impl(inputs, attr, params):
        shape_source = inputs[1]
        if isinstance(shape_source, _expr.Var):
            raw_shape = params[shape_source.name_hint]
        else:
            raw_shape = _infer_value(shape_source, params)
        shape = list(raw_shape.asnumpy().reshape([-1]))
        return _op.broadcast_to(inputs[0], shape)
    return _impl
def _squeeze():
    """Squeeze: empty squeeze_dims means "squeeze all unit axes" (axis=None)."""
    def _impl(inputs, attr, params):
        if not attr['squeeze_dims']:
            attr['squeeze_dims'] = None
        return AttrCvt(
            op_name="squeeze",
            transforms={'squeeze_dims': 'axis'},
            ignores=['T'])(inputs, attr)
    return _impl
def _fused_batch_norm():
    """FusedBatchNorm: TF and Relay share the input order
    (data, gamma, beta, moving_mean, moving_variance).

    The channel axis follows data_format; when a 'U' attribute is present the
    data is cast to that dtype for normalization and cast back afterwards.
    """
    def _impl(inputs, attr, params):
        if 'data_format' in attr:
            attr['data_format'] = attr['data_format'].decode("utf-8")
        axis = 1 if attr.get('data_format') == 'NCHW' else 3
        need_cast = 'U' in attr
        if need_cast:
            inputs[0] = _op.cast(inputs[0], dtype=attr['U'].name)
        out = AttrCvt(op_name='batch_norm',
                      transforms={'scale_after_normalization': 'scale',
                                  'variance_epsilon': 'epsilon'},
                      extras={'axis': axis},
                      ignores=['data_format', 'U'],
                      disables=['momentum'])(inputs, attr)
        if need_cast:
            out = _op.cast(out, dtype=attr['T'].name)
        return out
    return _impl
def _batch_norm():
    """BatchNormWithGlobalNormalization: reorder TF's input layout
    (data, mean, variance, beta, gamma) into Relay's
    (data, gamma, beta, mean, variance)."""
    def _impl(inputs, attr, params):
        reordered = [inputs[0], inputs[4], inputs[3], inputs[1], inputs[2]]
        if 'data_format' in attr:
            attr['data_format'] = attr['data_format'].decode("utf-8")
        axis = 1 if attr.get('data_format') == 'NCHW' else 3
        return AttrCvt(
            op_name='batch_norm',
            transforms={'scale_after_normalization': 'scale', 'variance_epsilon': 'epsilon'},
            extras={'axis': axis},
            ignores=['data_format'],
            disables=['momentum'])(reordered, attr)
    return _impl
def _relu6():
    """Relu6: clip activations into the interval [0, 6]."""
    def _impl(inputs, attr, params):
        bounds = {'a_min': 0, 'a_max': 6}
        return _op.clip(inputs[0], **bounds)
    return _impl
def _shape():
def _impl(inputs, attr, params):
return np.array(attr['_input_shapes'][inputs[0]], dtype='int32')
return _impl
def _fill():
    """Fill: build a constant-filled tensor of the (possibly inferred) shape."""
    def _impl(inputs, attr, params):
        output_shape = attr['_output_shapes'][0]
        # Unknown dims: evaluate the dims input to obtain a concrete shape.
        if -1 in output_shape:
            output_shape = _infer_value(inputs[0], params).asnumpy().reshape([-1]).tolist()
        fill_value = _get_num_param(params, inputs.pop(1))
        dtype = attr['T'].name
        scalar = tvm.relay.const(fill_value, dtype)
        return _op.full(scalar, output_shape, dtype)
    return _impl
def _lrn():
    """LRN: translate TF depth_radius/bias/alpha semantics into relay lrn."""
    def _impl(inputs, attr, params):
        radius = attr.get('depth_radius', 5)
        window = (radius * 2) + 1
        new_attrs = {
            'axis': 3,  # LRN runs over the channel axis of NHWC data
            'size': window,
            'bias': attr.get('bias', 1),
            # TF's alpha is per-element; relay expects it scaled by the window.
            'alpha': attr.get('alpha', 1) * window,
            'beta': attr.get('beta', 0.5),
        }
        return AttrCvt(op_name='lrn')(inputs, new_attrs)
    return _impl
def _sum():
    """Sum reduction; the axes come from the constant second input."""
    def _impl(inputs, attr, params):
        axis = _get_tuple_param(params, inputs[1])
        converter = AttrCvt(
            op_name='sum',
            extras={'axis': axis},
            transforms={'keep_dims': 'keepdims'},
            ignores=['name', 'Tidx'])
        return converter([inputs[0]], attr)
    return _impl
def _reduce(op):
    """Generic reduction converter; axes are popped from the constant input."""
    def _impl(inputs, attr, params):
        axis = tuple(params.pop(inputs[1].name_hint).asnumpy())
        converter = AttrCvt(
            op_name=op,
            extras={'axis': axis},
            transforms={'keep_dims': 'keepdims'},
            ignores=['name', 'Tidx'])
        return converter([inputs[0]], attr)
    return _impl
def _square():
    """Square: computed as x * x."""
    def _impl(inputs, attr, params):
        data = inputs[0]
        return _op.multiply(data, data)
    return _impl
def _gather():
    "GatherV2, Gather"
    def _impl(inputs, attr, params):
        # GatherV2 carries an explicit axis input; plain Gather implies axis 0.
        axis = _get_num_param(params, inputs.pop(2)) if len(inputs) > 2 else 0
        return AttrCvt(op_name="take",
                       extras={'axis': tvm.const(axis, 'int32')},
                       ignores=['Tindices', 'Tparams', 'validate_indices',
                                'Taxis', '_class'])(inputs[0:2], attr)
    return _impl
def _gather_nd():
    """GatherNd"""
    def _impl(inputs, attr, params):
        converter = AttrCvt(op_name="gather_nd",
                            ignores=['Tindices', 'Tparams', 'Taxis', '_class'])
        return converter(inputs, attr)
    return _impl
def _stridedSlice():
    def _impl(inputs, attr, params):
        """Strided Slice.
        Operator description: https://www.tensorflow.org/api_docs/python/tf/strided_slice
        Tensorflow mask validation: https://github.com/tensorflow/tensorflow/blob/master/
        tensorflow/core/util/strided_slice_op.cc#L147-L368
        """
        # begin/end/stride arrive as constant tensor inputs.
        begin = _get_list_param(params, inputs[1])
        end = _get_list_param(params, inputs[2])
        stride = _get_list_param(params, inputs[3])
        begin_mask = int(attr.get('begin_mask', 0))
        end_mask = int(attr.get('end_mask', 0))
        ellipsis_mask = int(attr.get('ellipsis_mask', 0))
        new_axis_mask = int(attr.get('new_axis_mask', 0))
        shrink_axis_mask = int(attr.get('shrink_axis_mask', 0))
        data_shape = attr['_input_shapes'][inputs[0]]
        data_dim = len(data_shape)
        stride_dim = len(stride)
        def _transform_mask(stride_dim, ellipsis_mask):
            """Handle mask inputs to create new begin, end, stride and output shape"""
            m_begin = [0] * data_dim
            m_end = [0] * data_dim
            m_stride = [0] * data_dim
            # fshape_indices entries: -1 => insert unit axis (new_axis),
            # -2 => drop axis (shrink_axis), >=0 => keep that output axis.
            fshape_indices = []
            #Count new axis after ellipsis_mask, consider while applying ellipsis_mask.
            ellipsis_seen = False
            new_axes_after_ellipsis = 0
            for i in range(stride_dim):
                mask = 1 << i
                if ellipsis_seen and (mask & new_axis_mask) != 0:
                    new_axes_after_ellipsis += 1
                if (mask & ellipsis_mask) != 0:
                    ellipsis_seen = True
            if not ellipsis_seen:
                #Used later for extending the stride attributes in the below loop.
                ellipsis_mask |= (1 << stride_dim)
                stride_dim += 1
            final_index = 0
            for index in range(stride_dim):
                mask = 1 << index
                if mask & ellipsis_mask:
                    #Identify the end index for applying ellipsis_mask
                    to_index = min(((data_dim - (stride_dim-index)) + 1 \
                                     + new_axes_after_ellipsis), data_dim)
                    for i in range(final_index, to_index):
                        m_begin[final_index] = 0
                        m_end[final_index] = data_shape[final_index]
                        m_stride[final_index] = 1
                        fshape_indices.append(final_index)
                        final_index += 1
                elif mask &new_axis_mask:
                    fshape_indices.append(-1)
                elif not mask & new_axis_mask:
                    if final_index == len(m_begin):
                        break
                    if mask & begin_mask:
                        m_begin[final_index] = data_shape[final_index] \
                                                     if stride[index] < 0 else 0
                    elif begin[index]:
                        m_begin[final_index] = begin[index]
                    if mask & end_mask:
                        m_end[final_index] = 0 if stride[index] < 0 \
                                                 else data_shape[final_index]
                    elif end[index]:
                        m_end[final_index] = end[index]
                    m_stride[final_index] = stride[index]
                    if mask & shrink_axis_mask:
                        #Tensorflow make axis with shrink_axis_mask as dimension 1
                        m_begin[final_index] = data_shape[final_index] + begin[index] \
                                                 if begin[index] < 0 else begin[index]
                        m_end[final_index] = begin[index] + 1
                        m_stride[final_index] = 1
                        fshape_indices.append(-2)
                    else:
                        fshape_indices.append(final_index)
                    final_index += 1
            return m_begin, m_end, m_stride, fshape_indices
        fshape_indices = None
        # Only run the mask transformation when some mask bit is actually set.
        if begin_mask or end_mask or ellipsis_mask or new_axis_mask or shrink_axis_mask:
            begin, end, stride, fshape_indices = _transform_mask(stride_dim, ellipsis_mask)
        out = _op.strided_slice(inputs[0], begin=begin, end=end, strides=stride)
        out_shape = _infer_shape(out, params)
        if not fshape_indices:
            fshape_indices = range(len(out_shape))
        #Create final output shape.
        final_output = []
        for gather_index in fshape_indices:
            if gather_index == -1:
                final_output.append(1)
            elif gather_index == -2:
                pass
            else:
                final_output.append(out_shape[gather_index])
        if not final_output:
            return out
        return _op.reshape(out, newshape=tuple(final_output))
    return _impl
def _pad(name):
    """Pad / PadV2: widths from the second input; PadV2's value from the third."""
    def _impl(inputs, attr, params):
        pad_rows = _get_param(params, inputs[1])
        attr['pad_width'] = tuple(tuple(row) for row in pad_rows)
        attr['pad_value'] = _get_num_param(params, inputs[2]) if name == 'PadV2' else 0
        return AttrCvt(
            op_name='pad',
            ignores=['Tpaddings'],)([inputs[0]], attr)
    return _impl
def _transpose():
    """Transpose: use the constant perm input when available, else default order."""
    def _impl(inputs, attr, params):
        try:
            perm = _get_list_param(params, inputs[1])
        except (IndexError, KeyError):
            perm = None
        return _op.transpose(inputs[0], axes=perm)
    return _impl
def _where():
    """Select/Where maps one-to-one onto relay's where."""
    def _impl(inputs, attr, params):
        converter = AttrCvt(op_name="where")
        return converter(inputs, attr)
    return _impl
def _clip_by_value():
    """ClipByValue: min/max come from scalar constant inputs."""
    def _impl(inputs, attr, params):
        lower = params.pop(inputs[1].name_hint).asnumpy()[0]
        upper = params.pop(inputs[2].name_hint).asnumpy()[0]
        return _op.clip(inputs[0], a_min=lower, a_max=upper)
    return _impl
def _reverse_v2():
    """ReverseV2: a single reversal axis from the constant second input."""
    def _impl(inputs, attr, params):
        axis = int(_get_num_param(params, inputs[1]))
        return AttrCvt(
            op_name="reverse",
            ignores=['Tidx'],
            extras={'axis': axis})([inputs[0]], attr)
    return _impl
def _rank():
    """Rank: materialized as a new one-element int32 param holding ndim."""
    def _impl(inputs, attr, params):
        rank = len(attr['_input_shapes'][inputs[0]])
        name = attr["_node_name"]
        params[name] = tvm.nd.array([rank])
        return [_expr.var(name,
                          shape=params[name].shape,
                          dtype='int32')]
    return _impl
def _range():
    """Range: start/limit/delta from constants; limit may come from a Rank node."""
    def _impl(inputs, attr, params):
        start = params.pop(inputs[0].name_hint).asnumpy()[0]
        if hasattr(inputs[1], "name_hint"):
            limit = params.pop(inputs[1].name_hint).asnumpy()[0]
        else:
            limit = params.pop('Rank').asnumpy()[0]
        delta = params.pop(inputs[2].name_hint).asnumpy()[0]
        dtype = attr['dtype'].name if 'dtype' in attr else "int32"
        return AttrCvt(
            op_name="arange",
            ignores=['Tidx'],
            extras={'start': _expr.const(start),
                    "stop": _expr.const(limit),
                    'step': _expr.const(delta),
                    'dtype': dtype})([], attr)
    return _impl
def _elu():
    """Elu composed from exp/relu: -relu(1 - exp(x)) + relu(x)."""
    def _impl(inputs, attr, params):
        dtype = attr['T'].name
        alpha = tvm.relay.const(-1.0, dtype)
        one = tvm.relay.const(1, dtype)
        neg_part = _op.nn.relu(one - _op.exp(inputs[0]))
        return alpha * neg_part + _op.nn.relu(inputs[0])
    return _impl
def _selu():
    """Selu: scaled ELU using the canonical alpha/gamma constants."""
    def _impl(inputs, attr, params):
        dtype = attr['T'].name
        alpha = tvm.relay.const(-1.6732632423543772848170429916717, dtype)
        gamma = tvm.relay.const(1.0507009873554804934193349852946, dtype)
        one = tvm.relay.const(1, dtype)
        neg_part = _op.nn.relu(one - _op.exp(inputs[0]))
        return gamma * (alpha * neg_part + _op.nn.relu(inputs[0]))
    return _impl
def _mean():
    """Mean reduction; the axes come from the constant second input."""
    def _impl(inputs, attr, params):
        axis = _get_tuple_param(params, inputs[1])
        converter = AttrCvt(op_name="mean", ignores=['Tdim', 'Tidx'],
                            transforms={'keep_dims': 'keepdims'},
                            extras={'axis': axis})
        return converter([inputs[0]], attr)
    return _impl
def _broadcast(name):
    """Generic broadcasting binary-op converter."""
    def _impl(inputs, attr, params):
        converter = AttrCvt(op_name=name, ignores=['name', 'Tidx'])
        return converter(inputs, attr)
    return _impl
def _split(has_size_vector):
    # TF documentation https://www.tensorflow.org/api_docs/python/tf/split
    def _impl(inputs, attr, params):
        """Convert Split/SplitV; `axis` and the split sizes must be constants."""
        try:
            # order and number of inputs are different:
            # if has_size_vector:
            #     https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/split-v
            # else:
            #     https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/split
            # in addition, `axis` and `num_or_size_splits` can be tensors in TensorFlow,
            # we can only support constants
            if has_size_vector:
                input_node_index = 0
                input_axis_index = 2
                size_splits = _get_param(params, inputs[1])
                # Relay wants split *positions*, not section sizes.
                section_beginnings = np.cumsum(size_splits)[:-1]
                indices_or_sections = tuple(section_beginnings)
            else:
                input_node_index = 1
                input_axis_index = 0
                indices_or_sections = attr['num_split']
            input_node = inputs[input_node_index]
            axis_input_value = _get_num_param(params, inputs[input_axis_index])
        except (IndexError, KeyError):
            raise TypeError( \
                "Unsupported argument for split: `axis` and `num_or_size_splits` " \
                "should be constants")
        return _op.split(input_node,
                         indices_or_sections=indices_or_sections,
                         axis=int(axis_input_value))
    return _impl
def _unpack():
    """Unpack/Unstack: split along `axis`, then squeeze that axis away."""
    def _impl(inputs, attr, params):
        data = inputs[0]
        axis = attr['axis']
        axis_length = attr['_input_shapes'][data][axis]
        if axis_length < 0:
            raise TypeError("Unstack with unknown axis length")
        pieces = _op.split(data,
                           indices_or_sections=axis_length,
                           axis=axis)
        # Axis 0 is passed to squeeze as None; other axes as a singleton list.
        squeeze_axis = None if axis == 0 else [axis]
        squeezed = [_op.squeeze(piece, axis=squeeze_axis) for piece in pieces]
        return _expr.TupleWrapper(_expr.Tuple(squeezed), len(pieces))
    return _impl
def _softmax():
    """Softmax; TF's axis attribute defaults to 1 when absent."""
    def _impl(inputs, attr, params):
        converter = AttrCvt(op_name='softmax',
                            transforms={'axis': ('axis', 1)})
        return converter([inputs[0]], attr)
    return _impl
def _softplus():
    """Convert Softplus: softplus(x) = log(exp(x) + 1).

    Composed from exp/add/log since no dedicated relay op is used here.
    op description: https://www.tensorflow.org/api_docs/python/tf/math/softplus
    """
    def _impl(inputs, attr, params):
        exp_out = AttrCvt('exp')(inputs, attr)
        # Constant 1 in the op's dtype. BUG FIX: previously this constant was
        # also appended to `inputs`, mutating the caller's list without the
        # appended element ever being consumed.
        rh = tvm.relay.const(1, attr['T'].name)
        add_out = _get_relay_op('add')(exp_out, rh)
        return _get_relay_op('log')(add_out)
    return _impl
def _topk():
    """TopKV2: k from the popped constant input; only sorted=True is supported."""
    def _impl(inputs, attr, params):
        top_count = int(params.pop(inputs.pop(1).name_hint).asnumpy())
        if top_count < 1:
            raise tvm.error.OpAttributeInvalid(
                'Attribute k must be positive in operator TopKV2')
        if attr['sorted'] is False:
            raise tvm.error.OpAttributeUnimplemented(
                'Attribute sorted=False is not supported in operator TopKV2')
        converter = AttrCvt(op_name='topk',
                            ignores=['sorted'],
                            extras={'k': top_count, 'is_ascend': False,
                                    'dtype': 'int32'})
        return converter(inputs, attr)
    return _impl
def _floordiv():
    """FloorDiv: floor(divide(x, y))."""
    def _impl(inputs, attr, params):
        assert len(inputs) == 2
        quotient = AttrCvt('divide')(inputs, attr)
        return _get_relay_op('floor')(quotient)
    return _impl
def _logical(name):
    """Logical ops map one-to-one onto their relay counterparts."""
    def _impl(inputs, attr, params):
        converter = AttrCvt(op_name=name)
        return converter(inputs, attr)
    return _impl
def _space_to_batch_nd():
    """Convert SpaceToBatchND via pad -> reshape -> transpose -> reshape,
    following the step-by-step definition in the TF op documentation."""
    def _impl(inputs, attr, params):
        input_node = inputs[0]
        input_shape = attr['_input_shapes'][input_node]
        block_shape = _get_list_param(params, inputs[1])
        paddings = _get_list_param(params, inputs[2])
        N = len(input_shape)
        M = len(block_shape)
        batch = input_shape[0]
        remaining_shape_length = N - M - 1
        # Batch and trailing dims are never padded; only dims [1, M] are.
        paddings = [(0, 0)] + paddings + [(0, 0)] * remaining_shape_length
        # From https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/space-to-batch-n-d:
        # Zero-pad the start and end of dimensions [1, ..., M] of the input according to paddings
        # to produce padded of shape padded_shape.
        padded = tvm.relay.nn.pad(input_node, pad_width=paddings)
        # Reshape padded to reshaped_padded of shape:
        # [batch] + [padded_shape[1] / block_shape[0], block_shape[0], ...,
        # padded_shape[M] / block_shape[M-1], block_shape[M-1]] + remaining_shape
        shape1 = [batch] + [item for i in range(M) for item in [-4, -1, block_shape[i]]] + [-2]
        reshaped_padded = tvm.relay.reshape(padded, newshape=shape1)
        # Permute dimensions of reshaped_padded to produce permuted_reshaped_padded of shape:
        # block_shape + [batch] + [padded_shape[1] / block_shape[0], ...,
        # padded_shape[M] / block_shape[M-1]] + remaining_shape
        axes = [2 * i + 2 for i in range(M)] + [0] + [2 * i + 1 for i in range(M)] + \
               list(range(1 + 2 * M, 1 + 2 * M + remaining_shape_length))
        permuted_reshaped_padded = tvm.relay.transpose(reshaped_padded, axes=axes)
        permuted_reshaped_padded_shape = _infer_shape(permuted_reshaped_padded, params)
        # Reshape permuted_reshaped_padded to flatten block_shape into the batch dimension,
        # producing an output tensor of shape:
        # [batch * prod(block_shape)] + [padded_shape[1] / block_shape[0], ...,
        # padded_shape[M] / block_shape[M-1]] + remaining_shape
        shape2 = [batch * np.prod(block_shape)] + list(permuted_reshaped_padded_shape)[M + 1:]
        reshaped_permuted_reshaped_padded = tvm.relay.reshape(permuted_reshaped_padded,
                                                              newshape=shape2)
        return reshaped_permuted_reshaped_padded
    return _impl
def _batch_to_space_nd():
    """Convert BatchToSpaceND via reshape -> transpose -> reshape -> crop,
    following the step-by-step definition in the TF op documentation."""
    def _impl(inputs, attr, params):
        input_node = inputs[0]
        input_shape = attr['_input_shapes'][input_node]
        block_shape = _get_list_param(params, inputs[1])
        crops = _get_list_param(params, inputs[2])
        M = len(block_shape)
        batch = input_shape[0]
        # From https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/batch-to-space-n-d:
        # Reshape input to reshaped of shape:
        # [block_shape[0], ..., block_shape[M-1], batch / prod(block_shape),
        #  input_shape[1], ..., input_shape[N-1]]
        shape1 = block_shape + [batch // np.prod(block_shape)] + input_shape[1:]
        reshaped = tvm.relay.reshape(input_node, newshape=shape1)
        # Permute dimensions of reshaped to produce permuted of shape
        # [batch / prod(block_shape), input_shape[1], block_shape[0], ...,
        #  input_shape[M], block_shape[M-1], input_shape[M+1], ..., input_shape[N-1]]
        axes = [M] + [axis for i in range(M) for axis in [M + i + 1, i]] + \
            list(range(2 * M + 1, len(shape1)))
        permuted = tvm.relay.transpose(reshaped, axes=axes)
        # Reshape permuted to produce reshaped_permuted of shape
        # [batch / prod(block_shape), input_shape[1] * block_shape[0], ...,
        #  input_shape[M] * block_shape[M-1], input_shape[M+1], ..., input_shape[N-1]]
        # (-3 merges adjacent dims; 0 copies the dim; -2 copies the rest.)
        shape2 = [0] + [-3] * M + [-2]
        reshaped_permuted = tvm.relay.reshape(permuted, newshape=shape2)
        # Crop the start and end of dimensions [1, ..., M] of reshaped_permuted according to crops
        # to produce the output of shape:
        # [batch / prod(block_shape), input_shape[1] * block_shape[0] - crops[0,0] - crops[0,1],
        #  ..., input_shape[M] * block_shape[M-1] - crops[M-1,0] - crops[M-1,1],
        #  input_shape[M+1], ..., input_shape[N-1]]
        reshaped_permuted_shape = _infer_shape(reshaped_permuted, params)
        cropped = reshaped_permuted
        for axis in range(1, M+1):
            crop = crops[axis - 1]
            if crop != [0, 0]:
                indices = tvm.relay.arange(
                    _expr.const(crop[0]),
                    _expr.const(reshaped_permuted_shape[axis] - crop[1]),
                    dtype='int32'
                )
                cropped = tvm.relay.take(cropped, indices=indices, axis=axis)
        return cropped
    return _impl
def _prod():
def _impl(inputs, attr, params):
axis = params.pop(inputs[1].name_hint).asnumpy()[0]
keepdims = attr['keep_dims']
return _op.prod(inputs[0], int(axis), keepdims=keepdims)
return _impl
# compatible operators that do NOT require any conversion.
_identity_list = []
# _convert_map defines maps of name to converter functor(callable)
# for 1 to 1 mapping, use Renamer if nothing but name is different
# use AttrCvt if attributes need to be converted
# for 1 to N mapping(composed), use custom callable functions
# for N to 1 mapping, currently not supported(?)
_convert_map = {
'Abs' : AttrCvt('abs'),
'Add' : _elemwise('add'),
'All' : _reduce('all'),
'ArgMax' : _argx(_op.argmax, 'argmax'),
'ArgMin' : _argx(_op.argmin, 'argmin'),
'AvgPool' : _pooling('avg_pool'),
'BatchNormWithGlobalNormalization' : _batch_norm(),
'BatchToSpaceND' : _batch_to_space_nd(),
'BiasAdd' : _bias_add(),
'BroadcastTo' : _broadcast_to(),
'Cast' : _cast(),
'Ceil' : AttrCvt('ceil'),
'CheckNumerics' : _check_numerics(),
'ClipByValue' : _clip_by_value(),
'Concat' : _concat(),
'ConcatV2' : _concatV2(),
'Conv2D' : _conv('conv'),
'CropAndResize' : _crop_and_resize(),
'DecodeJpeg' : _decode_image(),
'DepthwiseConv2dNative' : _conv('depthwise'),
'DepthToSpace' : _depth_to_space(),
'Equal' : _broadcast('equal'),
'Elu' : _elu(),
'Exp' : AttrCvt('exp'),
'ExpandDims' : _expand_dims(),
'Fill' : _fill(),
'Floor' : AttrCvt('floor'),
'FloorDiv' : _floordiv(),
'FusedBatchNorm' : _fused_batch_norm(),
'FusedBatchNormV2' : _fused_batch_norm(),
'Gather' : _gather(),
'GatherNd' : _gather_nd(),
'GatherV2' : _gather(),
'Greater' : _broadcast('greater'),
'GreaterEqual' : _broadcast('greater_equal'),
'Identity' : _identity(),
'LeakyRelu' : AttrCvt('leaky_relu'),
'LeftShift' : AttrCvt('left_shift'),
'Less' : _broadcast('less'),
'LessEqual' : _broadcast('less_equal'),
'Log' : AttrCvt('log'),
'LogicalAnd' : _logical('logical_and'),
'LogicalOr' : _logical('logical_or'),
'LogicalNot' : _logical('logical_not'),
'LogSoftmax' : AttrCvt('log_softmax'),
'LRN' : _lrn(),
'MatMul' : _matmul(),
'Max' : _reduce('max'),
'MaxPool' : _pooling('max_pool'),
'Maximum' : _elemwise('maximum'),
'Mean' : _mean(),
'Min' : _reduce('min'),
'Minimum' : _elemwise('minimum'),
'Mod' : _elemwise('mod'),
'Mul' : _elemwise('multiply'),
'Neg' : AttrCvt('negative'),
'NotEqual' : _broadcast('not_equal'),
'Pack' : _pack(),
'Pad' : _pad('Pad'),
'PadV2' : _pad('PadV2'),
'Pow' : _elemwise('power'),
'Prod' : _prod(),
'Range' : _range(),
'Rank' : _rank(),
'RealDiv' : _elemwise('divide'),
'Relu' : AttrCvt('relu'),
'Relu6' : _relu6(),
'Reshape' : _reshape(),
'ResizeBilinear' : _resize_bilinear(),
'ResizeBicubic' : _resize_bilinear(),
'ResizeNearestNeighbor' : _resize_nearest_neighbor(),
'ReverseV2' : _reverse_v2(),
'RightShift' : AttrCvt('right_shift'),
'Round' : AttrCvt('round'),
'Rsqrt' : _rsqrt(),
'Select' : _where(),
'Selu' : _selu(),
'Shape' : _shape(),
'Sigmoid' : AttrCvt('sigmoid'),
'Sign' : AttrCvt('sign'),
'Slice' : _slice(),
'Softmax' : _softmax(),
'Softplus' : _softplus(),
'SpaceToBatchND' : _space_to_batch_nd(),
'Split' : _split(False),
'SplitV' : _split(True),
'Sqrt' : AttrCvt('sqrt'),
'Square' : _square(),
'Squeeze' : _squeeze(),
'StridedSlice' : _stridedSlice(),
'Sub' : _elemwise('subtract'),
'Sum' : _sum(),
'Tanh' : AttrCvt('tanh'),
'Tile' : _tile(),
'TopKV2' : _topk(),
'Transpose' : _transpose(),
'TruncateMod' : _elemwise('mod'),
'Unpack' : _unpack(),
'ZerosLike' : AttrCvt('zeros_like'),
}
def _LSTMBlockCell():
    def _impl(inputs, in_state_c, in_state_h, attr, params):
        """LSTM Block cell.
        Calculations are described in: https://github.com/tensorflow/tensorflow/blob/
        r1.8/tensorflow/contrib/rnn/python/ops/lstm_ops.py#L41-L114

        Parameters
        ----------
        inputs : relay.Expr
            Input data
        in_state_c: list of relay.Expr
            Cell state input values for all the layers
        in_state_h: list of relay.Expr
            Hidden state input values for all the layers
        attrs : dict
            Dict of operator attributes
        params : dict
            List of pretrained weights and bias

        Returns
        -------
        sym : relay.Expr
            Converted relay.Expr
        output: relay.Expr
            Output state value.
        """
        in_data = inputs[0]
        in_weight = inputs[3]
        in_bias = inputs[7]
        forget_bias = attr.pop('forget_bias')
        input_shape = attr['_input_shapes'][inputs[0]]
        weight_shape = attr['_input_shapes'][inputs[3]]
        batch_size, input_size = input_shape[0], input_shape[1]
        # The weight matrix packs all four gates side by side, hence // 4.
        num_hidden_layers = weight_shape[1]
        num_hidden = num_hidden_layers // 4

        in_data = _op.reshape(in_data,
                              newshape=(batch_size, input_size))
        # Concatenate input and previous hidden state, then compute all four
        # gate pre-activations with a single dense layer.
        ixh = _op.concatenate([in_data, in_state_h], axis=1)
        in_weight = _op.transpose(in_weight, axes=None)
        gates = _op.nn.dense(ixh, in_weight,
                             units=num_hidden_layers)
        gates_bias = _op.add(gates, in_bias)
        # Gate order: input gate, cell input, forget gate, output gate.
        gate_list = _op.split(gates_bias, indices_or_sections=4, axis=1)
        in_gate = _op.sigmoid(gate_list[0])
        in_transform = _op.tanh(gate_list[1])
        # The configurable forget bias is added before the sigmoid.
        forget_gate = _op.add(gate_list[2], tvm.relay.const(forget_bias, attr['T'].name))
        forget_gate = _op.sigmoid(forget_gate)
        out_gate = _op.sigmoid(gate_list[3])
        next_c = _op.add(_op.multiply(forget_gate, in_state_c),
                         _op.multiply(in_gate, in_transform))
        next_h = out_gate * _op.tanh(next_c)
        # Pack (cell, hidden) states into one (2, batch, num_hidden) tensor so
        # the layer handler can split them again per layer.
        out_state = _op.concatenate([next_c, next_h], axis=1)
        out_state = _op.reshape(out_state,
                                newshape=(2, batch_size, num_hidden))
        return next_h, out_state
    return _impl
# _convert_map_rnn defines maps of rnn operator name to
# converter functor(callable) for 1 to 1 mapping.
# These are dispatched through RecurrentNetworks rather than _convert_map
# because they carry per-layer state between invocations.
_convert_map_rnn = {
    'LSTMBlockCell'                     : _LSTMBlockCell(),
}
class RecurrentNetworks(object):
    """Recurrent network layer handlers.

    Handle Layer operations.
    ToDo: Operators like RNN/GRU layer concepts also can be handled here

    Parameters
    ----------
    nodes : list
        list of graph nodes used for tensorflow parsing.

    out_rnn : list
        List of RecurrentNetwork outputs. This output will be appended to the
        'head' nodes of the graph.

    graph : tensorflow graph definition object
        The loaded tensorflow GraphDef

    convert_map : dict
        Dict of name : callable, where name is the op's name that
        require conversion to relay, callable are functions which
        take attrs and return (new_op_name, new_attrs)
    """
    def __init__(self, nodes, out_rnn, graph, convert_map):
        self._graph = graph
        self._convert_map = convert_map
        self._nodes = nodes
        self._out_rnn = out_rnn
        # Index of the LSTM layer currently being converted; incremented per
        # processed cell so input states are routed to the right layer.
        self._cur_lstm_layer = 0
        self._layer_name_list = []
        self._recurrent_ops_layer_map = {
            'LSTMBlockCell'             : self._LSTMBlockCellLayer(),
        }

    def _LSTMBlockCellLayer(self):
        """LSTMBlockCell layer handler.

        Parameters
        ----------
        op_name : str
            Operator name, eg:LSTMBlockCell

        layer_name : str list
            Layer name is used for creating the state input placeholder.

        inputs : relay.Expr
            Input data

        attrs : dict
            Dict of operator attributes

        params : dict
            List of pretrained weights and bias

        num_layers : int
            Total number of LSTM layer presented in the graph

        Returns
        -------
        sym : relay.Expr
            The returned relay Expr
        """
        def _impl(op_name, layer_name, inputs, attrs, params, num_layers):
            in_state_c_name = layer_name+'_c'
            in_state_h_name = layer_name+'_h'

            def _init_state(num_layers, batch_size, num_hidden):
                """Create the initial states for the first layer in the graph."""
                in_state_c = [_expr.var(in_state_c_name,
                                        shape=(num_layers, batch_size, num_hidden),
                                        dtype='float32')]

                in_state_h = [_expr.var(in_state_h_name,
                                        shape=(num_layers, batch_size, num_hidden),
                                        dtype='float32')]
                return in_state_c, in_state_h

            def _get_cur_input_state(in_state_c, in_state_h, num_layers,
                                     layer, batch_size, num_hidden):
                """Select the appropriate states for the current layer"""
                in_state_c_tup = _op.split(in_state_c[0],
                                           indices_or_sections=num_layers, axis=0)
                in_state_h_tup = _op.split(in_state_h[0],
                                           indices_or_sections=num_layers, axis=0)
                cur_in_state_c = _op.reshape(in_state_c_tup[layer],
                                             newshape=(batch_size, num_hidden))
                cur_in_state_h = _op.reshape(in_state_h_tup[layer],
                                             newshape=(batch_size, num_hidden))
                return cur_in_state_c, cur_in_state_h

            def _LSTMBlockCellWrapper(inputs, attr, params,
                                      num_layers, layer):
                """LSTM cell warapper to prepare the inputs"""
                input_shape = attr['_input_shapes'][inputs[0]]
                weight_shape = attr['_input_shapes'][inputs[3]]

                batch_size = input_shape[0]
                num_hidden = weight_shape[1] // 4

                if layer == 0:
                    #Create initial states placeholder in case of first layer
                    in_state_c, in_state_h = _init_state(num_layers,
                                                         batch_size, num_hidden)
                else:
                    in_state_c = self._nodes[in_state_c_name]
                    in_state_h = self._nodes[in_state_h_name]

                cur_in_state_c, cur_in_state_h = _get_cur_input_state( \
                                                    in_state_c, in_state_h,
                                                    num_layers, layer,
                                                    batch_size, num_hidden)
                output, out_state = self._convert_map[op_name](inputs, cur_in_state_c,
                                                               cur_in_state_h,
                                                               attr, params)
                return output, out_state, in_state_c, in_state_h

            sym, cur_out_state, in_state_c, in_state_h = \
                    _LSTMBlockCellWrapper(inputs, attrs, params,
                                          num_layers, self._cur_lstm_layer)
            self._nodes[in_state_c_name] = in_state_c
            self._nodes[in_state_h_name] = in_state_h
            cur_out_state = _op.expand_dims(cur_out_state, axis=0, num_newaxis=1)
            self._out_rnn.append(cur_out_state)
            self._cur_lstm_layer += 1
            return sym
        return _impl

    def process_op(self, op_name, inputs, attrs, params):
        """Process recurrent layer operators.

        List '_recurrent_ops_layer_map' map each Layer based operators with its
        layer handlers. Total number of layers are calculated to form the input
        data shapes.

        Parameters
        ----------
        op_name : str
            Operator name, such as LSTMBlockCell

        inputs : relay.Expr
            Input data

        attrs : dict
            Dict of operator attributes

        params : dict
            List of pretrained weights and bias

        Returns
        -------
        sym : relay.Expr
            Returns relay.Expr
        """
        def _get_abs_layer_name(node):
            """Return the absolute (layer) name for `node`.

            If an already-registered layer name is a substring of this node's
            name, the node belongs to that layer and the stored name is
            returned.  Otherwise the node starts a new layer: remember and
            return its own name.

            Bug fix: the first match now wins and the scan stops.  The
            previous loop let a later non-matching entry overwrite an earlier
            match and appended `node.name` once per non-matching entry,
            corrupting `_layer_name_list` with duplicates.
            """
            for _name in self._layer_name_list:
                if _name in node.name:
                    return _name
            self._layer_name_list.append(node.name)
            return node.name

        #Find number of layers of this same operator node in the graph
        #and also read the inputs name for the current op.
        num_layers = 0
        for _, node in enumerate(self._graph.node):
            if node.op == op_name:
                layer_name = _get_abs_layer_name(node)
                num_layers += 1

        sym = self._recurrent_ops_layer_map[op_name](op_name, layer_name, inputs, attrs,
                                                     params, num_layers)
        return sym
# An internal list to contain all the control flow primitives used in Tensorflow
# 1.x.  Nodes with these op types are dispatched to
# GraphProto._convert_control_flow_operator instead of _convert_map.
_control_flow_nodes = ['Merge', 'Switch', 'NextIteration', 'Exit', 'Enter', 'LoopCond']
class RewriteSubgraph(ExprMutator):
    """Expression mutator that substitutes selected sub-expressions.

    Used when building while-loop bodies to replace captured expressions
    with fresh loop variables.

    Parameters
    ----------
    rewrite_map : Dict[expr, expr]
        Maps each expression to the expression (typically a var) that
        replaces it.
    """
    def __init__(self, rewrite_map):
        ExprMutator.__init__(self)
        self.rewrite_map = rewrite_map

    def visit(self, expr):
        # Replace the expression outright when a mapping exists; otherwise
        # recurse into it as usual.
        try:
            return self.rewrite_map[expr]
        except KeyError:
            return super().visit(expr)
def rewrite_subgraph(expr, rewrites):
    """Apply the `rewrites` expr->expr substitution map to `expr`."""
    mutator = RewriteSubgraph(rewrites)
    return mutator.visit(expr)
def _in_while_loop(control_flow_node_map, op_name):
"""
Check if a given control flow operator is part of a while loop execution
frame. This is based on the fact that there is only one occurrence of
`LoopCond` for a loop execution frame and it is only presented in the loop
construct.
Parameters
----------
control_flow_node_map : Dict[str, Set[str]]
A dictionay contains the unique control flow execution frame name to
a set of primitive operators mapping.
op_name : str
The name of a control flow primitive.
Returns
-------
ret : bool
Return true if the operator is in a while loop execution frame,
otherwise, return false.
"""
return op_name in control_flow_node_map and \
"LoopCond" in control_flow_node_map[op_name]
class Branch:
    """Accumulates the components of a TensorFlow `tf.cond` construct and
    builds the corresponding Relay `If` expression on demand.

    Attributes
    ----------
    cond : tvm.relay.Expr
        The condition of the if node.

    true_branch : tvm.relay.Expr
        The body of the true branch of the if expression.

    false_branch: tvm.relay.Expr
        The body of the false branch of the if expression.

    _if : tvm.relay.Expr
        Cache holding the relay `If` once it has been created for a matched
        TF condition construct.

    Examples
    --------
    A TensorFlow cond such as::

        r = tf.cond(tf.less(i, j), lambda: tf.multiply(1, 17),
                    lambda: tf.add(4, 23))

    converts to Relay of the form::

        %0 = less(%Const, %Const_1)
        %1 = min(%0)
        if (%1) {
          multiply(%cond/Mul/x, %cond/Mul/y)
        } else {
          add(%cond/Add/x, %cond/Add/y)
        }
    """
    def __init__(self):
        self.cond = None
        self.true_branch = None
        self.false_branch = None
        self._if = None

    def _if_node(self):
        """Build the relay If node from the collected pieces."""
        # `cond` returns a tensor that may hold several boolean values.
        # Reduce it with `min` so the true branch is taken only when every
        # element is true.
        reduced_cond = tvm.relay.op.min(self.cond)
        return tvm.relay.If(reduced_cond, self.true_branch, self.false_branch)

    def if_node(self):
        """Return the cached tvm.relay.If node, creating it on first use."""
        if self._if is None:
            self._if = self._if_node()
        return self._if
class Loop:
    """
    A class contains the components that are used to build up a Relay
    recursive call.

    Parameters
    ----------
    loop_vars : List[tvm.relay.Expr]
        The loop variables that used in a while loop.

    cond : tvm.relay.Expr
        The condition of a while loop.

    body : tvm.relay.Expr
        The body of a matched while loop.

    _loop : tvm.relay.Expr
        An internal variable indicates where a recursive call is already created
        for a matched TF while loop construct.

    Examples
    --------
    The following is a vanilla loop from TensorFlow:
    .. code-block:: python

        i = tf.constant(0)
        c = lambda i: tf.less(i, 10)
        b = lambda i: tf.add(i, 1)
        r = tf.while_loop(c, b, [i])

    It will be converted to the following recursive call in Relay:
    .. code-block:: python

        fn (%while/Less/y: Tensor[(1,), int32],
            %while/Add/y: Tensor[(1,), int32],
            %Const: Tensor[(1,), int32]) {
          %0 = fn(%loop_var0: Tensor[(1,), int32]) {
            %1 = less(%loop_var0, %while/Less/y)
            %2 = min(%1)
            if (%2) {
              %3 = add(%loop_var0, %while/Add/y)
              free_var %while_loop
              %4 = %while_loop(%3)
              %4
            }    else {
              %5 = (%loop_var0,)
              %5
            }
          }
          let %while_loop1 = %0
          %6 = %while_loop1(%Const)
          %6
        }
    """
    def __init__(self):
        self.loop_vars = []
        self.cond = None
        self.body = []
        self._loop = None

    def _while_loop(self):
        """An internal API to create a Relay recursive call for a matched TF
        `while_loop` construct.
        """
        wl = tvm.relay.var('while_loop')

        sb = tvm.relay.scope_builder.ScopeBuilder()

        # Create a fresh typed variable for each loop-carried value and
        # remember the expr -> var substitution.
        loop_vars = []
        bind_map = {}
        for i, var in enumerate(self.loop_vars):
            if not isinstance(var, _expr.Var):
                var_chk = _infer_type(var)
                var_type = var_chk.checked_type
            else:
                var_type = var.type_annotation

            v = tvm.relay.var("loop_var" + str(i), type_annotation=var_type)
            loop_vars.append(v)
            bind_map[var] = v

        # Rewrite cond/body so they reference the new loop variables.
        self.cond = rewrite_subgraph(self.cond, bind_map)
        self.body = [rewrite_subgraph(b, bind_map) for b in self.body]

        # Reduce the (possibly tensor-valued) condition to a scalar boolean.
        cond = tvm.relay.op.min(self.cond)

        with sb.if_scope(cond):
            sb.ret(wl(*self.body))
        with sb.else_scope():
            sb.ret(tvm.relay.Tuple(loop_vars))

        loop_fn = tvm.relay.Function(loop_vars, sb.get())
        sb = tvm.relay.scope_builder.ScopeBuilder()
        sb.let(wl, loop_fn)
        sb.ret(wl(*self.loop_vars))
        return sb.get()

    def while_loop(self):
        """Instantiate a while loop if it has not been created yet.

        Fix: the original had a redundant duplicated `return self._loop`
        inside the `if` branch; consolidated into a single return with
        identical behavior.
        """
        if self._loop is None:
            self._loop = self._while_loop()
        return self._loop
class GraphProto(object):
    """ A helper class for handling relay graph copying from Tensorflow GraphDef.
    Definition:
        https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/framework/graph.proto
    """
    def __init__(self):
        # node name -> list of relay exprs (one per output slot)
        self._nodes = {}
        # param name -> tvm.nd.array of pretrained weights
        self._params = {}
        self._input_shapes = {}
        # node name -> list of output shapes (one per output slot)
        self._output_shapes = {}
        self._num_param = 0
        # Becomes True once the first RNN layer has been converted.
        self._num_rnn_layer = False
        # NOTE(review): duplicate initialization — _input_shapes is already
        # set a few lines above; one of the two can be removed.
        self._input_shapes = {}
        # execution-frame prefix -> Loop being assembled
        self._loops = {}
        # execution-frame prefix -> Branch being assembled
        self._branches = {}
        self._mod = _module.Module({})

    def from_tensorflow(self, graph, layout="NHWC", shape=None, outputs=None):
        """Construct relay nodes from tensorflow graph definition - GraphDef.

        Follow the tensorflow graph definition to parse and convert it to Relay.
        Some of the assumptions listed below.

            -> All Placeholders are considered as graph input.
            -> All Const nodes are params.
            -> Last node is assumed as graph output.
            -> _output_shapes : Graph should be frozen with add_shapes=True.
                                Or user can pass input shape dictionary optionally.
            -> DecodeJpeg, ResizeBilinear: These are dummy operators.
                                           Hence user should handle preprocessing outside.
            -> CheckNumerics: No implementation as of now for this.
                              Just copies input to output.

        Parameters
        ----------
        graph : tensorflow graph definition object
            The loaded tensorflow GraphDef

        layout : target layout to be used (Optional)
            NCHW only supported now to enable NHWC models on GPU.

        shape : Dictionary of input dimensions (Optional)
            Graph level input shape dictionary.

        outputs : List of output tensor names (Optional)
            if not specified then the last node is assumed as graph output.

        Returns
        -------
        mod : tvm.relay.Module
            The module that optimizations will be performed on.

        params : dict
            A dict of name: tvm.nd.array pairs, used as pretrained weights
        """
        try:
            from tensorflow.python.framework import tensor_util
        except ImportError as e:
            raise ImportError(
                "Unable to import tensorflow which is required {}".format(e))

        missing_operators = self._parse_import_prerequisites(graph)

        if missing_operators:
            raise NotImplementedError( \
                "The following operators are not implemented: {}".format(missing_operators))

        # First pass: record input shapes for Placeholder/Const nodes and
        # collect which control-flow primitives appear in each execution
        # frame (frame name = node-name prefix up to the last '/').
        control_flow_node_map = defaultdict(set)
        for node in graph.node:
            node_name_prefix = node.name.rsplit('/', 1)[0]
            control_flow_node_map[node_name_prefix].add(node.op)
            if node.op == 'Placeholder' or node.op == 'PlaceholderWithDefault':
                # Give priority to user argument.
                if shape and node.name in shape:
                    self._input_shapes[node.name] = list(shape[node.name])
                else:
                    self._input_shapes[node.name] = \
                        tensor_util.TensorShapeProtoToList(node.attr['shape'].shape)
                    # Unknown (-1) dims are forced to 1 with a warning.
                    for idx, dim in enumerate(self._input_shapes[node.name]):
                        if dim < 0:
                            self._input_shapes[node.name][idx] = 1
                            warnings.warn("Use 1 instead of -1 in shape of operator %s."
                                          % node.name)

                self._output_shapes[node.name] = [self._input_shapes[node.name]]
                attr = self._parse_attr(node.attr)
                self._nodes[node.name] = [_expr.var(node.name,
                                                    shape=self._input_shapes[node.name],
                                                    dtype=attr['dtype'].name)]

            # Ignore user's input shape for Non placeholder
            elif node.op == 'Const':
                tensor_value = node.attr['value'].tensor
                self._input_shapes[node.name] = \
                    tensor_util.TensorShapeProtoToList(tensor_value.tensor_shape)
                if shape and node.name in shape:
                    warnings.warn("Ignore the passed shape. Shape in graphdef "
                                  "will be used for operator %s." % node.name)

        # Second pass: parse the nodes to re-create TF graph using Relay operators.
        for node in graph.node:
            # Tensorflow doesn't have separate list for params extraction.
            # Operator name 'Const' is treated as a parameter to build params dict.

            input_shapes = {}
            attr = self._parse_attr(node.attr)

            # Variable converted to Const will not have only value attr
            if 'value' in attr and node.op == 'Const':
                self._output_shapes[node.name] = [self._input_shapes[node.name]]
            elif '_output_shapes' in attr:
                self._output_shapes[node.name] = \
                    [tensor_util.TensorShapeProtoToList(tshape) \
                    for tshape in attr['_output_shapes']]
            else:
                # Keep the list indexable to avoid key error.
                # Actual value will be filled after node creation.
                # Will infer shapes if the graph is not frozen with add_shapes=True
                self._output_shapes[node.name] = [None]

            if node.op == "Const":
                # All Const nodes are Param nodes, lets parse
                self._num_param += 1
                for key, value in node.attr.items():
                    self._parse_param(key, value, node.name, shape)
                if node.name not in self._nodes:
                    raise NotImplementedError( \
                        "Const {} couldn't be converted to Param.".format(node.name))

                attr = self._parse_attr(node.attr)

            elif node.op != "Placeholder" and node.op != 'PlaceholderWithDefault':
                # Pass the parsed shapes instead
                attr["_output_shapes"] = output_shapes = self._output_shapes[node.name]

                # Pass the node name too in attr
                attr["_node_name"] = node.name

                # Pass the target layout
                attr["_target_layout"] = layout

                #ToDo: Some of the tensorflow operators internaly maintain
                #execution layers and its output name will the layer number along with
                #graph node name.eg: Node name:- 'Model/RNN/cell_0/RnnCell', but the
                #output name will be 'Model/RNN/cell_0/RnnCell:0'. In this case,
                #the digit has to be ignored.
                if ":" in node.input[0]:
                    in_name, _ = node.input[0].split(':')
                    node.input[0] = in_name

                # Fill shapes for all inputs in a list
                inputs = []
                for i in node.input:
                    # Some TensorFlow operators internally maintain execution layers
                    # and their output name includes the layer number along with
                    # graph node name. E.g. the node name is 'Model/RNN/cell_0/RnnCell', but the
                    # output tensor name is 'Model/RNN/cell_0/RnnCell:0'. In this case,
                    # the number has to be ignored for single-output nodes.
                    # On the other hand, for multi-output nodes the number is the output index,
                    # and the lack of the number implies 0.
                    tensor_name = i.split(':')
                    node_name = tensor_name[0]
                    if node_name in self._nodes:
                        in_sym = self._nodes[node_name]
                        if isinstance(in_sym, _expr.TupleWrapper):
                            tensor_slot = int(tensor_name[1]) if len(tensor_name) > 1 else 0
                            in_sym = [in_sym[tensor_slot]]
                            input_shape = self._output_shapes[node_name][tensor_slot]
                        else:
                            tensor_slot = 0
                            input_shape = self._output_shapes[node_name][0]
                        inputs.append(in_sym[0])
                        input_shapes[in_sym[0]] = input_shape

                attr['_input_shapes'] = input_shapes

                if node.op in _control_flow_nodes:
                    op = self._convert_control_flow_operator(node, inputs,
                                                             attr,
                                                             control_flow_node_map)
                else:
                    op = self._convert_operator(node.op, inputs, attr, graph)

                # Check if op is converted to param
                if isinstance(op, np.ndarray):
                    self._params[node.name] = tvm.nd.array(op)
                    op = [_expr.var(node.name,
                                    shape=self._params[node.name].shape,
                                    dtype=self._params[node.name].dtype)]

                elif isinstance(op, (_expr.TupleWrapper, tuple, list)):
                    pass
                elif isinstance(op, _expr.Expr):
                    # Normalize single exprs to a one-element list so that all
                    # entries of self._nodes are indexable by output slot.
                    op = [op]
                else:
                    raise RuntimeError("unexpected type %s" % type(op))

                self._nodes[node.name] = op

                # Infer shapes even without specifying "add_shapes=True"
                if output_shapes == [None]:
                    out_shapes = [_infer_shape(node_item) for node_item in self._nodes[node.name]]
                    self._output_shapes[node.name] = out_shapes

                if self._output_shapes[node.name] and shape and node.name in shape:
                    assert self._output_shapes[node.name] == list(shape[node.name])

            # Infer shapes if passed explicitly
            node_output = self._nodes[node.name]
            if shape and (not self._output_shapes[node.name][0]
                          or -1 in self._output_shapes[node.name][0]):
                out_shapes = [_infer_shape(node_item) for node_item in node_output]
                self._output_shapes[node.name] = out_shapes

        # NOTE(review): when `outputs` is None, `node` and `op` intentionally
        # leak from the loop above — the graph's *last* node is taken as the
        # output (documented assumption in the docstring).
        out = []
        if outputs is None:
            if node.op == "Exit":
                out = [op[0].tuple_value]
            else:
                out = op
        else:
            for out_name in outputs:
                if ":" in out_name:
                    out_name, out_num = out_name.split(":")
                    out_num = int(out_num)
                    out.append(self._nodes[out_name][out_num])
                else:
                    out.append(self._nodes[out_name][0])

        #Add the RNN outputs also with 'head' nodes of the relay graph
        if self._num_rnn_layer:
            if len(self._out_rnn) == 1:
                out.append(self._out_rnn[0])
            else:
                out_rnn = _op.concatenate(self._out_rnn, axis=0)
                out.append(out_rnn)

        out = out[0] if len(out) == 1 else _expr.Tuple(out)
        func = _expr.Function(analysis.free_vars(out), out)
        self._mod["main"] = func
        return self._mod, self._params

    def _parse_import_prerequisites(self, graph):
        """ Calculate the named preconditions from TensorFlow `graph`.
            Return prerequisites for parsing:
            a. Set of operator names which don't have their mapping in TVM, i.e.
                which are not supported
        """
        missing_operators = set()
        for node in graph.node:
            if node.op == "Placeholder" or node.op == 'PlaceholderWithDefault':
                pass
            elif node.op == "Const":
                pass
            else:
                if any([node.op in t for t in [_identity_list, _convert_map,
                                               _convert_map_rnn,
                                               _control_flow_nodes]]):
                    pass
                else:
                    missing_operators.add(node.op)

        return missing_operators

    def _parse_param(self, key, value, name, shape):
        """Turn one attribute of a Const node into an entry of self._params
        and a matching relay var in self._nodes."""
        try:
            from tensorflow.python.framework import tensor_util
        except ImportError as e:
            raise ImportError(
                "Unable to import tensorflow which is required {}".format(e))

        if key == 'value':
            np_array = tensor_util.MakeNdarray(value.tensor)

            if np_array.dtype == np.dtype(object):
                # Object types are generally tensorflow DT_STRING (DecodeJpeg op).
                # Just leave it as placeholder.
                self._nodes[name] = [_expr.var(name, shape=shape[name], dtype='uint8')]
                return

            # Scalars are promoted to rank-1 arrays of length 1.
            array_ndim = len(np_array.shape)
            if array_ndim == 0:
                new_array = np.empty([1], dtype=np_array.dtype)
                new_array[0] = np_array
                self._params[name] = tvm.nd.array(new_array)
            else:
                self._params[name] = tvm.nd.array(np_array)

            self._nodes[name] = [_expr.var(name,
                                           shape=self._params[name].shape,
                                           dtype=self._params[name].dtype)]
        else:
            if key not in ('dtype', '_output_shapes', '_class'):
                raise NotImplementedError \
                    ("Other attributes for a Const(param) Node {} ? .".format(key))

    def _get_attr(self, buf):
        """Returns the value of the attr of this buf with the given `name`.

        Args:
          buf: attrvalue protobuf.

        Returns:
          The value of the attr, as a Python object.

        Raises:
          ValueError: If this op does not have an attr with the given `name`.
        """
        fields = ["s", "i", "f", "b", "type", "shape", "tensor", "func"]

        x = buf

        ret = []

        try:
            from tensorflow.python.framework import dtypes
        except ImportError as e:
            raise ImportError(
                "Unable to import tensorflow which is required {}".format(e))

        # Treat an empty oneof value as an empty list.
        if not x.WhichOneof("value"):
            return ret
        if x.HasField("list"):
            for f in fields:
                if getattr(x.list, f):
                    if f == "type":
                        # TF "type" codes are converted to tf.DType objects.
                        ret += [dtypes.as_dtype(x) for x in list(getattr(x.list, f))]
                    else:
                        ret += list(getattr(x.list, f))
        else:
            for f in fields:
                if x.HasField(f):
                    if f == "type":
                        ret = dtypes.as_dtype(getattr(x, f))
                    else:
                        ret = getattr(x, f)
        return ret

    def _parse_attr(self, attr_proto):
        """Convert a list of AttributeProto to a dict, with names as keys."""
        attrs = {}
        for key, value in attr_proto.items():
            attrs[key] = self._get_attr(value)

        return attrs

    def _convert_rnn_operator(self, op_name, inputs,
                              attrs, params, graph, convert_map):
        """Convert RNN and its variant operators to Relay operators.
        This converter read the input states of each layers and
        also maintain the output states of each layer in a list.

        Parameters
        ----------
        op_name : str
            Operator name, such as LSTMBlockCell
        inputs : list of relay.Expr
            List of input symbols.
        attrs : dict
            Dict of operator attributes
        params : dict
            List of pretrained weights and bias
        graph : Tensorflow graph object
            Graph is to find the number of upcoming same operator to
            calculate the number of layers.
        convert_map : dict
            Dict of name : callable, where name is the op's name that
            require conversion to relay, callable are functions which
            take attrs and return (new_op_name, new_attrs)

        Returns
        -------
        sym : relay.Expr
            Converted relay.Expr
        """
        # The RecurrentNetworks helper is created lazily on the first RNN op.
        if not self._num_rnn_layer:
            self._out_rnn = []
            self.rnn = RecurrentNetworks(self._nodes, self._out_rnn, graph, convert_map)
            self._num_rnn_layer = True
        sym = self.rnn.process_op(op_name, inputs, attrs, params)
        return sym

    def _convert_control_flow_operator(self, node, inputs, attrs, control_flow_node_map):
        """
        Convert the Relay control flow primitive into corresponding component
        of a Relay control flow construct, i.e. `tf.cond` and `tf.while_loop`
        are converted in Relay `If` and recusrive call, respectively.

        Parameters
        ----------
        node: TensorFlow graph node object.
            A TensorFlow graph node object.

        inputs : List[tvm.relay.Expr]
            List of input symbols.

        attrs : Dict[tvm.Attrs]
            Dict of operator attributes.

        control_flow_node_map : Dict[str, Set[str]]
            A dictionary contains the execution frame name to primitives
            mapping.

        Returns
        -------
        op : tvm.relay.Expr
            Converted relay expression.
        """
        node_name_prefix = node.name.rsplit('/', 1)[0]
        if node.op == "Merge":
            if _in_while_loop(control_flow_node_map, node_name_prefix):
                # Loop-frame Merge: pass the value through and start tracking
                # this frame as a Loop.
                op = self._nodes[node.input[0]]
                self._loops[node_name_prefix] = Loop()
            else:
                # Cond-frame Merge: both branches are now known; finish the If.
                if len(self._branches) == 0:
                    raise RuntimeError("Cannot find a created "
                                       "conditional for merge node")
                branch = self._branches[node_name_prefix]
                false_br = self._nodes[node.input[0]]
                true_br = self._nodes[node.input[1]]
                assert len(true_br) == 1
                assert len(false_br) == 1
                branch.true_branch = true_br[0]
                branch.false_branch = false_br[0]
                op = [branch.if_node()]
        elif node.op == "Exit":
            loop = self._loops[node_name_prefix]
            exit_name = node.name.split('/')[-1]
            assert str.startswith(exit_name, 'Exit')

            # TensorFlow has differen naming convention on different
            # versions.  The suffix digits select which loop variable this
            # Exit node returns ('Exit', 'Exit_1', ... or 'Exit1', ...).
            if '_' in exit_name:
                exit_number = int("0" + exit_name[5:])
            else:
                exit_number = int("0" + exit_name[4:])

            expr = loop.while_loop()
            op = _expr.TupleGetItem(expr, exit_number)
        elif node.op == "Enter":
            op = self._nodes[node.input[0]]
        elif node.op == "LoopCond":
            op = self._nodes[node.input[0]]
            assert len(op) == 1
            self._loops[node_name_prefix].cond = op[0]
        elif node.op == "Switch":
            op = self._nodes[node.input[0]]
            assert len(op) == 1
            if _in_while_loop(control_flow_node_map, node_name_prefix):
                self._loops[node_name_prefix].loop_vars.append(op[0])
            else:
                if node_name_prefix not in self._branches:
                    self._branches[node_name_prefix] = Branch()
                chk_op = _infer_type(op[0])
                self._branches[node_name_prefix].cond = chk_op
        elif node.op == "NextIteration":
            op = self._nodes[node.input[0]]
            assert len(op) == 1
            self._loops[node_name_prefix].body.append(op[0])
        else:
            raise Exception("Cannot identify control flow operator: " +
                            "{}".format(node.op))

        return op

    def _convert_operator(self, op_name, inputs, attrs,
                          graph, identity_list=None, convert_map=None):
        """Convert from Tensorflow operator to relay operator.
        The converter must specify conversions explicitly for incompatible name, and
        apply handlers to operator attributes.

        Parameters
        ----------
        op_name : str
            Operator name, such as Conv2D, AvgPool
        inputs : list of relay.op
            List of input symbols.
        attrs : dict
            Dict of operator attributes
        identity_list : list
            List of operators that don't require conversion
        convert_map : dict
            Dict of name : callable, where name is the op's name that
            require conversion to relay, callable are functions which
            take attrs and return (new_op_name, new_attrs)

        Returns
        -------
        sym : relay.op
            Converted relay operator
        """
        identity_list = identity_list if identity_list else _identity_list
        convert_map = convert_map if convert_map else _convert_map
        convert_map_rnn = _convert_map_rnn
        if op_name in identity_list:
            sym = _get_relay_op(op_name)(*inputs, **attrs)
        elif op_name in convert_map:
            sym = convert_map[op_name](inputs, attrs, self._params)
        elif op_name in convert_map_rnn:
            sym = self._convert_rnn_operator(op_name, inputs, attrs,
                                             self._params, graph,
                                             convert_map_rnn)
        else:
            raise NotImplementedError("Operator {} not implemented.".format(op_name))
        return sym
def from_tensorflow(graph, layout="NHWC", shape=None, outputs=None):
    """Load tensorflow graph which is a python tensorflow graph object into relay.

    The companion parameters will be handled automatically.

    Parameters
    ----------
    graph : GraphDef object
        Tensorflow GraphDef
    layout : target layout to be used (Optional)
        NCHW only supported now to enable NHWC models on GPU.
    shape : Dictionary of input dimensions (Optional)
        Graph level input shape dictionary.
    outputs : List of output tensor names (Optional)
        if not specified then the last node is assumed as graph output.

    Returns
    -------
    mod : tvm.relay.Module
        The module that optimizations will be performed on.
    params : dict of str to tvm.ndarray
        Dict of converted parameters stored in tvm.ndarray format
    """
    converter = GraphProto()
    # GraphProto.from_tensorflow already returns the (module, params) pair.
    return converter.from_tensorflow(graph, layout, shape, outputs)
| {
"content_hash": "3ec570b201f151088f040653b273fb36",
"timestamp": "",
"source": "github",
"line_count": 2407,
"max_line_length": 99,
"avg_line_length": 39.498130452845864,
"alnum_prop": 0.5209104678559408,
"repo_name": "mlperf/training_results_v0.7",
"id": "6fff40a922940ad6cdd0c4e8f8746689e94bb5e7",
"size": "95952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/python/tvm/relay/frontend/tensorflow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Awk",
"bytes": "14530"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "172914"
},
{
"name": "C++",
"bytes": "13037795"
},
{
"name": "CMake",
"bytes": "113458"
},
{
"name": "CSS",
"bytes": "70255"
},
{
"name": "Clojure",
"bytes": "622652"
},
{
"name": "Cuda",
"bytes": "1974745"
},
{
"name": "Dockerfile",
"bytes": "149523"
},
{
"name": "Groovy",
"bytes": "160449"
},
{
"name": "HTML",
"bytes": "171537"
},
{
"name": "Java",
"bytes": "189275"
},
{
"name": "JavaScript",
"bytes": "98224"
},
{
"name": "Julia",
"bytes": "430755"
},
{
"name": "Jupyter Notebook",
"bytes": "11091342"
},
{
"name": "Lua",
"bytes": "17720"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "215967"
},
{
"name": "Perl",
"bytes": "1551186"
},
{
"name": "PowerShell",
"bytes": "13906"
},
{
"name": "Python",
"bytes": "36943114"
},
{
"name": "R",
"bytes": "134921"
},
{
"name": "Raku",
"bytes": "7280"
},
{
"name": "Ruby",
"bytes": "4930"
},
{
"name": "SWIG",
"bytes": "140111"
},
{
"name": "Scala",
"bytes": "1304960"
},
{
"name": "Shell",
"bytes": "1312832"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "Starlark",
"bytes": "69877"
},
{
"name": "TypeScript",
"bytes": "243012"
}
],
"symlink_target": ""
} |
from .node import Node
class Numeral(Node):
    r"""Node for LaTeX numeral counters such as \arabic{chapter}."""

    def __init__(self):
        Node.__init__(self)

    def chars(self, nobrackets=False, **kwargs):
        """Render child nodes; wrap the result in braces unless nobrackets."""
        body = ''.join(child.chars() for child in self.children)
        if nobrackets:
            return body
        return '{' + body + '}'
| {
"content_hash": "80a321e060d5a6d1c813133ed620a321",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 48,
"avg_line_length": 19.75,
"alnum_prop": 0.4936708860759494,
"repo_name": "dimbyd/latextree",
"id": "08b1dca33ec5c0ce0bfe4861f666085c99fbe003",
"size": "409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "latextree/parser/numeral.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "38328"
},
{
"name": "HTML",
"bytes": "56464"
},
{
"name": "JavaScript",
"bytes": "2306"
},
{
"name": "Python",
"bytes": "215057"
},
{
"name": "TeX",
"bytes": "43266"
}
],
"symlink_target": ""
} |
from distutils.core import setup
from txbert import __version__ as version
# Distribution metadata for the txbert package.
# NOTE(review): uses distutils-style keyword spacing; `version` is imported
# from the txbert package itself at the top of this file.
setup(
    name = 'txbert',
    version = version,
    description = 'Twisted BERT-RPC Library',
    author = 'Jonathan Stoppani',
    author_email = 'jonathan.stoppani@gmail.com',
    url = 'http://garetjax.info',
    packages = ['txbert'],
    license='MIT',
    classifiers = [
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| {
"content_hash": "489b469a511c439ba9019b055404386b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 71,
"avg_line_length": 28.5,
"alnum_prop": 0.6263157894736842,
"repo_name": "GaretJax/txBERT-RPC",
"id": "e7affe2e7ddbaf1be5f5711626891329ee82e953",
"size": "593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "396"
},
{
"name": "Python",
"bytes": "28170"
},
{
"name": "Ruby",
"bytes": "221"
}
],
"symlink_target": ""
} |
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Chopsticks'
copyright = '2017, Daniel Pope'
author = 'Daniel Pope'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Prefer the Read the Docs theme when it is installed; otherwise fall back
# to Sphinx's built-in default theme.
try:
    import sphinx_rtd_theme  # imported only to probe availability
    html_theme = 'sphinx_rtd_theme'
except ImportError:
    html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'Chopsticks v0.4'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Chopsticksdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Chopsticks.tex', 'Chopsticks Documentation',
'Daniel Pope', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'chopsticks', 'Chopsticks Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Chopsticks', 'Chopsticks Documentation',
author, 'Chopsticks', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| {
"content_hash": "d1d84db09a71b09742b8d77b8e6b0a59",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 80,
"avg_line_length": 28.193650793650793,
"alnum_prop": 0.6895619862628083,
"repo_name": "lordmauve/chopsticks",
"id": "21c2b537d5ab761e0b72455f950e3090918bbaba",
"size": "9958",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "97824"
},
{
"name": "Shell",
"bytes": "107"
}
],
"symlink_target": ""
} |
import numpy as np
def predict(features, weights):
    # Predicted outputs: matrix product of the design matrix and the weights.
    return np.dot(features ,weights)
def Ridge_Regression_gradient_descent(features, output, initial_weights, step_size, l2_penalty, tolerance, max_iterations=100):
    """Fit ridge-regression weights by batch gradient descent.

    Minimizes RSS + l2_penalty * ||weights[1:]||^2.  The intercept
    (weights[0], multiplying the constant feature) is NOT regularized;
    this now holds for BOTH the weight update and the stopping criterion
    (the original code excluded the intercept from the update but
    included its penalty in the gradient-magnitude test, so the two
    could disagree when l2_penalty > 0).

    Parameters
    ----------
    features : ndarray, shape (n, d)
        Design matrix; column 0 is assumed to be the constant feature.
    output : ndarray
        Observed outputs, shaped to broadcast with the predictions.
    initial_weights : array-like, shape matching the weight vector.
    step_size : float
        Gradient-descent learning rate.
    l2_penalty : float
        Ridge regularization strength.
    tolerance : float
        Stop when the gradient magnitude falls below this value.
    max_iterations : int, optional
        Upper bound on the number of weight updates (the original ran
        max_iterations + 1 updates due to an off-by-one, fixed here).

    Returns
    -------
    ndarray
        The fitted weights.
    """
    weights = np.array(initial_weights, dtype=float)
    print('Starting Gradient descent')
    iteration = 0
    for iteration in range(1, max_iterations + 1):
        predictions = np.dot(features, weights)
        errors = output - predictions
        product = np.dot(features.T, errors)
        # Gradient of RSS + l2 * ||w[1:]||^2; intercept is unpenalized.
        penalty_grad = 2.0 * l2_penalty * weights
        penalty_grad[0] = 0.0
        gradient = -2.0 * product + penalty_grad
        weights = weights - step_size * gradient
        gradient_magnitude = np.sqrt(np.sum(gradient * gradient))
        if gradient_magnitude < tolerance:
            break
    print('Stopping at iteration: ' + str(iteration))
    return weights
def get_residual_sum_of_squares(features, weights, output):
    """Residual sum of squares of the model's predictions on this data."""
    errors = predict(features, weights) - output
    return np.sum(errors ** 2)
def extract_data_from_features(data, features, output):
    """Build a design matrix (with leading constant column) and output array.

    Parameters
    ----------
    data : DataFrame-like
        Must support column assignment and list-of-columns selection.
        NOTE: this function MUTATES `data` by adding/overwriting a
        'constant' column of 1.0 (intercept feature).
    features : list of str
        Names of the feature columns to extract.
    output : str or None
        Name of the output column; when None, an empty list is returned
        in place of the output array (matches original behavior).

    Returns
    -------
    (features_matrix, output_array) : tuple
        `features_matrix` is an ndarray whose first column is all ones.
    """
    data['constant'] = 1.0
    # Work on a new list so the caller's `features` list is untouched.
    all_features = ['constant'] + features
    features_matrix = np.array(data[all_features])
    if output is not None:  # was `output != None`; identity test is the idiom
        output_array = np.array(data[output])
    else:
        output_array = []
    return (features_matrix, output_array)
"content_hash": "5bbc5da2563808b094456ee8800eb629",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 127,
"avg_line_length": 33.8936170212766,
"alnum_prop": 0.6559949780288763,
"repo_name": "nonlining/SimpleML",
"id": "9162d52a1b683a5de954efcb0554d92e5faa07af",
"size": "1918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Regression/RidgeRegression.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46673"
}
],
"symlink_target": ""
} |
import time
import random
import sys
from urllib import FancyURLopener
from jinja2 import Environment, PackageLoader
from bs4 import BeautifulSoup
# NOTE(review): Python 2 script (urllib.FancyURLopener, print statements).
# Scrapes paginated search results from zakupki.gov.ru and renders them
# into a single HTML report via a Jinja2 template.

# Fancy User-Agent string (some servers reject the default urllib agent)
class MyOpener(FancyURLopener):
    version = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.4 (KHTML, like Gecko) Chrome/22.0.1229.79 Safari/537.4'

myopener = MyOpener()

# Prepare template
env = Environment(loader=PackageLoader('__main__', 'templates'))
template = env.get_template('zakupki.tmpl')

# Prepare array for data storage
entries = []

# Process all pages
for pagenum in range(1, 100):
    # Get page handler
    print 'Fetching page ' + str(pagenum) + '...',
    sys.stdout.flush()
    f = myopener.open('http://zakupki.gov.ru/pgz/public/action/search/region/result?rfSubjects=5277335&index='
                      + str(pagenum)
                      + '&sortField=lastEventDate&descending=true&tabName=AP&lotView=false&pageX=&pageY=');
    print 'Done!'
    # Parse page contents
    doc = BeautifulSoup(f)
    # Get content
    table = doc.find('table', 'searchResultTable iceDatTbl')
    for row in table.find_all('tr', 'searchResultTableRow'):
        entry = {}
        # Extract description type
        # NOTE(review): `type` shadows the builtin within this loop body.
        type = row.find('span', 'blueBold')
        entry['description_type'] = type.string.strip()
        # Extract description number and link
        entry['description_number_href'] = type.parent.a.get('href')
        entry['description_number'] = type.parent.a.span.string.strip()
        # Extract description text and link
        entry['description_text_href'] = type.parent.parent.parent.select('.iceOutLnk')[0].get('href')
        entry['description_text'] = type.parent.parent.parent.select('.iceOutLnk')[0].string.strip()
        # Extract description org text and link
        entry['description_org_href'] = type.parent.parent.parent.select('.iceCmdLnk')[1].get('href')
        entry['description_org'] = type.parent.parent.parent.select('.iceCmdLnk')[1].span.string.strip()
        # Extract published and updated dates
        dates = row.find_all('td', 'iceDatTblCol searchResultTableCol searchResultColumn tableColumn70')
        entry['published'] = dates[0].span.string.strip()
        entry['updated_href'] = dates[1].a.get('href')
        entry['updated'] = dates[1].a.span.string.strip()
        # Extract price
        entry['price'] = row.find('td', 'iceDatTblCol searchResultTableCol searchResultColumn tableColumn105').span.string.strip()
        # Extract additional info (document links rendered as raw HTML)
        entry['additional'] = u''
        for link in row.find('table', 'tableColumn70').find_all('a'):
            entry['additional'] += '<a href="http://zakupki.gov.ru' + link.get('href') + '" target="_blank">' + link.span.string + '</a><br/>'
        # Add new entry
        entries.append(entry)
    # Random pause to confuse checking tools
    pause = random.randint(1, 2)
    print 'Sleeping for ' + str(pause) + ' seconds...',
    sys.stdout.flush()
    time.sleep(pause)
    print 'OK'

# Output the rendered report
report = open('zakupki.html', 'w')
report.write(template.render(entries = entries).encode('utf-8'))
report.close()
| {
"content_hash": "96ebfd511849cecfa48fbb4d0d010c85",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 139,
"avg_line_length": 36.535714285714285,
"alnum_prop": 0.6663408276311502,
"repo_name": "vince-stark/grab-zakupki-gov",
"id": "b2aaccf45db7ad9728ed6b0e0ae2c217be0c23bf",
"size": "3092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grab-zakupki-gov.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3092"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
VERSION = '0'
long_description = ''

# Distribution metadata for the banzai package.
setup(name='banzai',
      py_modules=['banzai'],
      version=VERSION,
      # BUGFIX: `exclude` expects an iterable of glob patterns; passing the
      # bare string "tests" iterated its characters, excluding any package
      # named 't', 'e' or 's' instead of the tests package.
      packages=find_packages(exclude=["tests", "tests.*"]),
      author='Thom Neale',
      author_email='twneale@gmail.com',
      license='BSD',
      url='http://github.com/twneale/banzai/',
      description='Tool for pipelining tasks together through a CLI interface',
      long_description=long_description,
      platforms=['any'],
      )
| {
"content_hash": "f1e48a795cd167f6bb8ceb92d223eeec",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 79,
"avg_line_length": 24.4,
"alnum_prop": 0.6536885245901639,
"repo_name": "twneale/banzai",
"id": "a5c3c1527c013f3295859ca5eba6ac08904dc8c7",
"size": "510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "44095"
},
{
"name": "Shell",
"bytes": "144"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from board.models import UserProfile
def create_user_profile(sender, instance, created, **kwargs):
    """post_save receiver: ensure each newly created User has a UserProfile.

    The exists() check guards against creating a duplicate profile if one
    was already made for this user by other code.
    """
    if created and not UserProfile.objects.filter(user=instance).exists():
        # Parenthesized so it works on both Python 2 and 3 (was a py2-only
        # print statement).  NOTE(review): debug output; prefer logging.
        print("USER CREATED")
        UserProfile.objects.create(user=instance)


# dispatch_uid keeps the receiver from being connected more than once.
post_save.connect(create_user_profile, sender=User, dispatch_uid="create_user_profile")
| {
"content_hash": "54bf84de15fa78e20a0e05c7fa552632",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 87,
"avg_line_length": 39.54545454545455,
"alnum_prop": 0.767816091954023,
"repo_name": "cypreess/PyrateDice",
"id": "f45748afaeab339dfce123e4ec95195251965340",
"size": "435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game_server/game_server/board/signals.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""
regobj: Pythonic object-based access to the Windows Registry
This module provides a thin wrapper around the standard _winreg module,
allowing easier and more pythonic access to the Windows Registry.
All access to the registry is done through Key objects, which (surprise!)
represent a specific registry key. To begin, there are pre-existing Key
objects defined for the HKEY_* root keys, using both long and short names:
>>> HKEY_CURRENT_USER
<regobj Key 'HKEY_CURRENT_USER'>
>>> HKLM
<regobj Key 'HKEY_LOCAL_MACHINE'>
Traversing and creating subkeys is then as simple as ordinary python
attribute access:
>>> HKCU.Software.Microsoft.Windows
<regobj Key 'HKEY_CURRENT_USER\Software\Microsoft\Windows'>
>>> HKCU.Software.MyTests
Traceback (most recent call last):
...
AttributeError: subkey 'MyTests' does not exist
>>> HKCU.Software.MyTests = Key
>>> HKCU.Software.MyTests
<regobj Key 'HKEY_CURRENT_USER\Software\MyTests'>
>>> del HKCU.Software.MyTests
Of course, for keys that don't happen to be named like python identifiers,
there are also methods that can accomplish the same thing. To help reduce
visual clutter, calling a key object is a shorthand for attribute lookup:
>>> HKCU.Software.set_subkey("my-funny-key",Key)
>>> HKCU.Software.get_subkey("my-funny-key").SubKey = Key
>>> HKCU("Software\\my-funny-key\\SubKey")
<regobj Key 'HKEY_CURRENT_USER\Software\my-funny-key\SubKey'>
>>> HKCU.Software.del_subkey("my-funny-key")
The individual values contained in a key can be accessed using standard
item access syntax. The returned objects will be instances of the Value
class, with 'name', 'type' and 'data' attributes:
>>> HKCU.Software.Microsoft.Clock["iFormat"]
<regobj Value (iFormat,1,REG_SZ)>
>>> HKCU.Software.Microsoft.Clock["iFormat"].name
'iFormat'
>>> print(HKCU.Software.Microsoft.Clock["iFormat"].data)
1
>>> print(type(HKCU.Software.Microsoft.Clock["iFormat"].data) is type(b'1'.decode('utf8')))
True
>>> HKCU.Software.Microsoft.Clock["iFormat"].type
1
>>> HKCU.Software.Microsoft.Clock["notavalue"]
Traceback (most recent call last):
...
KeyError: "no such value: 'notavalue'"
Iterating over a key generates all the contained values, followed by
all the contained subkeys. There are also methods to seperately iterate
over just the values, and just the subkeys:
>>> winK = HKCU.Software.Microsoft.Windows
>>> winK["testvalue"] = 42
>>> for obj in winK:
... print(obj)
<regobj Value (testvalue,42,REG_DWORD)>
<regobj Key 'HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion'>
<regobj Key 'HKEY_CURRENT_USER\Software\Microsoft\Windows\Shell'>
<regobj Key 'HKEY_CURRENT_USER\Software\Microsoft\Windows\ShellNoRoam'>
>>> [k.name for k in winK.subkeys()]
['CurrentVersion', 'Shell', 'ShellNoRoam']
>>> [v.data for v in winK.values()]
[42]
>>> del winK["testvalue"]
These iterators also provide efficient implementations of the __contains__
and __len__ methods, so they can be used as follows:
>>> "Shell" in HKCU.Software.Microsoft.Windows
True
>>> "Shell" in HKCU.Software.Microsoft.Windows.subkeys()
True
>>> "Shell" in HKCU.Software.Microsoft.Windows.values()
False
>>> len(HKCU.Software.Microsoft.Windows)
3
>>> len(HKCU.Software.Microsoft.Windows.values())
0
Finally, there is powerful support for specifying key and value structures
at creation time. The simplest case has already been demonstrated, where
setting a subkey to the Key class or to None will create it without any data:
>>> HKCU.Software.MyTests = None
>>> len(HKCU.Software.MyTests)
0
If a subkey is assigned an existing key object, the data from that key is
copied into the subkey:
>>> HKCU.Software.MyTests = HKCU.Software.Microsoft.Windows
>>> len(HKCU.Software.MyTests)
3
>>> [k.name for k in HKCU.Software.MyTests]
['CurrentVersion', 'Shell', 'ShellNoRoam']
>>> del HKCU.Software.MyTests
If a subkey is assigned a dictionary, the structure of that dictionary is
copied into the subkey. Scalar values become key values, while nested
dictionaries create subkeys:
>>> HKCU.Software.MyTests = {"val1":7, "stuff":{"a":1,"c":2,"e":3}}
>>> len(HKCU.Software.MyTests)
2
>>> [v.name for v in HKCU.Software.MyTests.values()]
['val1']
>>> [k.name for k in HKCU.Software.MyTests.subkeys()]
['stuff']
>>> len(HKCU.Software.MyTests.stuff)
3
>>> del HKCU.Software.MyTests
Any other value assigned to a subkey will become the default value for
that key (i.e. the value with name ""):
>>> HKCU.Software.MyTests = "dead parrot"
>>> print(HKCU.Software.MyTests[""].data)
dead parrot
>>> print(type(HKCU.Software.MyTests[""].data) is type(b'dead parrot'.decode('utf8')))
True
>>> del HKCU.Software.MyTests
And that's that - enjoy!
"""
__ver_major__ = 0
__ver_minor__ = 2
__ver_patch__ = 2
__ver_sub__ = ""
__version__ = "%d.%d.%d%s" % (__ver_major__,__ver_minor__,
__ver_patch__,__ver_sub__)
import sys
PY3 = sys.hexversion > 0x03000000
if PY3:
import winreg as _winreg
else:
import _winreg
# Import type constants into our namespace.
# Each REG_* constant is re-exported at module level, and TYPES records
# the reverse mapping (numeric value -> constant name) for display.
TYPES = {}
TYPE_NAMES = ("REG_SZ","REG_RESOURCE_LIST","REG_NONE","REG_MULTI_SZ","REG_LINK",
              "REG_EXPAND_SZ","REG_DWORD_BIG_ENDIAN","REG_DWORD_LITTLE_ENDIAN",
              "REG_DWORD","REG_BINARY")
for nm in TYPE_NAMES:
    val = getattr(_winreg,nm)
    globals()[nm] = val
    TYPES[val] = nm

# Import SAM permission constants into our namespace, same scheme:
# KEY_* re-exported at module level, SAMS maps value -> name.
SAMS = {}
SAM_NAMES = ("KEY_ALL_ACCESS","KEY_CREATE_LINK","KEY_CREATE_SUB_KEY",
             "KEY_EXECUTE","KEY_NOTIFY","KEY_QUERY_VALUE","KEY_READ",
             "KEY_SET_VALUE","KEY_WRITE","KEY_ENUMERATE_SUB_KEYS")
for nm in SAM_NAMES:
    val = getattr(_winreg,nm)
    globals()[nm] = val
    SAMS[val] = nm
class Key(object):
"""Class representing a registry key.
Each key has a name and a parent key object. Its values can be
accessed using standard item access syntax, while its subkeys can
be accessed using standard attribute access syntax.
Normally code would not create instance of this class directly.
Rather, it would begin with one of the root key objects defined in
this module (e.g. HKEY_CURRENT_USER) and then traverse it to load
the appropriate key.
"""
def __init__(self,name,parent,sam=KEY_READ,hkey=None):
"""Construct a new Key object.
The key's name and parent key must be specified. If the given
name is a backslash-separated path it will be processed one
component at a time and the intermediate Key objects will be
transparently instantiated.
The optional argument 'sam' gives the security access mode to use
for the key, defaulting to KEY_READ. It more permissions are required
for an attempted operation, we attempt to upgrade the permission
automatically.
If the optional argument 'hkey' is given, it is the underlying
key id to be used when accessing the registry. This should really
only be used for bootstrapping the root Key objects.
"""
names = [nm for nm in name.split("\\") if nm]
if len(names) == 0:
raise ValueError("a non-empty key name is required")
for pname in names[:-1]:
parent = Key(pname,parent)
self.name = names[-1]
self.parent = parent
self.sam = sam
if hkey is not None:
self.hkey = hkey
def _get_hkey(self):
try:
return self.__dict__["hkey"]
except KeyError:
self.hkey = _winreg.OpenKey(self.parent.hkey,self.name,0,self.sam)
return self.hkey
def _del_hkey(self):
if self.parent is not None:
try:
_winreg.CloseKey(self.__dict__["hkey"])
except KeyError:
pass
try:
del self.__dict__["hkey"]
except KeyError:
pass
def get_subkey(self,name):
"""Retreive the subkey with the specified name.
If the named subkey is not found, AttributeError is raised;
this is for consistency with the attribute-based access notation.
"""
subkey = Key(name,self)
try:
hkey = subkey.hkey
except WindowsError:
raise AttributeError("subkey '%s' does not exist" % (name,))
return subkey
def set_subkey(self,name,value=None):
"""Create the named subkey and set its value.
There are several different ways to specify the new contents of
the named subkey:
* if 'value' is the Key class, a subclass thereof, or None, then
the subkey is created but not populated with any data.
* if 'value' is a key instance, the data from that key will be
copied into the new subkey.
* if 'value' is a dictionary, the dict's keys are interpreted as
key or value names and the corresponding entries are created
within the new subkey - nested dicts create further subkeys,
while scalar values create values on the subkey.
* any other value will be converted to a Value object and assigned
to the default value for the new subkey.
"""
self.sam |= KEY_CREATE_SUB_KEY
subkey = Key(name,self)
try:
subkey = self.get_subkey(name)
except AttributeError:
_winreg.CreateKey(self.hkey,name)
subkey = self.get_subkey(name)
if value is None:
pass
elif issubclass(type(value),type) and issubclass(value,Key):
pass
elif isinstance(value,Key):
for v in value.values():
subkey[v.name] = v
for k in value.subkeys():
subkey.set_subkey(k.name,k)
elif isinstance(value,dict):
for (nm,val) in value.items():
if isinstance(val,dict):
subkey.set_subkey(nm,val)
elif isinstance(val,Key):
subkey.set_subkey(nm,val)
elif issubclass(type(val),type) and issubclass(val,Key):
subkey.set_subkey(nm,val)
else:
subkey[nm] = val
else:
if not isinstance(value,Value):
value = Value(value)
subkey[value.name] = value
def del_subkey(self,name):
"""Delete the named subkey, and any values or keys it contains."""
self.sam |= KEY_WRITE
subkey = self.get_subkey(name)
subkey.clear()
_winreg.DeleteKey(subkey.parent.hkey,subkey.name)
def close(self):
"""Release underlying resources associated with this key."""
del self.hkey
def flush(self):
"""Ensure that the key's data is flushed to disk.
Quoting the _winreg documentation:
It is not necessary to call FlushKey() to change a key. Registry
changes are flushed to disk by the registry using its lazy flusher.
Registry changes are also flushed to disk at system shutdown.
Unlike CloseKey(), the FlushKey() method returns only when all the
data has been written to the registry. An application should only
call FlushKey() if it requires absolute certainty that registry
changes are on disk.
If you don't know whether a FlushKey() call is required, it
probably isn't.
"""
_winreg.FlushKey(self.hkey)
def __eq__(self,key):
try:
return self.hkey == key.hkey
except AttributeError:
False
def __str__(self):
return "<regobj Key '%s'>" % (self.path,)
def __repr__(self):
return str(self)
def __call__(self,name):
"""Calling accesses a subkey
This is provided as a convenient shorthand for subkey names that
are not valid python identifiers.
"""
return self.get_subkey(name)
def __getattr__(self,name):
"""Attribute access returns a subkey."""
if name == "hkey":
return self._get_hkey()
elif name == "path":
if self.parent is None:
return self.name
else:
return self.parent.path + "\\" + self.name
else:
return self.get_subkey(name)
def __setattr__(self,name,value):
"""Attribute assignment creates a new subkey."""
if name == "sam":
sam = self.__dict__.get("sam",0)
if sam|value != sam:
del self.hkey
self.__dict__[name] = value
elif name == "path":
raise AttributeError("'path' cannot be set")
elif name in ("name","parent","hkey",):
self.__dict__[name] = value
else:
self.set_subkey(name,value)
def __delattr__(self, name):
    """Deleting an attribute deletes the subkey.

    'hkey' is special-cased: deleting it drops the cached registry
    handle rather than removing a subkey named "hkey".
    """
    if name == "hkey":
        self._del_hkey()
    else:
        self.del_subkey(name)
def __getitem__(self, name):
    """Return the named registry value of this key as a Value instance.

    Raises KeyError when no value of that name exists.
    """
    self.sam |= KEY_QUERY_VALUE
    try:
        data, data_type = _winreg.QueryValueEx(self.hkey, name)
    except WindowsError:
        raise KeyError("no such value: '%s'" % (name,))
    return Value(data, name, data_type)
def __setitem__(self, name, value):
    """Create or overwrite the named registry value on this key.

    Plain Python objects are wrapped in a Value (which infers the
    registry type); Value instances are stored as given.
    """
    self.sam |= KEY_SET_VALUE
    val = value if isinstance(value, Value) else Value(value, name)
    _winreg.SetValueEx(self.hkey, name, 0, val.type, val.data)
def __delitem__(self, name):
    """Delete the named registry value; KeyError if it does not exist."""
    self.sam |= KEY_SET_VALUE
    try:
        _winreg.DeleteValue(self.hkey, name)
    except WindowsError:
        raise KeyError("no such value: '%s'" % (name,))
def __contains__(self, name):
    """True if `name` matches either a value or a subkey of this key."""
    if name in self.values():
        return True
    return name in self.subkeys()
def __len__(self):
    """Total number of subkeys plus values held by this key."""
    num_subkeys, num_values = _winreg.QueryInfoKey(self.hkey)[:2]
    return num_subkeys + num_values
def __iter__(self):
    """Iterate over all of the key's values first, then its subkeys."""
    for item in self.values():
        yield item
    for item in self.subkeys():
        yield item
def clear(self):
    """Delete every value and every subkey contained in this key.

    Iteration snapshots (list(...)) are taken because the underlying
    key is mutated while we delete.
    """
    self.sam |= KEY_WRITE
    for value in list(self.values()):
        del self[value.name]
    for child in list(self.subkeys()):
        self.del_subkey(child.name)
def subkeys(self):
    """Iterator over the subkeys of this key."""
    # Enumerating children requires KEY_ENUMERATE_SUB_KEYS access rights.
    self.sam |= KEY_ENUMERATE_SUB_KEYS
    return SubkeyIterator(self)
def values(self):
    """Iterator over the key's values."""
    return ValueIterator(self)
class Value(object):
    """A single registry value: a (name, type, data) triple.

    The empty name "" denotes the key's default value.  `type` is one
    of this module's REG_* constants; when omitted it is inferred from
    the Python type of `data` (ints map to REG_DWORD, None to REG_NONE,
    everything else to REG_SZ).
    """

    _DWORD_MAX_SIGNED = (1 << 31) - 1
    # NOTE(review): this looks like it was meant to be -(1 << 31); as
    # written it also admits values down to -2**32 — confirm intent.
    _DWORD_MIN_SIGNED = -1 * (1 << 32)
    _DWORD_MAX_UNSIGNED = (1 << 32) - 1

    def __init__(self, data=None, name="", type=None):
        if type is None:
            type = self._default_type(data)
        if data is not None and type == REG_DWORD:
            # _winreg treats DWORDs as *signed* 32-bit ints.  Accept
            # unsigned input here and fold anything above the signed
            # maximum into the equivalent negative value so the store
            # round-trips (it still reads back negative, alas).
            if data < self._DWORD_MIN_SIGNED:
                raise ValueError("DWORD value too small: %s" % (data,))
            if data > self._DWORD_MAX_UNSIGNED:
                raise ValueError("DWORD value too large: %s" % (data,))
            if data > self._DWORD_MAX_SIGNED:
                data = int(data - self._DWORD_MAX_UNSIGNED - 1)
        self.name = name
        self.data = data
        self.type = type

    def __str__(self):
        return "<regobj Value (%s,%s,%s)>" % (self.name, self.data,
                                              TYPES[self.type])

    def __repr__(self):
        return self.__str__()

    def _default_type(self, data):
        """Guess the registry type constant for a raw Python value."""
        if isinstance(data, int) or (not PY3 and isinstance(data, long)):
            return REG_DWORD
        if data is None:
            return REG_NONE
        return REG_SZ
class SubkeyIterator(object):
    """Iterator over the child keys of a Key.

    Supports len() and `in` efficiently.  As with all registry
    iteration, the underlying key must not be modified while iterating.
    """

    def __init__(self, key):
        self.key = key
        self.index = 0

    def __len__(self):
        return _winreg.QueryInfoKey(self.key.hkey)[0]

    def __contains__(self, name):
        try:
            self.key(name)
        except AttributeError:
            return False
        else:
            return True

    def __iter__(self):
        return self

    def next(self):
        try:
            child = _winreg.EnumKey(self.key.hkey, self.index)
        except WindowsError:
            raise StopIteration
        self.index += 1
        return Key(child, self.key)

    __next__ = next
class ValueIterator(object):
    """Iterator over the values stored in a Key.

    Supports len() and `in` efficiently.  The underlying key must not
    be modified during iteration.
    """

    def __init__(self, key):
        self.key = key
        self.index = 0

    def __len__(self):
        return _winreg.QueryInfoKey(self.key.hkey)[1]

    def __contains__(self, name):
        try:
            self.key[name]
        except KeyError:
            return False
        else:
            return True

    def __iter__(self):
        return self

    def next(self):
        try:
            raw = _winreg.EnumValue(self.key.hkey, self.index)
        except WindowsError:
            raise StopIteration
        self.index += 1
        return Value(raw[1], raw[0], raw[2])

    __next__ = next
# Bootstrap by creating constants for the root keys
# Each predefined Windows root key is exposed both under its short
# mnemonic (HKLM, HKCU, ...) and its full name (HKEY_LOCAL_MACHINE, ...).
# All are created with KEY_READ; access rights widen lazily on demand.
HKCR = Key("HKEY_CLASSES_ROOT",None,KEY_READ,_winreg.HKEY_CLASSES_ROOT)
HKEY_CLASSES_ROOT = HKCR
HKCC = Key("HKEY_CURRENT_CONFIG",None,KEY_READ,_winreg.HKEY_CURRENT_CONFIG)
HKEY_CURRENT_CONFIG = HKCC
HKCU = Key("HKEY_CURRENT_USER",None,KEY_READ,_winreg.HKEY_CURRENT_USER)
HKEY_CURRENT_USER = HKCU
HKDD = Key("HKEY_DYN_DATA",None,KEY_READ,_winreg.HKEY_DYN_DATA)
HKEY_DYN_DATA = HKDD
HKLM = Key("HKEY_LOCAL_MACHINE",None,KEY_READ,_winreg.HKEY_LOCAL_MACHINE)
HKEY_LOCAL_MACHINE = HKLM
HKPD = Key("HKEY_PERFORMANCE_DATA",None,KEY_READ,_winreg.HKEY_PERFORMANCE_DATA)
HKEY_PERFORMANCE_DATA = HKPD
HKU = Key("HKEY_USERS",None,KEY_READ,_winreg.HKEY_USERS)
HKEY_USERS = HKU
if __name__ == "__main__":
    # Running the module directly executes its doctests; ELLIPSIS lets
    # examples elide platform-specific output.
    import doctest
    doctest.testmod(optionflags=doctest.ELLIPSIS)
| {
"content_hash": "3b1311ab6ef45a0d4a6f92feccee0f47",
"timestamp": "",
"source": "github",
"line_count": 584,
"max_line_length": 93,
"avg_line_length": 33.138698630136986,
"alnum_prop": 0.6156668216813931,
"repo_name": "EhsanKia/keypirinha-plugins",
"id": "6fe12a5b3b7ca60d64708d664e4febb99cab1af4",
"size": "19523",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keypirinha-steam/src/lib/regobj.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46674"
}
],
"symlink_target": ""
} |
import lzma
import os

from JumpScale import j
import JumpScale.baselib.gitlab
class ChangeTrackerClient():
def __init__(self, MDPath, namespace="backup", cachePath="", blobclientname=""):
    """Client that tracks filesystem changes and ships them to a blobstor.

    @param MDPath: directory holding the metadata tree (MD/LINKS/ERRORS)
    @param namespace: blobstor namespace used for all stored blobs
    @param cachePath: intermediate hard-link cache dir; defaults to
        /opt/backup/CACHE when empty
    @param blobclientname: name of the preconfigured blobstor2 client
    """
    # Bug fix: `cachePath` and `blobclientname` were referenced in the
    # body but were never parameters, raising NameError on construction.
    # They are now keyword arguments with backward-compatible defaults
    # (BackupClient already passes blobclientname as a keyword).
    self.errors = []
    self.excludes = ["*.pyc"]
    self.MDPath = MDPath
    if cachePath == "":
        cachePath = "/opt/backup/CACHE"
    self.cachePath = cachePath  # intermediate link path
    # local blob STOR
    self.STORpath = "/opt/backup/STOR"
    # Kept even though unused below: reading the config here fails fast
    # when the superadmin password is not configured.
    passwd = j.application.config.get('grid.master.superadminpasswd')
    login = "root"
    # blobstor2 client
    self.client = j.clients.blobstor2.getClient(namespace=namespace,
                                                name=blobclientname)
    self.namespace = namespace
    self.repoId = 1  # will be implemented later with osis
    self.compress = False
def _normalize(self, path):
    """Escape characters in `path` that would confuse the shell `ln` call."""
    for ch in ("'", "[", "]"):
        path = path.replace(ch, "\\" + ch)
    return path
def action_link(self, src, dest):
    """Hard-link `src` to `dest`, creating parent directories as needed.

    If `dest` already exists it must already be a hard link (link count
    >= 2); otherwise the link is created via a shell `ln` call and any
    failure is recorded in self.errors instead of raised.
    """
    #DO NOT IMPLEMENT YET
    j.system.fs.createDir(j.system.fs.getDirName(dest))
    print "link:%s %s"%(src, dest)
    if j.system.fs.exists(path=dest):
        stat=j.system.fs.statPath(dest)
        if stat.st_nlink<2:
            raise RuntimeError("only support linked files")
    else:
        # Paths are escaped via _normalize because the command goes
        # through a shell.
        cmd="ln '%s' '%s'"%(self._normalize(src),self._normalize(dest))
        try:
            j.system.process.execute(cmd)
        except Exception,e:
            # Best-effort: log and remember the failure, keep going.
            print "ERROR",
            print cmd
            print e
            self.errors.append(["link",cmd,e])
def _dump2stor(self, data, key=""):
    """Store `data` in the blobstor and return the key it was stored under.

    Empty payloads are never stored and yield "".  When `key` is not
    given it is derived from the md5 of the *uncompressed* data; the
    payload itself is lzma-compressed only when self.compress is set.
    """
    if len(data) == 0:
        return ""
    if key == "":
        key = j.tools.hash.md5_string(data)
    payload = lzma.compress(data) if self.compress else data
    if not self.client.exists(key=key, repoId=self.repoId):
        self.client.set(key=key, data=payload, repoId=self.repoId)
    return key
def _read_file(self, path, block_size=0):
    """Yield the contents of `path` in chunks of `block_size` bytes.

    A block_size of 0 selects the default 4MB chunk size.
    """
    if block_size == 0:
        # Bug fix: the original read `self._MB4`, which is not defined
        # anywhere on this class and raised AttributeError.  Fall back
        # to a literal 4MB while still honouring the attribute if a
        # subclass provides it.
        block_size = getattr(self, "_MB4", 4 * 1024 * 1024)
    with open(path, 'rb') as f:
        while True:
            piece = f.read(block_size)
            if not piece:
                return
            yield piece
def doError(self, path, msg):
    """Record a non-fatal problem for `path` for later reporting."""
    self.errors += [[path, msg]]
def _handleMetadata(self, path,destination,prefix,ttype,linkdest=None,fullcheck=False):
    """Compare `path` against its stored metadata and rewrite it if stale.

    @param path: absolute path of the file/dir/link on disk
    @param destination: destination subtree inside the metadata store
    @param prefix: prefix stripped from `path` to get its relative part
    @param ttype: "F" (file), "D" (dir) or "L" (symlink)
    @param linkdest: for links, the link target (relative to prefix)
    @param fullcheck: force an md5 re-hash even when size/mtime match
    @return (mdchange, contentchange, md5, relpath) — whether metadata
        changed, whether file content changed, the file's hash, and its
        path relative to the destination.
    """
    # print "MD:%s "%path,
    srcpart=j.system.fs.pathRemoveDirPart(path,prefix,True)
    dest=j.system.fs.joinPaths(self.MDPath,"MD",destination,srcpart)
    dest2=j.system.fs.joinPaths(destination,srcpart)
    dest2=dest2.lstrip("/")
    change=False
    try:
        stat=j.system.fs.statPath(path)
    except Exception,e:
        if not j.system.fs.exists(path):
            #can be link which does not exist
            #or can be file which is deleted in mean time
            self.doError(path,"could not find, so could not backup.")
            return (False,False,"",dest2)
        # NOTE(review): if statPath fails but the path *does* exist we
        # fall through with `stat` unbound, which raises NameError
        # further down — confirm intended handling.
    #next goes for all types
    if j.system.fs.exists(path=dest):
        # Metadata already exists: load it; dirs store theirs in .meta.
        if ttype=="D":
            dest+="/.meta"
        item=self.getMDObjectFromFs(dest)
        mdchange=False
    elif ttype=="L":
        # Links keep their metadata under a separate LINKS tree.
        dest=j.system.fs.joinPaths(self.MDPath,"LINKS",destination,srcpart)
        if j.system.fs.exists(path=dest):
            if j.system.fs.exists(j.system.fs.joinPaths(dest,".meta")):
                #is dir
                dest=j.system.fs.joinPaths(dest,".meta")
            item=self.getMDObjectFromFs(dest)
            mdchange=False
        else:
            item=Item()
            mdchange=True
    else:
        # First time we see this path.
        item=Item()
        mdchange=True
    if ttype=="F":
        # Re-hash only when forced or when size/mtime no longer match.
        if fullcheck or item.mtime<>stat.st_mtime or item.size<>stat.st_size:
            newMD5=j.tools.hash.md5(path)
            if item.hash<>newMD5:
                mdchange=True
                change=True
                item.hash=newMD5
    elif ttype=="L":
        if not item.__dict__.has_key("dest"):
            mdchange=True
        elif linkdest<>item.dest:
            mdchange=True
    if mdchange==False:
        #check metadata changed based on mode, uid, gid & mtime
        if ttype=="F":
            if int(item.size)<>int(stat.st_size):
                # NOTE(review): `==` here is a no-op comparison, almost
                # certainly meant to be an assignment `mdchange=True`.
                mdchange==True
        if int(item.mtime)<>int(stat.st_mtime) or int(item.mode)<>int(stat.st_mode) or\
           int(item.uid)<>int(stat.st_uid) or int(item.gid)<>int(stat.st_gid):
            mdchange=True
    if mdchange:
        print "MD:%s CHANGE"%path
        # print "MDCHANGE"
        # Refresh the stored stat fields and persist the item.
        item.mode=int(stat.st_mode)
        item.uid=int(stat.st_uid)
        item.gid=int(stat.st_gid)
        # item.atime=stat.st_atime
        item.ctime=int(stat.st_ctime)
        item.mtime=int(stat.st_mtime)
        if ttype=="F":
            item.size=stat.st_size
            item.type="F"
            j.system.fs.createDir(j.system.fs.getDirName(dest))
        elif ttype=="D":
            item.type="D"
            dest=j.system.fs.joinPaths(self.MDPath,"MD",destination,srcpart,".meta")
            j.system.fs.createDir(j.system.fs.getDirName(dest))
        elif ttype=="L":
            item.dest=linkdest
            if j.system.fs.isDir(path):
                dest=j.system.fs.joinPaths(self.MDPath,"LINKS",destination,srcpart,".meta")
                item.type="LD"
            else:
                dest=j.system.fs.joinPaths(self.MDPath,"LINKS",destination,srcpart)
                item.type="LF"
            j.system.fs.createDir(j.system.fs.getDirName(dest))
        j.system.fs.writeFile(dest, str(item))
    return (mdchange,change,item.hash,dest2)
def restore(self, src, dest, namespace):
    """Restore every file below metadata dir `src` into directory `dest`.

    @param src: location inside the metadata store; a relative path is
        resolved against self.MDPath
    @param dest: filesystem directory to restore into
    @param namespace: blobstor namespace holding the blobs
    """
    self.errors=[]
    if src[0] != "/":
        src = "%s/%s" % (self.MDPath, src.strip())
    if not j.system.fs.exists(path=src):
        raise RuntimeError("Could not find source (on mdstore)")
    # Walk the metadata files recursively; each one restores one file.
    for item in j.system.fs.listFilesInDir(src, True):
        destpart=j.system.fs.pathRemoveDirPart(item, src, True)
        destfull=j.system.fs.joinPaths(dest, destpart)
        self.restore1file(item, destfull, namespace)
def getMDObjectFromFs(self, path):
    """Load and return the metadata Item stored in the file at `path`.

    NOTE(review): `Item` is not defined or imported anywhere in this
    module as shown — presumably provided by a JumpScale import; verify.
    """
    itemObj=Item(j.system.fs.fileGetContents(path))
    return itemObj
def restore1file(self, src, dest, namespace):
    """Restore one file from metadata file `src` to path `dest`.

    Empty-hash entries become empty files; otherwise the blob is taken
    from the local cache when present, else fetched from the blobstor.
    """
    print "restore: %s %s" % (src, dest)
    itemObj=self.getMDObjectFromFs(src)
    j.system.fs.createDir(j.system.fs.getDirName(dest))
    if itemObj.hash.strip()=="":
        j.system.fs.writeFile(dest,"")
        return
    blob_path = self._getBlobPath(namespace, itemObj.hash)
    if j.system.fs.exists(blob_path):
        # Blob exists in cache, we can get it from there!
        print "Blob FOUND in cache: %s" % blob_path
        j.system.fs.copyFile(blob_path, dest)
        return
    # Get the file directly or get the blob storing the hashes of file parts!
    blob_hash = itemObj.hashlist if hasattr(itemObj, "hashlist") else itemObj.hash
    # Get blob from blobstor2.
    # NOTE(review): called positionally as get(namespace, hash) here but
    # as get(key=...) in _getBlob, and set() elsewhere uses key/repoId —
    # the client call signatures look inconsistent; verify.
    blob = self.client.get(namespace, blob_hash)
    # Write the blob
    self._writeBlob(dest, blob, itemObj, namespace)
def _writeBlob(self, dest, blob, item, namespace):
    """
    Write blob to destination

    A blob starting with ##HASHLIST## is an index of chunk hashes: each
    chunk is fetched and appended to `dest`; otherwise the blob itself
    is the file content.  Ownership and mode are restored from `item`.

    NOTE(review): uses `os`, which is not imported at the top of this
    module as shown.  Also decompresses unconditionally while
    _dump2stor compresses only when self.compress is set — verify.
    """
    check="##HASHLIST##"
    if blob.find(check)==0:
        # found hashlist
        print "FOUND HASHLIST %s" % blob
        # +1 skips the newline following the marker.
        hashlist = blob[len(check) + 1:]
        j.system.fs.writeFile(dest,"")
        for hashitem in hashlist.split("\n"):
            if hashitem.strip() != "":
                blob_block = self.client.get(namespace, hashitem)
                data = lzma.decompress(blob_block)
                j.system.fs.writeFile(dest, data, append=True)
    else:
        # content is there
        data = lzma.decompress(blob)
        j.system.fs.writeFile(dest, data)
    # chmod/chown
    os.chmod(dest,int(item.mode))
    os.chown(dest,int(item.uid),int(item.gid))
def backupBatch(self,batch,batchnr=None,total=None):
    """
    batch is [[src,md5]]

    Ask the blobstor which of the batch's md5 keys are missing, then
    upload only those.  Files above 4MB are chunked; the chunk-hash
    list is stored under the file's md5.  `batchnr`/`total` are used
    only for progress output.

    NOTE(review): reads `self._MB4`, which is never defined on this
    class as shown — verify where it comes from.
    """
    key2paths={}
    for src,md5 in batch:
        key2paths[md5]=(src,md5)
    print "batch nr:%s check"%batchnr
    # Single round-trip: which keys does the store NOT have yet?
    notexist=self.client.existsBatch(keys=key2paths.keys())
    print "batch checked on unique data"
    nr=batchnr*1000
    for src,md5 in batch:
        nr+=1
        if md5 in notexist:
            hashes=[]
            if j.system.fs.statPath(src).st_size>self._MB4:
                print "%s/%s:upload file (>4MB) %s"%(nr,total,src)
                for data in self._read_file(src):
                    hashes.append(self._dump2stor(data))
                if len(hashes)>1:
                    out = "##HASHLIST##\n"
                    hashparts = "\n".join(hashes)
                    out += hashparts
                    # Store in blobstor
                    out_hash = self._dump2stor(out,key=md5) #hashlist is stored on md5 location of file
                else:
                    raise RuntimeError("hashist needs to be more than 1.")
            else:
                print "%s/%s:upload file (<4MB) %s"%(nr,total,src)
                # Small file: single chunk stored directly under its md5.
                for data in self._read_file(src):
                    hashes.append(self._dump2stor(data,key=md5))
        else:
            print "%s/%s:no need to upload, exists: %s"%(nr,total,src)
def backup(self,path,destination="", pathRegexIncludes={},pathRegexExcludes={".*\\.pyc"},childrenRegexExcludes=[".*/dev/.*",".*/proc/.*"],fullcheck=False):
    """Walk `path`, update the metadata store and upload changed files.

    Produces three working lists under STOR/../TMP/plists: .mdchanges
    (metadata that changed), .fchanges (files whose content changed and
    must be uploaded, as path|md5) and .found (everything seen).  After
    the walk, entries present in the previous run but not found now are
    deleted from the metadata store, then changed files are uploaded in
    batches.

    NOTE(review): mutable default arguments ({} and []) are shared
    across calls — confirm this is intended.
    """
    #check if there is a dev dir, if so will do a special tar
    ##BACKUP:
    #tar Szcvf testDev.tgz saucy-amd64-base/rootfs/dev/
    ##restore
    #tar xzvf testDev.tgz -C testd
    self._createExistsList(destination)
    print "SCAN MD:%s"%path
    self.errors=[]
    if j.system.fs.exists(j.system.fs.joinPaths(path,"dev")):
        # Device nodes can't be stored as blobs; tar them sparsely.
        cmd="cd %s;tar Szcvf __dev.tgz dev"%path
        j.system.process.execute(cmd)
    destMDClist=j.system.fs.joinPaths(self.STORpath, "../TMP","plists",self.namespace,destination,".mdchanges")
    destFClist=j.system.fs.joinPaths(self.STORpath, "../TMP","plists",self.namespace,destination,".fchanges")
    destFlist=j.system.fs.joinPaths(self.STORpath, "../TMP","plists",self.namespace,destination,".found")
    j.system.fs.createDir(j.system.fs.getDirName(destMDClist))
    j.system.fs.createDir(j.system.fs.getDirName(destFClist))
    j.system.fs.createDir(j.system.fs.getDirName(destFlist))
    mdchanges = open(destMDClist, 'w')
    changes = open(destFClist, 'w')
    found = open(destFlist, 'w')
    w=j.base.fswalker.get()
    callbackMatchFunctions=w.getCallBackMatchFunctions(pathRegexIncludes,pathRegexExcludes,includeFolders=True,includeLinks=True)
    def processfile(path,stat,arg):
        # Per-file callback: refresh metadata, queue content uploads.
        if path[-4:]==".pyc":
            return
        self=arg["self"]
        prefix=arg["prefix"]
        mdchange,fchange,md5,path2=self._handleMetadata(path,arg["destination"],prefix=prefix,ttype="F",fullcheck=arg["fullcheck"])
        if mdchange:
            arg["mdchanges"].write("%s\n"%(path2))
        if arg["fullcheck"] or fchange:
            arg["changes"].write("%s|%s\n"%(path,md5))
        arg["found"].write("%s\n"%path2)
    def processdir(path,stat,arg):
        # Per-directory callback: metadata only, no content.
        self=arg["self"]
        prefix=arg["prefix"]
        mdchange,fchange,md5,path=self._handleMetadata(path,arg["destination"],prefix=prefix,ttype="D")
        if mdchange:
            arg["mdchanges"].write("%s\n"%path)
        arg["found"].write("%s\n"%path)
    def processlink(src,dest,stat,arg):
        # print "LINK: %s %s"%(src,dest)
        # Per-symlink callback: store the (relative) link target.
        path=src
        self=arg["self"]
        prefix=arg["prefix"]
        destpart=j.system.fs.pathRemoveDirPart(dest,prefix,True)
        mdchange,fchange,md5,path=self._handleMetadata(path,arg["destination"],prefix=prefix,ttype="L",linkdest=destpart)
        if mdchange:
            arg["mdchanges"].write("%s\n"%path)
        arg["found"].write("%s\n"%path)
    callbackFunctions={}
    callbackFunctions["F"]=processfile
    callbackFunctions["D"]=processdir
    callbackFunctions["L"]=processlink
    arg={}
    arg["self"]=self
    arg["prefix"]=path
    arg["changes"]=changes
    arg["mdchanges"]=mdchanges
    arg["found"]=found
    arg["destination"]=destination
    arg["fullcheck"]=fullcheck
    # arg["batch"]=[]
    w.walk(path,callbackFunctions,arg=arg,callbackMatchFunctions=callbackMatchFunctions,childrenRegexExcludes=childrenRegexExcludes)
    changes.close()
    found.close()
    mdchanges.close()
    # self.backupBatch(arg["batch"])
    if len(self.errors)>0:
        # Persist all walk errors under the ERRORS tree.
        out=""
        for path,msg in self.errors:
            out+="%s:%s\n"%(path,msg)
        epath=j.system.fs.joinPaths(self.MDPath,"ERRORS",destination,"ERRORS.LOG")
        j.system.fs.createDir(j.system.fs.getDirName(epath))
        j.system.fs.writeFile(epath,out)
    #now we need to find the deleted files
    #sort all found files when going over fs
    cmd="sort %s | uniq > %s_"%(destFlist,destFlist)
    j.system.process.execute(cmd)
    originalFiles=j.system.fs.joinPaths(self.STORpath, "../TMP","plists",self.namespace,destination,".mdfound")
    cmd="sort %s | uniq > %s_"%(originalFiles,originalFiles)
    j.system.process.execute(cmd)
    deleted=j.system.fs.joinPaths(self.STORpath, "../TMP","plists",self.namespace,destination,".deleted")
    #now find the diffs
    cmd="diff %s_ %s_ -C 0 | grep ^'- ' > %s"%(originalFiles,destFlist,deleted)
    rcode,result=j.system.process.execute(cmd,False)
    # if not(rcode==1 and result.strip().replace("***ERROR***","")==""):
    #     raise RuntimeError("Could not diff : cmd:%s error: %s"%(cmd,result))
    f=open(deleted, "r")
    for line in f:
        # Each diff line names a path that existed last run but is gone;
        # drop its metadata from both the MD and LINKS trees.
        line=line.strip()
        path=line.lstrip("- ")
        dest=j.system.fs.joinPaths(self.MDPath,"MD",path)
        j.system.fs.removeDirTree(dest)
        dest=j.system.fs.joinPaths(self.MDPath,"LINKS",path)
        j.system.fs.removeDirTree(dest)
    f.close()
    print "SCAN DONE MD:%s"%path
    print "START UPLOAD FILES."
    #count lines
    total=0
    f=open(destFClist, "r")
    for line in f:
        total+=1
    f.close()
    print "count done"
    f=open(destFClist, "r")
    counter=0
    batch=[]
    batchnr=0
    for line in f:
        path,md5=line.strip().split("|")
        batch.append([path,md5])
        counter+=1
        if counter>1000:
            # NOTE(review): `batch` and `counter` are never reset after
            # a flush, so every subsequent call re-sends all previous
            # entries and the batch keeps growing — looks like a bug.
            self.backupBatch(batch,batchnr=batchnr,total=total)
            batchnr+=1
    #final batch
    self.backupBatch(batch,batchnr=batchnr,total=total)
    f.close()
    print "BACKUP DONE."
def _createExistsList(self,dest):
    """Write the .mdfound list: all paths already present in the MD store.

    Walks the MD and LINKS trees under `dest` and records each entry's
    path relative to the store; `backup()` later diffs this against the
    freshly-found list to detect deletions.
    """
    # j.system.fs.pathRemoveDirPart(dest,prefix,True)
    print "Walk over MD, to create files which we already have found."
    destF=j.system.fs.joinPaths(self.STORpath, "../TMP","plists",self.namespace,dest,".mdfound")
    j.system.fs.createDir(j.system.fs.getDirName(destF))
    fileF = open(destF, 'w')
    def processfile(path,stat,arg):
        # Normalize an absolute metadata path into the store-relative
        # form used by the found/deleted lists.
        path2=j.system.fs.pathRemoveDirPart(path, arg["base"], True)
        path2=path2.lstrip("/")
        if path2[0:2]=="MD":
            path2=path2[3:]
        if path2[0:5]=="LINKS":
            path2=path2[6:]
        path2=path2.lstrip("/")
        # print path2
        if path2[-5:]==".meta":
            # Directory marker files are not real entries; skip them.
            return
        # print "%s : %s"%(path,path2)
        # if j.system.fs.isDir(path2):
        #     path=j.system.fs.joinPaths(path,".meta")
        # md=self.getMDObjectFromFs(path)
        # fileF.write("%s|%s|%s|%s\n"%(path2,md.size,md.mtime,md.hash))
        fileF.write("%s\n"%(path2))
    callbackFunctions={}
    callbackFunctions["F"]=processfile
    callbackFunctions["D"]=processfile
    arg={}
    arg["base"]=self.MDPath
    w=j.base.fswalker.get()
    callbackFunctions["F"]=processfile
    wpath=j.system.fs.joinPaths(self.MDPath,"MD",dest)
    if j.system.fs.exists(path=wpath):
        w.walk(wpath,callbackFunctions=callbackFunctions,arg=arg,childrenRegexExcludes=[])
    wpath=j.system.fs.joinPaths(self.MDPath,"LINKS",dest)
    if j.system.fs.exists(path=wpath):
        w.walk(wpath,callbackFunctions=callbackFunctions,arg=arg,childrenRegexExcludes=[])
    fileF.close()
    print "Walk over MD, DONE"
def _getBlobPath(self, namespace, key):
    """Return the cache-directory path for blob `key` in `namespace`.

    Blobs fan out over two levels of 2-character directories derived
    from the key so no single directory grows too large.
    """
    level1 = key[0:2]
    level2 = key[2:4]
    return j.system.fs.joinPaths(self.cachePath, namespace, level1, level2, key)
def _getBlob(self, src, namespace):
    """
    Retrieves the blobs in Cache path

    Loads the metadata Item from file `src`, fetches the corresponding
    blob from the blobstor, materializes it in the cache directory and
    returns the cache path.
    """
    # Create the Item Object
    itemObj = Item(j.system.fs.fileGetContents(src))
    # A hashlist attribute means the content is chunked; fetch the list.
    blob_hash = itemObj.hashlist if hasattr(itemObj, "hashlist") else itemObj.hash
    # Get blob from blobstor2
    blob = self.client.get(key=blob_hash)
    # The path which this blob should be saved
    blob_path = self._getBlobPath(namespace, itemObj.hash)
    j.system.fs.createDir(j.system.fs.getDirName(blob_path))
    self._writeBlob(blob_path, blob, itemObj, namespace)
    return blob_path
def linkRecipe(self, src, dest, namespace):
    """
    Hardlink Recipe from Cache Dir

    For every metadata file below `src`, materialize its blob in the
    cache and hard-link it into the matching location under `dest`.
    """
    if not self.cachePath:
        raise RuntimeError("Link Path is not Set!")
    # Relative sources are resolved against the metadata store.
    if src[0] != "/":
        src = "%s/%s" % (self.MDPath, src.strip())
    if not j.system.fs.exists(path=src):
        raise RuntimeError("Could not find source (on mdstore)")
    for item in j.system.fs.listFilesInDir(src, True):
        # Retrieve blob & blob_path in intermediate location
        blob_path = self._getBlob(item, namespace)
        # the hardlink destination
        destpart = j.system.fs.pathRemoveDirPart(item, src, True)
        destfull = j.system.fs.joinPaths(dest, destpart)
        # Now, make the link
        self.action_link(blob_path, destfull)
class BackupClient:
    """
    High-level backup driver: pairs a ChangeTracker-style file manager
    (blobs) with a gitlab-backed git repo holding the metadata tree.

    NOTE(review): `JSFileMgr` used in __init__ is not defined or
    imported in this module as shown; its keyword arguments match
    ChangeTrackerClient's constructor, so this looks like a stale
    rename.  `backupRecipe` also calls filemanager.backupRecipe, which
    ChangeTrackerClient does not define here — verify both.
    """
    def __init__(self,backupname,blobclientName,gitlabName="incubaid"):
        self.blobclientName=blobclientName
        self.gitlabName=gitlabName
        self.gitlab=j.clients.gitlab.get(gitlabName)
        self.backupname=backupname
        self.mdpath="/opt/backup/MD/%s"%backupname
        if not j.system.fs.exists(path=self.mdpath):
            # First use of this backup set: make sure a gitlab project
            # exists to hold its metadata.
            if not self.gitlab.existsProject(namespace=self.gitlab.loginName, name=backupname):
                self.gitlab.createproject(backupname, description='backup set', issues_enabled=0, wall_enabled=0, merge_requests_enabled=0, wiki_enabled=0, snippets_enabled=0, public=0)#, group=accountname)
        self.gitclient = self.gitlab.getGitClient(self.gitlab.loginName, backupname, clean=False,path=self.mdpath)
        self.filemanager=JSFileMgr(MDPath=self.mdpath,namespace="backup",blobclientname=blobclientName)
    def backup(self,path,destination="", pathRegexIncludes={},pathRegexExcludes={},childrenRegexExcludes=[".*/dev/.*","/proc/.*"],fullcheck=False):
        # Delegate the walk/upload to the file manager, then snapshot
        # the metadata tree in git.
        # NOTE(review): mutable default args shared across calls.
        # self._clean()
        self.filemanager.backup(path,destination=destination, pathRegexIncludes=pathRegexIncludes,pathRegexExcludes=pathRegexExcludes,\
            childrenRegexExcludes=childrenRegexExcludes,fullcheck=fullcheck)
        self.commitMD()
    def _clean(self):
        # Wipe everything in the metadata dir except the .git repo.
        for ddir in j.system.fs.listDirsInDir(self.mdpath,False,True,findDirectorySymlinks=False):
            if ddir.lower()<>".git":
                j.system.fs.removeDirTree(j.system.fs.joinPaths(self.mdpath,ddir))
        for ffile in j.system.fs.listFilesInDir(self.mdpath, recursive=False, followSymlinks=False):
            j.system.fs.remove(ffile)
    def backupRecipe(self,recipe):
        """
        do backup of sources as specified in recipe
        example recipe
        #when star will do for each dir
        /tmp/JSAPPS/apps : * : /DEST/apps
        #when no * then dir & below
        /tmp/JSAPPS/bin : : /DEST/bin
        #now only for 1 subdir
        /tmp/JSAPPS/apps : asubdirOfApps : /DEST/apps
        """
        self._clean()
        self.filemanager.backupRecipe(recipe)
        self.commitMD()
    def commitMD(self):
        """Commit the metadata tree and push it when gitlab is reachable."""
        print "commit to git"
        self.gitclient.commit("backup %s"%j.base.time.getLocalTimeHRForFilesystem())
        if j.system.net.tcpPortConnectionTest(self.gitlab.addr,self.gitlab.port):
            #found gitlab
            print "push to git"
            self.gitclient.push()
        else:
            # Best-effort: the commit is local; push must happen later.
            print "WARNING COULD NOT COMMIT CHANGES TO GITLAB, no connection found.\nDO THIS LATER!!!!!!!!!!!!!!!!!!!!!!"
| {
"content_hash": "8bcbe72a2fb6c4b05dd9b7041fe29f63",
"timestamp": "",
"source": "github",
"line_count": 592,
"max_line_length": 218,
"avg_line_length": 37.932432432432435,
"alnum_prop": 0.5620769504809405,
"repo_name": "Jumpscale/jumpscale6_core",
"id": "b06c58c34bbd9f50880fddf530404e222e826f0a",
"size": "22456",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/JumpScale/baselib/changetracker/ChangeTrackerClient.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "3681"
},
{
"name": "HTML",
"bytes": "11738"
},
{
"name": "JavaScript",
"bytes": "70132"
},
{
"name": "Lua",
"bytes": "2162"
},
{
"name": "Python",
"bytes": "5848017"
},
{
"name": "Shell",
"bytes": "7692"
}
],
"symlink_target": ""
} |
u"""
Warn about features that are not present in Python 2.5, giving a message that
points to the earliest version of Python 2.x (or 3.x, if none) that supports it
"""
from .feature_base import Feature, Features
from lib2to3 import fixer_base
FEATURES = [
#(FeatureName,
# FeaturePattern,
# FeatureMinVersion,
#),
(u"memoryview",
u"power < 'memoryview' trailer < '(' any* ')' > any* >",
u"2.7",
),
(u"numbers",
u"""import_from< 'from' 'numbers' 'import' any* > |
import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""",
u"2.6",
),
(u"abc",
u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > |
import_from< 'from' 'abc' 'import' any* >""",
u"2.6",
),
(u"io",
u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > |
import_from< 'from' 'io' 'import' any* >""",
u"2.6",
),
(u"bin",
u"power< 'bin' trailer< '(' any* ')' > any* >",
u"2.6",
),
(u"formatting",
u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >",
u"2.6",
),
(u"nonlocal",
u"global_stmt< 'nonlocal' any* >",
u"3.0",
),
(u"with_traceback",
u"trailer< '.' 'with_traceback' >",
u"3.0",
),
]
class FixFeatures(fixer_base.BaseFix):
    """Warn about code using features unavailable in Python 2.5.

    For features introduced in some later 2.x release the fixer emits a
    warning naming the minimum version; for 3.x-only features the node
    cannot be converted at all.
    """

    run_order = 9  # Wait until all other fixers have run to check for these

    # To avoid spamming, each feature is warned about at most once.
    # (Class-level set: shared across instances, matching the original.)
    features_warned = set()

    # Build Feature objects from the declarative FEATURES table above.
    features = Features([Feature(name, pattern, version)
                         for name, pattern, version in FEATURES])

    PATTERN = features.PATTERN

    def match(self, node):
        to_ret = super(FixFeatures, self).match(node)
        # BaseFix.match returns either False or a results dict.  We only
        # want feature names in the mapping, so drop the generic 'node'
        # entry when present.
        # Improvement: the original wrapped a `del` in a broad
        # `except Exception: pass`; a guarded dict.pop is targeted and
        # cannot mask unrelated errors.
        if isinstance(to_ret, dict):
            to_ret.pop(u'node', None)
        return to_ret

    def transform(self, node, results):
        for feature_name in results:
            if feature_name in self.features_warned:
                continue
            curr_feature = self.features[feature_name]
            if curr_feature.version >= u"3":
                # 3.x-only feature: no 2.x equivalent exists.
                fail = self.cannot_convert
            else:
                fail = self.warning
            fail(node, reason=curr_feature.message_text())
            self.features_warned.add(feature_name)
| {
"content_hash": "56cca2c32591abbf40f704df472992a5",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 89,
"avg_line_length": 30.804597701149426,
"alnum_prop": 0.533955223880597,
"repo_name": "jrialland/python-brain",
"id": "b8e98c9b8663942aa31cfcd0a7aebd1ef92805bd",
"size": "2680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "3to2-1.0/lib3to2/fixes/fix_features.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "223787"
},
{
"name": "Shell",
"bytes": "1333"
}
],
"symlink_target": ""
} |
import json
import logging
import sys
import click
from click import Context
from click.testing import CliRunner
import pytest
import rasterio
from rasterio.rio import info
from rasterio.rio.main import main_group
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
def test_edit_nodata_err(data):
    """`rio edit --nodata -1` on a byte raster exits with usage error 2."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--nodata', '-1'])
    assert result.exit_code == 2

def test_edit_nodata(data):
    """A valid --nodata value (255) is persisted into the dataset."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--nodata', '255'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.nodata == 255.0
def test_edit_crs_err(data):
    """An unparseable --crs string exits with usage error 2."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--crs', 'LOL:WUT'])
    assert result.exit_code == 2

def test_edit_crs_epsg(data):
    """--crs accepts an EPSG code and persists it."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--crs', 'EPSG:32618'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32618'}

def test_edit_crs_proj4(data):
    """--crs accepts a PROJ.4 string and persists the equivalent CRS."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--crs', '+init=epsg:32618'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32618'}

def test_edit_crs_obj(data):
    """--crs accepts a JSON mapping and persists the equivalent CRS."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(
        info.edit, [inputfile, '--crs', '{"init": "epsg:32618"}'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32618'}
def test_edit_transform_err_not_json(data):
    """--transform must be a JSON array; plain text exits with error 2."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--transform', 'LOL'])
    assert result.exit_code == 2

def test_edit_transform_err_bad_array(data):
    """A 2-element --transform array is rejected with usage error 2."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [inputfile, '--transform', '[1,2]'])
    assert result.exit_code == 2

def test_edit_transform_affine(data):
    """A 6-element affine-ordered array round-trips (to 6 decimals)."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    input_t = '[300.038, 0.0, 101985.0, 0.0, -300.042, 2826915.0]'
    result = runner.invoke(info.edit, [inputfile, '--transform', input_t])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        for a, b in zip(src.affine, json.loads(input_t)):
            assert round(a, 6) == round(b, 6)

def test_edit_transform_gdal(data):
    """The same coefficients in GDAL geotransform order are accepted
    and stored as the equivalent affine (compared to 6 decimals)."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    # Expected affine ordering of the coefficients passed below.
    input_t = '[300.038, 0.0, 101985.0, 0.0, -300.042, 2826915.0]'
    result = runner.invoke(info.edit, [
        inputfile,
        '--transform', '[101985.0, 300.038, 0.0, 2826915.0, 0.0, -300.042]'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        for a, b in zip(src.affine, json.loads(input_t)):
            assert round(a, 6) == round(b, 6)
def test_edit_tags(data):
    """Repeated --tag options are all persisted as string metadata."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    result = runner.invoke(info.edit, [
        inputfile, '--tag', 'lol=1', '--tag', 'wut=2'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.tags()['lol'] == '1'
        assert src.tags()['wut'] == '2'
class MockContext:
    """Bare-bones stand-in for click.Context: the handlers under test
    only read and write the `obj` state dict."""

    def __init__(self):
        self.obj = dict()
class MockOption:
    """Bare-bones stand-in for click.Option: handlers only read `.name`."""
    def __init__(self, name):
        self.name = name
def test_like_dataset_callback(data):
    """like_handler stores the template dataset's properties in ctx.obj."""
    ctx = MockContext()
    info.like_handler(ctx, 'like', str(data.join('RGB.byte.tif')))
    assert ctx.obj['like']['crs'] == {'init': 'epsg:32618'}
def test_all_callback_pass(data):
    """all_handler returns None when the --all flag is not given."""
    ctx = MockContext()
    ctx.obj['like'] = {'transform': 'foo'}
    # Idiom fix: compare identity with `is None` rather than `== None`
    # (PEP 8; equality can be overridden, identity cannot).
    assert info.all_handler(ctx, None, None) is None
def test_all_callback(data):
    """With the flag set, all_handler returns the stored template dict."""
    ctx = MockContext()
    ctx.obj['like'] = {'transform': 'foo'}
    assert info.all_handler(ctx, None, True) == {'transform': 'foo'}

def test_all_callback_None(data):
    """With no 'like' state and no flag, all_handler returns None."""
    ctx = MockContext()
    assert info.all_handler(ctx, None, None) is None
def test_transform_callback_pass(data):
    """Always return None if the value is None"""
    ctx = MockContext()
    ctx.obj['like'] = {'transform': 'foo'}
    assert info.transform_handler(ctx, MockOption('transform'), None) is None

def test_transform_callback_err(data):
    """An unparseable transform value raises click.BadParameter."""
    ctx = MockContext()
    ctx.obj['like'] = {'transform': 'foo'}
    with pytest.raises(click.BadParameter):
        info.transform_handler(ctx, MockOption('transform'), '?')

def test_transform_callback(data):
    """The literal value 'like' resolves to the stored template transform."""
    ctx = MockContext()
    ctx.obj['like'] = {'transform': 'foo'}
    assert info.transform_handler(ctx, MockOption('transform'), 'like') == 'foo'
def test_nodata_callback_err(data):
    """A non-numeric nodata value raises click.BadParameter."""
    ctx = MockContext()
    ctx.obj['like'] = {'nodata': 'lolwut'}
    with pytest.raises(click.BadParameter):
        info.nodata_handler(ctx, MockOption('nodata'), 'lolwut')

def test_nodata_callback_pass(data):
    """Always return None if the value is None"""
    ctx = MockContext()
    ctx.obj['like'] = {'nodata': -1}
    assert info.nodata_handler(ctx, MockOption('nodata'), None) is None

def test_nodata_callback_0(data):
    """'0' parses to the float 0.0 (not mistaken for falsy/None)."""
    ctx = MockContext()
    assert info.nodata_handler(ctx, MockOption('nodata'), '0') == 0.0

def test_nodata_callback(data):
    """The literal value 'like' resolves to the stored template nodata."""
    ctx = MockContext()
    ctx.obj['like'] = {'nodata': -1}
    assert info.nodata_handler(ctx, MockOption('nodata'), 'like') == -1.0
def test_crs_callback_pass(data):
    """Always return None if the value is None"""
    ctx = MockContext()
    ctx.obj['like'] = {'crs': 'foo'}
    assert info.crs_handler(ctx, MockOption('crs'), None) is None

def test_crs_callback(data):
    """The literal value 'like' resolves to the stored template CRS."""
    ctx = MockContext()
    ctx.obj['like'] = {'crs': 'foo'}
    assert info.crs_handler(ctx, MockOption('crs'), 'like') == 'foo'
def test_tags_callback_err(data):
    """An unparseable tag spec raises click.BadParameter."""
    ctx = MockContext()
    ctx.obj['like'] = {'tags': {'foo': 'bar'}}
    with pytest.raises(click.BadParameter):
        # Bug fix: the original wrote `... == {'foo': 'bar'}` after the
        # call; since the call is expected to raise, the comparison was
        # unreachable dead code whose result was discarded anyway.
        info.tags_handler(ctx, MockOption('tags'), '?')
def test_tags_callback(data):
    """The literal value 'like' resolves to the stored template tags."""
    ctx = MockContext()
    ctx.obj['like'] = {'tags': {'foo': 'bar'}}
    assert info.tags_handler(ctx, MockOption('tags'), 'like') == {'foo': 'bar'}
# --- End-to-end tests of `rio edit-info --like <template>` ---
# Each test perturbs a copy of RGB.byte.tif, then checks that only the
# requested property is copied back from the pristine template file.
def test_edit_crs_like(data):
    """--crs like copies only the CRS from the template; nodata is untouched."""
    runner = CliRunner()
    # Set up the file to be edited.
    inputfile = str(data.join('RGB.byte.tif'))
    with rasterio.open(inputfile, 'r+') as dst:
        dst.crs = {'init': 'epsg:32617'}
        dst.nodata = 1.0
    # Double check.
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32617'}
        assert src.nodata == 1.0
    # The test.
    templatefile = 'tests/data/RGB.byte.tif'
    result = runner.invoke(info.edit, [
        inputfile, '--like', templatefile, '--crs', 'like'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32618'}  # restored from template
        assert src.nodata == 1.0                  # left as perturbed


def test_edit_nodata_like(data):
    """--nodata like copies only nodata from the template; CRS is untouched."""
    runner = CliRunner()
    # Set up the file to be edited.
    inputfile = str(data.join('RGB.byte.tif'))
    with rasterio.open(inputfile, 'r+') as dst:
        dst.crs = {'init': 'epsg:32617'}
        dst.nodata = 1.0
    # Double check.
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32617'}
        assert src.nodata == 1.0
    # The test.
    templatefile = 'tests/data/RGB.byte.tif'
    result = runner.invoke(info.edit, [
        inputfile, '--like', templatefile, '--nodata', 'like'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32617'}  # left as perturbed
        assert src.nodata == 0.0                  # restored from template


def test_edit_all_like(data):
    """--all copies every property (CRS and nodata) from the template."""
    runner = CliRunner()
    inputfile = str(data.join('RGB.byte.tif'))
    with rasterio.open(inputfile, 'r+') as dst:
        dst.crs = {'init': 'epsg:32617'}
        dst.nodata = 1.0
    # Double check.
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32617'}
        assert src.nodata == 1.0
    templatefile = 'tests/data/RGB.byte.tif'
    result = runner.invoke(info.edit, [
        inputfile, '--like', templatefile, '--all'])
    assert result.exit_code == 0
    with rasterio.open(inputfile) as src:
        assert src.crs == {'init': 'epsg:32618'}
        assert src.nodata == 0.0
def test_env():
    """`rio env --formats` lists the available GDAL drivers."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'env',
        '--formats'
    ])
    assert result.exit_code == 0
    assert 'GTiff' in result.output


def test_info_err():
    """`rio info` on a non-dataset path (a directory) fails."""
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests'])
    assert result.exit_code == 1
# --- Tests for `rio info` output options ---
def test_info():
    """Default output is a JSON document with dataset metadata."""
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests/data/RGB.byte.tif'])
    assert result.exit_code == 0
    assert '"count": 3' in result.output


def test_info_verbose():
    runner = CliRunner()
    result = runner.invoke(main_group, [
        '-v',
        'info',
        'tests/data/RGB.byte.tif'
    ])
    assert result.exit_code == 0


def test_info_quiet():
    runner = CliRunner()
    result = runner.invoke(main_group, [
        '-q',
        'info',
        'tests/data/RGB.byte.tif'
    ])
    assert result.exit_code == 0


def test_info_count():
    """--count prints the band count alone."""
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests/data/RGB.byte.tif', '--count'])
    assert result.exit_code == 0
    assert result.output == '3\n'


def test_info_nodatavals():
    # NOTE(review): despite the name, this exercises the --bounds option,
    # not nodata values — the name looks stale; confirm before renaming.
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests/data/RGB.byte.tif', '--bounds'])
    assert result.exit_code == 0
    assert result.output == '101985.0 2611485.0 339315.0 2826915.0\n'


def test_info_tags():
    """--tags prints the dataset tags as JSON."""
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests/data/RGB.byte.tif', '--tags'])
    assert result.exit_code == 0
    assert result.output == '{"AREA_OR_POINT": "Area"}\n'


def test_info_res():
    """--res prints pixel resolution."""
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests/data/RGB.byte.tif', '--res'])
    assert result.exit_code == 0
    assert result.output.startswith('300.037')


def test_info_lnglat():
    """--lnglat prints the dataset's center in geographic coordinates."""
    runner = CliRunner()
    result = runner.invoke(
        info.info,
        ['tests/data/RGB.byte.tif', '--lnglat'])
    assert result.exit_code == 0
    assert result.output.startswith('-77.757')


def test_mo_info():
    # res and lnglat are embedded in the default JSON output too.
    runner = CliRunner()
    result = runner.invoke(info.info, ['tests/data/RGB.byte.tif'])
    assert result.exit_code == 0
    assert '"res": [300.037' in result.output
    assert '"lnglat": [-77.757' in result.output


def test_info_stats():
    """--tell-me-more adds per-band statistics to the JSON output."""
    runner = CliRunner()
    result = runner.invoke(
        info.info, ['tests/data/RGB.byte.tif', '--tell-me-more'])
    assert result.exit_code == 0
    assert '"max": 255.0' in result.output
    assert '"min": 1.0' in result.output
    assert '"mean": 44.4344' in result.output


def test_info_stats_only():
    """--stats with --bidx prints min/max/mean for the chosen band only."""
    runner = CliRunner()
    result = runner.invoke(
        info.info, ['tests/data/RGB.byte.tif', '--stats', '--bidx', '2'])
    assert result.exit_code == 0
    assert result.output.startswith('1.000000 255.000000 66.02')
# --- Tests for `rio transform`: point reprojection from stdin/args ---
def test_transform_err():
    """A malformed (1-element) coordinate pair fails."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'transform'
    ], "[-78.0]")
    assert result.exit_code == 1


def test_transform_point():
    """Reproject a lng/lat point to a named destination CRS."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'transform',
        '--dst-crs', 'EPSG:32618',
        '--precision', '2'
    ], "[-78.0, 23.0]", catch_exceptions=False)
    assert result.exit_code == 0
    assert result.output.strip() == '[192457.13, 2546667.68]'


def test_transform_point_dst_file():
    """--dst-crs may be a dataset path whose CRS is used."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'transform',
        '--dst-crs', 'tests/data/RGB.byte.tif', '--precision', '2'
    ], "[-78.0, 23.0]")
    assert result.exit_code == 0
    assert result.output.strip() == '[192457.13, 2546667.68]'


def test_transform_point_src_file():
    """--src-crs may be a dataset path; output defaults to lng/lat."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'transform',
        '--src-crs',
        'tests/data/RGB.byte.tif',
        '--precision', '2'
    ], "[192457.13, 2546667.68]")
    assert result.exit_code == 0
    assert result.output.strip() == '[-78.0, 23.0]'


def test_transform_point_2():
    """The point may be given as an argument instead of stdin."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'transform',
        '[-78.0, 23.0]',
        '--dst-crs', 'EPSG:32618',
        '--precision', '2'
    ])
    assert result.exit_code == 0
    assert result.output.strip() == '[192457.13, 2546667.68]'


def test_transform_point_multi():
    """Multiple newline-separated points are each transformed."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'transform',
        '--dst-crs', 'EPSG:32618',
        '--precision', '2'
    ], "[-78.0, 23.0]\n[-78.0, 23.0]", catch_exceptions=False)
    assert result.exit_code == 0
    assert result.output.strip() == (
        '[192457.13, 2546667.68]\n[192457.13, 2546667.68]')
# --- Tests for `rio bounds`: dataset footprint as GeoJSON ---
def test_bounds_defaults():
    """Default output is a GeoJSON FeatureCollection."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif'
    ])
    assert result.exit_code == 0
    assert 'FeatureCollection' in result.output


def test_bounds_err():
    """A non-dataset path (a directory) fails."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests'
    ])
    assert result.exit_code == 1


def test_bounds_feature():
    """--feature emits a single Feature (one Polygon) per dataset."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--feature'
    ])
    assert result.exit_code == 0
    assert result.output.count('Polygon') == 1


def test_bounds_obj_bbox():
    """--bbox emits a bare [minx, miny, maxx, maxy] array in lng/lat."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--bbox',
        '--precision', '2'
    ])
    assert result.exit_code == 0
    assert result.output.strip() == '[-78.9, 23.56, -76.6, 25.55]'


def test_bounds_compact():
    """--compact drops the spaces after JSON separators."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--bbox',
        '--precision', '2',
        '--compact'
    ])
    assert result.exit_code == 0
    assert result.output.strip() == '[-78.9,23.56,-76.6,25.55]'


def test_bounds_indent():
    """--indent pretty-prints: 4 array members + brackets + trailing newline."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--bbox',
        '--indent', '2',
        '--precision', '2'
    ])
    assert result.exit_code == 0
    assert len(result.output.split('\n')) == 7


def test_bounds_obj_bbox_mercator():
    """--mercator reports the bbox in web mercator (EPSG:3857) meters."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--bbox',
        '--mercator',
        '--precision', '3'
    ])
    assert result.exit_code == 0
    assert result.output.strip() == (
        '[-8782900.033, 2700489.278, -8527010.472, 2943560.235]')


def test_bounds_obj_bbox_projected():
    """--projected reports the bbox in the dataset's own CRS."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--bbox',
        '--projected',
        '--precision', '3'
    ])
    assert result.exit_code == 0
    assert result.output.strip() == (
        '[101985.0, 2611485.0, 339315.0, 2826915.0]')


def test_bounds_crs_bbox():
    """--dst-crs reprojects the bbox to the requested CRS."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        '--bbox',
        '--dst-crs', 'EPSG:32618',
        '--precision', '3'
    ])
    assert result.exit_code == 0
    assert result.output.strip() == (
        '[101985.0, 2611485.0, 339315.0, 2826915.0]')


def test_bounds_seq():
    """--sequence emits one JSON text per input dataset, no RS chars."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        'tests/data/RGB.byte.tif',
        '--sequence'
    ])
    assert result.exit_code == 0
    assert result.output.count('Polygon') == 2

    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        'tests/data/RGB.byte.tif',
        '--sequence',
        '--bbox',
        '--precision', '2'
    ])
    assert result.exit_code == 0
    assert result.output == (
        '[-78.9, 23.56, -76.6, 25.55]\n[-78.9, 23.56, -76.6, 25.55]\n')
    # No ASCII record separator unless --rs is given.
    assert '\x1e' not in result.output


def test_bounds_seq_rs():
    """--rs prefixes each JSON text with the RFC 7464 record separator."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'bounds',
        'tests/data/RGB.byte.tif',
        'tests/data/RGB.byte.tif',
        '--sequence',
        '--rs',
        '--bbox',
        '--precision', '2'
    ])
    assert result.exit_code == 0
    assert result.output == (
        '\x1e[-78.9, 23.56, -76.6, 25.55]\n\x1e[-78.9, 23.56, -76.6, 25.55]\n')
def test_insp():
    """`rio insp` opens an interactive session on a valid dataset."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'insp',
        'tests/data/RGB.byte.tif'
    ])
    assert result.exit_code == 0


def test_insp_err():
    """`rio insp` on a non-dataset path fails."""
    runner = CliRunner()
    result = runner.invoke(main_group, [
        'insp',
        'tests'
    ])
    assert result.exit_code == 1


def test_info_checksums():
    """--tell-me-more includes per-band checksums."""
    runner = CliRunner()
    result = runner.invoke(
        info.info, ['tests/data/RGB.byte.tif', '--tell-me-more'])
    assert result.exit_code == 0
    assert '"checksum": [25420, 29131, 37860]' in result.output


def test_info_checksums_only():
    """--checksum with --bidx prints a single band's checksum."""
    runner = CliRunner()
    result = runner.invoke(
        info.info, ['tests/data/RGB.byte.tif', '--checksum', '--bidx', '2'])
    assert result.exit_code == 0
    assert result.output.strip() == '29131'
| {
"content_hash": "042174d6661689a8e02ea26613c88683",
"timestamp": "",
"source": "github",
"line_count": 659,
"max_line_length": 80,
"avg_line_length": 27.22610015174507,
"alnum_prop": 0.5889532939471631,
"repo_name": "perrygeo/rasterio",
"id": "eb4e1f3cef2a5b11e43becc1923f0127d70fba0b",
"size": "17942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_rio_info.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "34752"
},
{
"name": "Python",
"bytes": "515533"
},
{
"name": "Shell",
"bytes": "742"
}
],
"symlink_target": ""
} |
from PyQt5.QtCore import Qt, QEvent, QPoint, QPointF, QTimer
from PyQt5.QtGui import QMouseEvent, QKeyEvent
from PyQt5.QtTest import QTest
from cntestcase import CNTestApp
from cadnano import initAppWithGui
class GUITestApp(CNTestApp):
    """Test harness that boots cadnano with a GUI and provides QTest-based
    helpers for simulating mouse and keyboard interaction.

    Bug fix: ``graphicsItemClick`` previously ignored its ``delay`` argument
    and always passed a hard-coded 100 ms to ``QTest.mouseClick``.  An
    explicit ``delay`` is now honored; the default (-1) keeps the historical
    100 ms pause so existing callers behave the same.
    """

    def __init__(self):
        argv = None
        self.app = initAppWithGui(argv, do_exec=False)  # kick off a Gui style app
        self.document = self.app.document()
        self.app_window = self.app.cnmain_windows[0]
        # Include this or the automatic build will hang
        self.app.dontAskAndJustDiscardUnsavedChanges = True
        # By setting the widget to the main app_window we can traverse and
        # interact with any part of it. Also, tearDown will close
        # the application so we don't need to worry about that.
        self.setWidget(self.app_window, False)

    def tearDown(self):
        """Close the widget under test and drop the Qt application."""
        self._test_widget.close()
        self._test_widget = None
        self.app.qApp = None

    def setWidget(self, widget, show=True):
        """
        Must be called in the setUp() method, giving the test widget.
        @param show: If show() should be called on the GUI. Set to False if
            you don't want to see the GUI running.
        """
        self._test_widget = widget
        if show:
            self._test_widget.show()
    # end def

    ############################ Mouse events ############################
    @staticmethod
    def graphicsItemClick(graphics_item, button,
                          modifier=None, pos=None, delay=-1):
        """ Convenience method for clicking in a QGraphicsItem to wrap a call
        to QTest.mouseClick

        Args:
            graphics_item (QGraphicsItem):
            button (Qt.MouseButton):
            pos (QPoint): in item coordinates; defaults to the item's center
            delay (int): ms to wait before the click; -1 keeps the historical
                100 ms default
        """
        gview = graphics_item.scene().views()[0]
        if pos is None:
            pos = GUITestApp.getItemCenterScenePos(graphics_item)
        else:
            pos = graphics_item.mapToScene(pos)
        pos = gview.mapFromScene(pos)
        if modifier is None:
            modifier = Qt.KeyboardModifiers()
        # Honor an explicit delay; preserve the old hard-coded 100 ms default.
        if delay == -1:
            delay = 100
        QTest.mouseClick(gview.viewport(), button,
                         modifier=modifier, pos=pos, delay=delay)
    # end def

    @staticmethod
    def mouseDrag(widget, press_on, release_on, button,
                  modifier=None, delay=-1):
        """ Convenience helper for dragging a QWidget

        Makes a drag with the mouse.

        Args:
            widget (QWidget):
            press_on (QPoint): this is the position where the mouse is pressed.
            release_on (QPoint): this is the position where the mouse is released.
        """
        if modifier is None:
            modifier = Qt.KeyboardModifiers()
        QTest.mousePress(widget, button, modifier, pos=press_on, delay=delay)
        QTest.mouseMove(widget, pos=release_on, delay=delay)
        QTest.mouseRelease(widget, button, modifier, pos=release_on, delay=delay)
    # end def

    @staticmethod
    def graphicsItemDrag(graphics_item, press_on, release_on, button,
                         modifier=None, delay=-1):
        """ Convenience helper for dragging a QGraphicsItem

        Args:
            graphics_item (QGraphicsItem):
            press_on (QPoint): this is the scene position where the mouse is pressed.
            release_on (QPoint): this is the scene position where the mouse is released.
        """
        gview = graphics_item.scene().views()[0]
        press_on = gview.mapFromScene(press_on)
        release_on = gview.mapFromScene(release_on)
        GUITestApp.mouseDrag(gview.viewport(), press_on, release_on, button,
                             modifier=modifier, delay=delay)
    # end def

    ########################## Keyboard events ############################
    @staticmethod
    def typeText(widget, text, delay):
        """Types the text over the given widget, one key click per char."""
        for char in text:
            QTest.keyClick(widget, char, delay=delay)

    ########################## Miscellaneous ############################
    def processEvents(self):
        """ Call this to see changes in GUI from Events
        """
        self.app.qApp.processEvents()

    ############################ Private Methods ############################
    @staticmethod
    def getItemCenterScenePos(item):
        """Return the item's bounding-rect center mapped to scene coords."""
        return item.mapToScene(item.boundingRect().center()).toPoint()

    @staticmethod
    def getQtKey(key):
        """Handles the given key for a KeyEvent.

        Returns:
            Qt.Key
        """
        # Module-level lookup table built from the `constants` pairs below.
        return _STR_TO_QT[key.lower()]
# end class
# Carriage return is chr(13).  Bug fix: the original used '\13', which is the
# OCTAL escape 0o13 == chr(11) (vertical tab), not carriage return.
KEY_RETURN = '\r'

# Qt.Key <-> printable-character pairs used to build the lookup tables below.
# Bug fixes: Key_Ampersand/Key_Apostrophe were shifted by one ('^' and '&'
# respectively); they now map to '&' and "'", and Key_AsciiCircum supplies '^'.
constants = [
    (Qt.Key_Escape, ''),
    (Qt.Key_Tab, '\t'),
    (Qt.Key_Backspace, '\b'),
    (Qt.Key_Return, KEY_RETURN),
    (Qt.Key_Enter, KEY_RETURN),
    (Qt.Key_Space, ' '),
    (Qt.Key_Exclam, '!'),
    (Qt.Key_QuoteDbl, '"'),
    (Qt.Key_NumberSign, '#'),
    (Qt.Key_Dollar, '$'),
    (Qt.Key_Percent, '%'),
    (Qt.Key_Ampersand, '&'),
    (Qt.Key_Apostrophe, "'"),
    (Qt.Key_ParenLeft, '('),
    (Qt.Key_ParenRight, ')'),
    (Qt.Key_Asterisk, '*'),
    (Qt.Key_Plus, '+'),
    (Qt.Key_Comma, ','),
    (Qt.Key_Minus, '-'),
    (Qt.Key_Period, '.'),
    (Qt.Key_Slash, '/'),
    (Qt.Key_0, '0'),
    (Qt.Key_1, '1'),
    (Qt.Key_2, '2'),
    (Qt.Key_3, '3'),
    (Qt.Key_4, '4'),
    (Qt.Key_5, '5'),
    (Qt.Key_6, '6'),
    (Qt.Key_7, '7'),
    (Qt.Key_8, '8'),
    (Qt.Key_9, '9'),
    (Qt.Key_Colon, ':'),
    (Qt.Key_Semicolon, ';'),
    (Qt.Key_Less, '<'),
    (Qt.Key_Equal, '='),
    (Qt.Key_Greater, '>'),
    (Qt.Key_Question, '?'),
    (Qt.Key_At, '@'),
    (Qt.Key_A, 'a'),
    (Qt.Key_B, 'b'),
    (Qt.Key_C, 'c'),
    (Qt.Key_D, 'd'),
    (Qt.Key_E, 'e'),
    (Qt.Key_F, 'f'),
    (Qt.Key_G, 'g'),
    (Qt.Key_H, 'h'),
    (Qt.Key_I, 'i'),
    (Qt.Key_J, 'j'),
    (Qt.Key_K, 'k'),
    (Qt.Key_L, 'l'),
    (Qt.Key_M, 'm'),
    (Qt.Key_N, 'n'),
    (Qt.Key_O, 'o'),
    (Qt.Key_P, 'p'),
    (Qt.Key_Q, 'q'),
    (Qt.Key_R, 'r'),
    (Qt.Key_S, 's'),
    (Qt.Key_T, 't'),
    (Qt.Key_U, 'u'),
    (Qt.Key_V, 'v'),
    (Qt.Key_W, 'w'),
    (Qt.Key_X, 'x'),
    (Qt.Key_Y, 'y'),
    (Qt.Key_Z, 'z'),
    (Qt.Key_BracketLeft, '['),
    (Qt.Key_Backslash, '\\'),
    (Qt.Key_BracketRight, ']'),
    (Qt.Key_Underscore, '_'),
    (Qt.Key_AsciiCircum, '^'),
    (Qt.Key_BraceLeft, '{'),
    (Qt.Key_Bar, '|'),
    (Qt.Key_BraceRight, '}'),
]
# Forward and reverse lookup tables (Key_Return/Key_Enter share KEY_RETURN,
# so in the reverse table the later pair wins — harmless for lookups).
_QT_TO_STR = dict(constants)
_STR_TO_QT = dict([(y, x) for x, y in constants])
del constants
| {
"content_hash": "9a6ea957904dd3b0ba61b52bd1185978",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 88,
"avg_line_length": 31.169082125603865,
"alnum_prop": 0.5361128332300062,
"repo_name": "scholer/cadnano2.5",
"id": "dda71bb598a0db08417f15c1f1a692d678a700fd",
"size": "6477",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cadnano/tests/cnguitestcase.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2617"
},
{
"name": "Python",
"bytes": "1624263"
},
{
"name": "QMake",
"bytes": "3719"
}
],
"symlink_target": ""
} |
import re
import os
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.url import urljoin_rfc
from scrapy.utils.response import get_base_url
import csv, codecs, cStringIO
from product_spiders.items import Product, ProductLoader
HERE = os.path.abspath(os.path.dirname(__file__))
class HomeShop18Spider(BaseSpider):
    """Price-monitoring spider for homeshop18.com category listings."""

    USER_AGENT = "Googlebot/2.1 ( http://www.google.com/bot.html)"
    name = 'homeshop18.com'
    allowed_domains = ['www.homeshop18.com']
    start_urls = ('http://www.homeshop18.com/home-kitchen/category:3503/',
                  'http://www.homeshop18.com/household-appliances/category:3575/',
                  'http://www.homeshop18.com/toys-games/category:3335/',
                  'http://www.homeshop18.com/kids-26-baby/category:3627/',
                  'http://www.homeshop18.com/gifts-flowers/category:3011/',
                  'http://www.homeshop18.com/electronics/category:3203/',
                  'http://www.homeshop18.com/jewellery-watches/category:3376/',
                  'http://www.homeshop18.com/camera-26-camcorders/category:3159/',
                  'http://www.homeshop18.com/computer-peripherals/category:3254/',
                  )

    def parse_product(self, response):
        """Extract name/price/url from a product page; skip pages w/o price."""
        selector = HtmlXPathSelector(response)
        product_name = "".join(
            selector.select('//h1[@id="productLayoutForm:pbiName"]/text()').extract()).strip()
        product_price = "".join(
            selector.select('//span[@id="productLayoutForm:OurPrice"]/text()').re(r'([0-9\, ]+)')).strip()
        if not product_price:
            return
        loader = ProductLoader(item=Product(), response=response)
        loader.add_value('price', product_price)
        loader.add_value('url', response.url)
        loader.add_value('name', product_name)
        yield loader.load_item()

    def parse(self, response):
        """Follow subcategories and pagination; dispatch product pages."""
        if not isinstance(response, HtmlResponse):
            return
        selector = HtmlXPathSelector(response)
        base_url = get_base_url(response)
        # Subcategory links from the left-hand tree.
        for cat_url in selector.select('//ul[@id="tree"]/li/ul/li/a/@href').extract():
            yield Request(cat_url.strip())
        # Pagination links within the current listing.
        for page_url in selector.select('//div[@class="pagination"]/span/span/a/@href').extract():
            yield Request(page_url.strip())
        # Individual product pages.
        for product_url in selector.select('//div[@class="product_div"]/p/a/@href').extract():
            yield Request(product_url.strip(), callback=self.parse_product)
| {
"content_hash": "9a268737fe9301e00183967785f7f245",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 113,
"avg_line_length": 41.76190476190476,
"alnum_prop": 0.6077537058152793,
"repo_name": "ddy88958620/lib",
"id": "3a73390251d6b3fc9f31a67f185a35c8f03fe794",
"size": "2631",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Python/scrapy/fabfurnish/homeshop18_spider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for demoapp: FileStorage, FormStorage, Person, plus the
    # FileStorage -> FormStorage foreign key (added last so FormStorage exists).

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='FileStorage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('storage', models.FileField(upload_to='storage')),
                ('html_field_name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='FormStorage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('storage', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        migrations.AddField(
            model_name='filestorage',
            name='form',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='demoapp.FormStorage'),
        ),
    ]
| {
"content_hash": "00ac3fe23774c4a9fad3f32d9e798231",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 114,
"avg_line_length": 32.42857142857143,
"alnum_prop": 0.552863436123348,
"repo_name": "kaleissin/django-multipageforms",
"id": "c985d64b3aec9e1f4a94666ac272111ecd5640ff",
"size": "1436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/demo/demoapp/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4293"
},
{
"name": "Makefile",
"bytes": "3963"
},
{
"name": "Python",
"bytes": "48814"
}
],
"symlink_target": ""
} |
from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
class GoogleMapsWidget(forms.HiddenInput):
    """
    Widget for Google Maps object to be displayed
    inline in a form

    Renders a draggable-marker Google Map next to hidden latitude/longitude
    fields; dragging the marker or geocoding a "City, Country" search writes
    the coordinates back into the form fields named by
    ``self.attrs['latitude']`` / ``self.attrs['longitude']``.
    """

    # NOTE(review): signature deviates from Django's Widget.render
    # (extra `country` and unused `choices` params) — callers appear to rely
    # on keyword use; confirm before normalizing.
    def render(self, name, value, country=None, attrs=None, choices=(), renderer=None):
        # Fall back to Kabul coordinates when no base position was supplied.
        self.attrs['base_latitude'] = self.attrs.get('base_latitude', u'34.5333')
        self.attrs['base_longitude'] = self.attrs.get('base_longitude', u'69.1667')
        self.attrs['width'] = self.attrs.get('width', 700)
        self.attrs['height'] = self.attrs.get('height', 400)
        self.attrs['country'] = self.attrs.get('country', country)
        # Inline JS + markup, %-formatted with the widget attrs below.
        maps_html = u"""
        <script type="text/javascript" src="https://maps.google.com/maps/api/js?v=3&key=AIzaSyAc76ZfKuHCvwXAEAiR2vINQPgNRenCf_8&sensor=false"></script>
        <script type="text/javascript">
            $(document).ready(function(){
                // Base lat and long are set to django defaults from above
                var base_lat = %(base_latitude)s
                var base_long = %(base_longitude)s
                // If the lat and long fields have values use those to center the map
                function initialize() {
                    if($('#id_%(latitude)s').val()!=''){
                        center = new google.maps.LatLng($('#id_%(latitude)s').val(), $('#id_%(longitude)s').val());
                    }else{
                        center = new google.maps.LatLng(%(base_latitude)s,%(base_longitude)s);
                        $('#id_%(latitude)s').val(base_lat);
                        $('#id_%(longitude)s').val(base_long);
                    }
                    var myOptions = {
                        zoom: 15,
                        center: center,
                        mapTypeId: google.maps.MapTypeId.ROADMAP
                    };
                    map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
                    geocoder = new google.maps.Geocoder();
                    my_point = new google.maps.Marker({
                        position: center,
                        map: map,
                        draggable: true,
                    })
                    // If someone drags the map pointer reset the lat & long in the form
                    google.maps.event.addListener(my_point, 'dragend', function(event){
                        $('#id_%(latitude)s').val(event.latLng.lat());
                        $('#id_%(longitude)s').val(event.latLng.lng());
                    });
                    $('#%(longitude)s').parent().parent().hide();
                    google.maps.event.trigger(map, 'resize');
                }
                google.maps.event.addDomListener(window, 'load', initialize);
                $('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
                    console.log("map resize");
                    initialize();
                })
            });
            // Called from form to geocode address to get lat long for an address (city, country)
            function codeAddress(){
                google.maps.event.trigger(map, 'resize');
                var address = $('#city_country').val();
                geocoder.geocode( { 'address': address}, function(results, status) {
                    if (status == google.maps.GeocoderStatus.OK) {
                        results_len = results.length
                        var results_table = new Array();
                        for(i=0; i<results_len; i++){
                            address_location = results[i].geometry.location
                            if(i==0){
                                set_center(address_location.lat(), address_location.lng());
                                $('#id_%(latitude)s').val(address_location.lat());
                                $('#id_%(longitude)s').val(address_location.lng());
                            }
                            results_table[i] = '<div style="cursor: pointer" onclick="set_center(' +
                                address_location.lat() + ', ' +
                                address_location.lng() + ')">' +
                                results[i].formatted_address +
                                '</div>';
                        }
                        $('#search_results').html(results_table.join(''));
                    } else {
                        alert("Geocode was not successful for the following reason: " + status);
                    }
                });
            }
            // Called from codeAddress set the center to the coded address lat and long
            function set_center(lat, lng){
                google.maps.event.trigger(map, 'resize');
                latlng = new google.maps.LatLng(lat, lng);
                my_point.setPosition(latlng)
                map.setCenter(latlng);
            }
        </script>
        <br stlye="clear: both" />
        <div style="width: 400px; margin-bottom: 25px; margin-left: 100px">
            <div id="search">
                <label for="city_county">%(city_country)s</label>
                <input id="city_country" type="text" value="%(country)s" class="input-medium search-query"/>
                <input class="btn" type="button" value="%(find)s" onclick="codeAddress()" />
            </div>
            <div id="search_results"><br/>
            </div>
            <div id="map_canvas" style="width: %(width)ipx; height: %(height)ipx;"></div>
        </div>
        """ % {'latitude': self.attrs['latitude'], 'longitude': self.attrs['longitude'], 'base_longitude': self.attrs['base_longitude'],
               'base_latitude': self.attrs['base_latitude'], 'width': self.attrs['width'], 'height': self.attrs['height'], 'country': self.attrs['country'],
               # Translators: Used as a verb, a button label for a search function
               'find': _('Find'),
               'city_country': _('City, Country:')}
        # Render the hidden input itself, then append the map markup.
        rendered = super(GoogleMapsWidget, self).render(name, value, attrs)
        return rendered + mark_safe(maps_html)
| {
"content_hash": "d22aee4ef5ace844ff6a91d0e691ab08",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 160,
"avg_line_length": 47.970588235294116,
"alnum_prop": 0.4613733905579399,
"repo_name": "mercycorps/TolaActivity",
"id": "cb391f3c44a73105a49322628f8747a54a62245d",
"size": "6524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "workflow/widgets.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "432462"
},
{
"name": "Dockerfile",
"bytes": "109"
},
{
"name": "HTML",
"bytes": "437661"
},
{
"name": "JavaScript",
"bytes": "5654491"
},
{
"name": "Python",
"bytes": "1741812"
},
{
"name": "Shell",
"bytes": "4752"
}
],
"symlink_target": ""
} |
import xbmcgui
import sys
class SyncProgressBarBG(object):
    "class to handle the background progressbar"

    # Kodi background progress dialog and addon display name.
    _pDialog = None
    _addon_name = None

    def __init__(self, heading):
        "create the DialogProgressBG "
        # `language` is the addon's localized-string getter published by the
        # entry-point module.
        self.language = sys.modules["__main__"].language
        self._addon_name = heading
        self._pDialog = xbmcgui.DialogProgressBG()
        self._pDialog.create(heading, self.language(32000))

    def update(self, percent, msg):
        "wrap the update function"
        self._pDialog.update(percent, message=msg)

    def update_file_dl(self, file, tot_files, file_number):
        "specialized update fn for when downloading file"
        # String 32001 is expected to take (current, total, filename).
        text = self.language(32001).format(file_number, tot_files, file)
        # NOTE(review): `file_number*100 / tot_files` relies on integer
        # division (Python 2); under Python 3 this yields a float — confirm
        # the dialog accepts it.
        self._pDialog.update(file_number*100 / tot_files, message=text)

    def update_profile(self, tot_profile, profile_number):
        "specialized update fn for when changing profile"
        self._pDialog.update(profile_number*100 / tot_profile, heading=self._addon_name + " " + self.language(32002).format(profile_number, tot_profile), message=self.language(32000))

    def close(self):
        """
        close the progress bar
        """
        self._pDialog.close()
| {
"content_hash": "1c94d111411ed493f3844fd14fa41e85",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 184,
"avg_line_length": 29.952380952380953,
"alnum_prop": 0.6399046104928457,
"repo_name": "Panfloss/xbmc-script-ftpretriever",
"id": "89046f9be4cd8f715667c80deebcfd2b03b95a68",
"size": "1258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/lib/ui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12355"
}
],
"symlink_target": ""
} |
import sys
from optparse import OptionParser
import importlib
if __name__ == '__main__':
    # CLI entry point: desefu_export result.json output_file [-f html|pdf]
    _version = '0.1'
    parser = OptionParser(
        usage="%prog [options] result.json output_file",
        version=_version
    )
    parser.add_option("-f", "--format", choices=['html', 'pdf'])
    (options, args) = parser.parse_args()
    print("Desefu export (%s)" % _version)
    result_file = None
    output_file = 'output_file.html'
    # First positional argument: the DESEFU result JSON (required).
    try:
        result_file = args[0]
    except IndexError:
        print("Please insert result JSON file generated with DESEFU")
        sys.exit()
    _format = 'html'
    if options.format is not None:
        _format = options.format
    if _format == 'pdf':
        print("Sorry. PDF is not supported yet. Try to open html output file in browser and print it.")
        exit()
    # Second positional argument: output path (optional).
    # NOTE(review): the format is appended with no dot separator, so the
    # caller apparently must pass e.g. "report." — confirm intended usage.
    try:
        output_file = args[1] + _format
    except IndexError:
        print("Using default \"output_file.%s\"" % _format)
    print("-------------------------------------------------------------------")
    # Resolve e.g. formatter.html.HtmlFormatter.HtmlFormatter dynamically.
    formatter_mod = importlib.import_module('formatter.%s.%sFormatter' % (_format, _format.title()))
    formatter_class = getattr(formatter_mod, '%sFormatter' % _format.title())
    formatter_obj = formatter_class(result_file, output_file)
    formatter_obj.make_file()
    print("Done")
| {
"content_hash": "d956e4f8a63baf6c8a583f92af302c1c",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 103,
"avg_line_length": 28.956521739130434,
"alnum_prop": 0.5818318318318318,
"repo_name": "vdjagilev/desefu-export",
"id": "db08a2215c89508e7a2a33845c241eb4f6fe254a",
"size": "1356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "desefu_export.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8921"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds an integer `position` field to menu.Element and makes it the
    # default ordering key.

    dependencies = [
        ('menu', '0002_auto_20161217_1726'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='element',
            options={'ordering': ['position'], 'verbose_name': 'Element', 'verbose_name_plural': 'Elements'},
        ),
        migrations.AddField(
            model_name='element',
            name='position',
            field=models.SmallIntegerField(default=0, verbose_name='Position'),
        ),
    ]
| {
"content_hash": "3c0c6ed545224fe64dba3c62a98002a8",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 109,
"avg_line_length": 26.90909090909091,
"alnum_prop": 0.589527027027027,
"repo_name": "watchdogpolska/watchdog-kj-kultura",
"id": "13b46d53ba2d99dba7272d2e1014a3b0b6b41eb6",
"size": "665",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "watchdog_kj_kultura/menu/migrations/0003_auto_20161217_1733.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "182971"
},
{
"name": "HTML",
"bytes": "93827"
},
{
"name": "JavaScript",
"bytes": "3024"
},
{
"name": "Python",
"bytes": "168593"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from __future__ import print_function
from . import pathlib
# Built-in link variables resolved at import time.  Each pathlib helper class
# is instantiated and immediately called to yield the concrete path string.
PREDEFINED_VARIABLE_TABLE = {
    'HOME': pathlib.HomePath()(),    # user home directory
    'MYDOC': pathlib.MyDocumentPath()(),  # "My Documents" directory
}
| {
"content_hash": "c58ad95cb1399af5912099e38840a472",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 40,
"avg_line_length": 23.22222222222222,
"alnum_prop": 0.69377990430622,
"repo_name": "if1live/easylinker",
"id": "cb0f496cbd9b184d438e75a65ba325035903a019",
"size": "233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "easylinker/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16256"
}
],
"symlink_target": ""
} |
"""
Usage:
watch_psutil.py [options]
Options:
--delay <int> Seconds delay inbetween loop runs [default: 4]
--loop Loop the execution infinitely
--carbon-host <str> Host to send carbon stats to [default: carbon.service.consul]
Generic Options:
--loglevel, -L=<str> Loglevel [default: INFO]
(ERROR, CRITICAL, WARN, INFO, DEBUG)
--log2stdout, -l Log to stdout, otherwise to logfile. [default: False]
--logfile, -f=<path> Logfile to log to (default: <scriptname>.log)
--cfg, -c=<path> Configuration file.
-h --help Show this screen.
--version Show version.
"""
# load librarys
import logging
import os
import re
import time
import codecs
import ast
import sys
import psutil
import graphitesend
from os import environ
from ConfigParser import RawConfigParser, NoOptionError
try:
from docopt import docopt
except ImportError:
HAVE_DOCOPT = False
else:
HAVE_DOCOPT = True
__author__ = 'Christian Kniep <christian()qnib.org>'
__copyright__ = 'Copyright 2015 QNIB Solutions'
__license__ = """GPL v2 License (http://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)"""
class QnibConfig(RawConfigParser):
    """ Class to abstract config and options

    Merges docopt CLI options with an optional INI configuration file and
    exposes the result as instance attributes.
    """
    # Case-insensitive string values translated to Python literals by do_get.
    specials = {
        'TRUE': True,
        'FALSE': False,
        'NONE': None,
    }
def __init__(self, opt):
""" init """
RawConfigParser.__init__(self)
if opt is None:
self._opt = {
"--log2stdout": False,
"--logfile": None,
"--loglevel": "ERROR",
}
else:
self._opt = opt
self.logformat = '%(asctime)-15s %(levelname)-5s [%(module)s] %(message)s'
self.loglevel = opt['--loglevel']
self.log2stdout = opt['--log2stdout']
if self.loglevel is None and opt.get('--cfg') is None:
print "please specify loglevel (-L)"
sys.exit(0)
self.eval_cfg()
self.eval_opt()
self.set_logging()
logging.info("SetUp of QnibConfig is done...")
def do_get(self, section, key, default=None):
""" Also lent from: https://github.com/jpmens/mqttwarn
"""
try:
val = self.get(section, key)
if val.upper() in self.specials:
return self.specials[val.upper()]
return ast.literal_eval(val)
except NoOptionError:
return default
except ValueError: # e.g. %(xxx)s in string
return val
except:
raise
return val
def config(self, section):
''' Convert a whole section's options (except the options specified
explicitly below) into a dict, turning
[config:mqtt]
host = 'localhost'
username = None
list = [1, 'aaa', 'bbb', 4]
into
{u'username': None, u'host': 'localhost', u'list': [1, 'aaa', 'bbb', 4]}
Cannot use config.items() because I want each value to be
retrieved with g() as above
SOURCE: https://github.com/jpmens/mqttwarn
'''
d = None
if self.has_section(section):
d = dict((key, self.do_get(section, key))
for (key) in self.options(section) if key not in ['targets'])
return d
def eval_cfg(self):
""" eval configuration which overrules the defaults
"""
cfg_file = self._opt.get('--cfg')
if cfg_file is not None:
fd = codecs.open(cfg_file, 'r', encoding='utf-8')
self.readfp(fd)
fd.close()
self.__dict__.update(self.config('defaults'))
def eval_opt(self):
""" Updates cfg according to options """
def handle_logfile(val):
""" transforms logfile argument
"""
if val is None:
logf = os.path.splitext(os.path.basename(__file__))[0]
self.logfile = "%s.log" % logf.lower()
else:
self.logfile = val
self._mapping = {
'--logfile': lambda val: handle_logfile(val),
}
for key, val in self._opt.items():
if key in self._mapping:
if isinstance(self._mapping[key], str):
self.__dict__[self._mapping[key]] = val
else:
self._mapping[key](val)
break
else:
if val is None:
continue
mat = re.match("\-\-(.*)", key)
if mat:
self.__dict__[mat.group(1)] = val
else:
logging.info("Could not find opt<>cfg mapping for '%s'" % key)
def set_logging(self):
""" sets the logging """
self._logger = logging.getLogger()
self._logger.setLevel(logging.DEBUG)
if self.log2stdout:
hdl = logging.StreamHandler()
hdl.setLevel(self.loglevel)
formatter = logging.Formatter(self.logformat)
hdl.setFormatter(formatter)
self._logger.addHandler(hdl)
else:
hdl = logging.FileHandler(self.logfile)
hdl.setLevel(self.loglevel)
formatter = logging.Formatter(self.logformat)
hdl.setFormatter(formatter)
self._logger.addHandler(hdl)
def __str__(self):
""" print human readble """
ret = []
for key, val in self.__dict__.items():
if not re.match("_.*", key):
ret.append("%-15s: %s" % (key, val))
return "\n".join(ret)
def __getitem__(self, item):
""" return item from opt or __dict__
:param item: key to lookup
:return: value of key
"""
if item in self.__dict__.keys():
return self.__dict__[item]
else:
return self._opt[item]
class WatchPs(object):
    """Collects per-process statistics via psutil and ships them to carbon/graphite."""

    def __init__(self, cfg):
        """Store the QnibConfig; exit early (after 1s) if SKIP_PSWATCH is set."""
        self._cfg = cfg
        if 'SKIP_PSWATCH' in environ:
            time.sleep(1)
            sys.exit(0)
        self.cnt_gsend()

    def loop(self):
        """Run forever, sleeping '--delay' seconds between iterations."""
        while True:
            self.run()
            time.sleep(int(self._cfg['--delay']))

    def cnt_gsend(self):
        """Connect to the graphite server, retrying every 5s until it succeeds.

        The metric prefix is derived from the CNT_TYPE environment variable.
        """
        if 'CNT_TYPE' in environ:
            pre = "psutil.%s" % environ['CNT_TYPE']
        else:
            pre = "psutil.notype"
        # BUG FIX: retry with a loop instead of recursion — the recursive
        # variant eventually exceeded the interpreter recursion limit when
        # the server stayed unreachable for a long time.
        while True:
            try:
                self._gsend = graphitesend.init(
                    graphite_server=self._cfg['--carbon-host'], prefix=pre)
                return
            except graphitesend.GraphiteSendException:
                time.sleep(5)

    def run(self):
        """Walk all processes once and emit per-process and aggregate metrics."""
        cnt = {
            "state": {},
            "user": {},
        }
        for proc in psutil.process_iter():
            try:
                pinfo = proc.as_dict(attrs=['pid', 'name', 'username'])
                pinfo['num_threads'] = proc.num_threads()
                cpu_times = proc.cpu_times()
                pinfo['cpu_user'], pinfo['cpu_system'] = cpu_times.user, cpu_times.system
                pinfo['state'] = proc.status()
                ctx_sw = proc.num_ctx_switches()
                pinfo['ctx_sw_vol'], pinfo['ctx_sw_invol'] = ctx_sw.voluntary, ctx_sw.involuntary
                # NOTE(review): the key says 'ctx' but this is the open-fd
                # count; kept as-is because the key becomes the metric name.
                pinfo['ctx_fds'] = proc.num_fds()
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                # process vanished or is not ours to inspect — skip it
                pass
            else:
                if pinfo['username'] != "root":
                    # per-user process count (root excluded)
                    if pinfo['username'] not in cnt['user']:
                        cnt['user'][pinfo['username']] = 0
                    cnt['user'][pinfo['username']] += 1
                if pinfo['name'] == 'bash':
                    continue
                if pinfo['name'] == 'python':
                    # report the script name instead of the interpreter name
                    mat = re.match(r".*/([a-zA-Z0-9\_\-]+)\.py", proc.cmdline()[1])
                    if mat:
                        pinfo['name'] = mat.group(1)
                if pinfo['name'] in ("watch_psutil", "sleep"):
                    # don't watch ourselves or the idle sleeps
                    continue
                if pinfo['state'] not in cnt['state']:
                    cnt['state'][pinfo['state']] = 0
                cnt['state'][pinfo['state']] += 1
                if pinfo['state'] != "running":
                    continue
                # per-process metrics only for currently running processes
                mkey = "%(username)s.%(name)s" % pinfo
                for key, val in pinfo.items():
                    if key in ('pid', 'name', 'username', 'state'):
                        continue
                    self._gsend.send("%s.%s" % (mkey, key), val)
                    self._cfg._logger.debug("%s.%s %s" % (mkey, key, val))
        for key, val in cnt['user'].items():
            self._gsend.send("user.%s" % key, val)
            # BUG FIX: this debug line previously logged 'state.<user>'
            self._cfg._logger.debug("user.%s %s" % (key, val))
        for key, val in cnt['state'].items():
            self._gsend.send("state.%s" % key, val)
            self._cfg._logger.debug("state.%s %s" % (key, val))
def main():
    """Script entry point: parse CLI options, build the config, start the watcher."""
    options = docopt(__doc__, version='Watch psutil 0.1') if HAVE_DOCOPT else None
    qcfg = QnibConfig(options)
    watcher = WatchPs(qcfg)
    if qcfg['--loop']:
        watcher.loop()
    else:
        watcher.run()


if __name__ == "__main__":
    main()
| {
"content_hash": "f6ed7e04900e8165e42188a6b4d46a21",
"timestamp": "",
"source": "github",
"line_count": 300,
"max_line_length": 99,
"avg_line_length": 32.026666666666664,
"alnum_prop": 0.48844712739383844,
"repo_name": "ChristianKniep/docker-terminal",
"id": "bc2e64cb477470d044a66936a6da4c36f12c326e",
"size": "9656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opt/qnib/bin/watch_psutil.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "128"
},
{
"name": "Python",
"bytes": "14977"
},
{
"name": "Shell",
"bytes": "1908"
}
],
"symlink_target": ""
} |
from nougat import Nougat

# Minimal nougat demo application.
app = Nougat()


async def middleware(response):
    # Every request is answered with the same fixed body.
    response.content = 'Hello world'

# Register the middleware and start the server via the app manager.
app.use(middleware)

app.manager.up()
| {
"content_hash": "a2a754f75798d42bf6cb869e13e01bb0",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 36,
"avg_line_length": 11.846153846153847,
"alnum_prop": 0.7142857142857143,
"repo_name": "Kilerd/nougat",
"id": "b11f400e59b53b73bdad045ca5089a01a47a3906",
"size": "154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demos/manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32143"
}
],
"symlink_target": ""
} |
import logging
from apps.atencion.forms.ConsultaForm import ConsultaForm
from apps.atencion.forms.DetalleRecetaForm import DetalleRecetaForm
from apps.atencion.forms.TratamientoForm import TratamientoForm
from apps.atencion.models import Consulta, AntecedenteMedico, DiagnosticoConsulta, Tratamiento, DetalleReceta
log = logging.getLogger(__name__)
from apps.utils.security import SecurityKey, log_params, UserToken, get_dep_objects
from django import http
from django.conf.locale import da
from django.core.urlresolvers import reverse, reverse_lazy
from django.db import transaction
from django.shortcuts import render, render_to_response, redirect
from django.views.generic import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView
from django.core import serializers
from django.http import HttpResponse, request
from django.db.models import Max, Sum, Count
from django.contrib import messages
from django.shortcuts import get_list_or_404, get_object_or_404
from datetime import datetime, time, date
from django.contrib.auth import authenticate, login, logout
from django.utils.encoding import force_text
from django.contrib.messages.views import SuccessMessageMixin
from apps.utils.decorators import permission_resource_required
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from apps.utils.forms import empty
import json
from django.utils.text import capfirst, get_text_list
from .forms.PersonaForm import PersonaForm
from .forms.LaboratorioForm import LaboratorioForm
from .forms.ProductoForm import ProductoForm
from .forms.PeriodoForm import PeriodoForm
from .forms.FuncionesVitalesForm import FuncionesVitalesForm
from .forms.UnidadMedidaForm import UnidadMedidaForm
from .forms.HistoriaForm import HistoriaForm
from .forms.DiagnosticoForm import DiagnosticoForm
from .forms.AntecendeMedicoForm import AntecedenteMedicoForm
from .models import (Persona, Producto, Laboratorio, FuncionesVitales,
Periodo, Diagnostico, UnidadMedida, Historia, Departamento, Provincia, Distrito,ReporteAtencion)
# class Persona==============================================================================
class PersonaListView(ListView):
    """Paginated, filterable listing of Persona records."""

    model = Persona
    template_name = 'persona/persona_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # resource-level permission check wraps the whole request cycle
        return super(PersonaListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # '?all' in the query string disables pagination entirely
        return None if 'all' in self.request.GET else ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'nombres')
        self.q = empty(self.request, 'q', '')
        lookup = {u'%s__%s' % (self.f, 'contains'): self.q}
        return self.model.objects.filter(**lookup).order_by(self.o)

    def get_context_data(self, **kwargs):
        context = super(PersonaListView, self).get_context_data(**kwargs)
        context.update({
            'opts': self.model._meta,
            'cmi': 'persona',
            'title': _('Select %s to change') % capfirst(_('Persona')),
            'o': self.o,
            'f': self.f,
            'q': self.q.replace('/', '-'),
        })
        return context
class ProvinciaAjax(TemplateView):
    """AJAX endpoint returning <option> markup for the provinces of a departamento."""

    def get(self, request, *args, **kwargs):
        id_departamento = request.GET.get('id')
        if id_departamento:
            queryset = Provincia.objects.filter(departamento__id=id_departamento)
        else:
            # no departamento given: id 0 never exists, yields an empty set
            queryset = Provincia.objects.filter(departamento__id=0)
        parts = ['<option value="" selected="selected">---------</option>']
        for provincia in queryset:
            parts.append('<option value="%s">%s</option>' % (provincia.pk, provincia.nombre))
        return http.JsonResponse({'provincias': ''.join(parts)})
class DistritoAjax(TemplateView):
    """AJAX endpoint returning <option> markup for the districts of a provincia."""

    def get(self, request, *args, **kwargs):
        id_provincia = request.GET.get('id')
        if id_provincia:
            queryset = Distrito.objects.filter(provincia__id=id_provincia)
        else:
            # no provincia given: id 0 never exists, yields an empty set
            queryset = Distrito.objects.filter(provincia__id=0)
        parts = ['<option value="" selected="selected">---------</option>']
        for distrito in queryset:
            parts.append('<option value="%s">%s</option>' % (distrito.pk, distrito.nombre))
        return http.JsonResponse({'distritos': ''.join(parts)})
class PersonaCreateView(CreateView):
    """Create a new Persona, recording the acting user on the instance."""

    model = Persona
    form_class = PersonaForm
    template_name = 'persona/persona_add.html'
    success_url = reverse_lazy('atencion:persona_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(PersonaCreateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(PersonaCreateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'persona'
        context['title'] = ('Agregar %s') % ('Persona')
        return context

    def form_valid(self, form):
        # attach the acting user before ModelFormMixin saves the instance
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        # BUG FIX: the message was previously built and gated on
        # self.object.id *before* the save, so id was always None and the
        # success message/log never fired for newly created objects.
        response = super(PersonaCreateView, self).form_valid(form)
        msg = (' %(name)s "%(obj)s" fue creado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return response
class PersonaUpdateView(UpdateView):
    """Edit an existing Persona; shares its template with the create view."""

    model = Persona
    template_name = 'persona/persona_add.html'
    form_class = PersonaForm
    success_url = reverse_lazy('atencion:persona_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(PersonaUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(PersonaUpdateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'persona'
        # NOTE(review): title says 'Add' although this is the update view
        context['title'] = _('Add %s') % _('Persona')
        return context

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # record who performed the change
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        # on update the instance already has an id, so the message fires
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(PersonaUpdateView, self).form_valid(form)
class PersonaDeleteView(DeleteView):
    """Delete a Persona after checking for dependent objects."""

    model = Persona
    success_url = reverse_lazy('atencion:persona_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # bail out early (with a flash message) when the object cannot be loaded
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(PersonaDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            d = self.get_object()
            # refuse deletion when other objects still reference this one
            deps, msg = get_dep_objects(d)
            print(deps)
            if deps:
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(d) + '"'
                })
                raise Exception(msg)
            d.delete()
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(d)
            }
            # after delete() the in-memory instance has id None, so this fires
            if not d.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # GET deletes directly — there is no confirmation page
        return self.delete(request, *args, **kwargs)
class HitoriaBusquedaTemplateView(TemplateView):
    """Search view: finds a Persona by student code or DNI and shows its Historia."""

    template_name = "historial/busqueda.html"
    formulario = HistoriaForm

    def get_context_data(self, **kwargs):
        context = super(HitoriaBusquedaTemplateView, self).get_context_data(**kwargs)
        context['form'] = self.formulario
        # BUG FIX: the context was built but never returned, so callers
        # received None instead of the context dict.
        return context

    def get(self, request, *args, **kwargs):
        codigo = request.GET.get('codigo')
        estudiante = None
        matriculado = None
        # look the input up both as student code and as DNI
        try:
            persona_codigo = Persona.objects.get(codigo=codigo)
        except Exception:
            persona_codigo = None
        try:
            persona_dni = Persona.objects.get(dni=codigo)
        except Exception:
            persona_dni = None
        persona = None
        # as before, a DNI match takes precedence over a code match;
        # the status labels accumulate across both candidates
        for candidate in (persona_codigo, persona_dni):
            if candidate is None:
                continue
            persona = candidate
            if candidate.es_estudiante:
                estudiante = "Estudiante"
            if candidate.es_matriculado:
                matriculado = "Matriculado"
        try:
            historia = Historia.objects.get(persona__id=persona.id)
        except Exception:
            # also reached when no persona was found at all
            historia = None
        context = {'persona': persona, 'historia': historia,
                   'estudiante': estudiante, 'matriculado': matriculado}
        return self.render_to_response(context)
class HitoriaCreateView(CreateView):
    """Create a Historia; its 'numero' is the persona's student code or DNI."""

    model = Historia
    form_class = HistoriaForm
    template_name = 'historial/historia_add.html'

    def get_success_url(self):
        # jump straight to the detail page of the new history
        return reverse('atencion:historia_detail', kwargs={'pk': self.object.pk})

    def form_valid(self, form):
        self.object = form.save(commit=False)
        persona = Persona.objects.get(id=self.request.POST['persona'])
        # students are keyed by their code, everybody else by their DNI
        if persona.es_estudiante:
            self.object.numero = persona.codigo
        else:
            self.object.numero = persona.dni
        return super(HitoriaCreateView, self).form_valid(form)
class HitoriaDetailView(DetailView):
    """Dashboard for one Historia: forms for vitals, consultation, treatment
    and prescriptions plus the current consultation state."""

    model = Historia
    form_f_vitales = FuncionesVitalesForm
    template_name = 'historial/historia_detail.html'
    form_consulta = ConsultaForm
    form_tratamiento = TratamientoForm
    form_antecedente = AntecedenteMedicoForm
    form_receta = DetalleRecetaForm

    def get_context_data(self, **kwargs):
        context = super(HitoriaDetailView, self).get_context_data(**kwargs)
        try:
            antecedente = AntecedenteMedico.objects.get(historia=self.object)
        except Exception as e:
            antecedente = None
        context['form'] = self.form_f_vitales
        context['form_receta'] = self.form_receta
        context['form_antecedente'] = self.form_antecedente
        context['form_consulta'] = self.form_consulta
        context['form_tratamiento'] = self.form_tratamiento
        # latest consultation with estado=False — presumably the last closed
        # one (estado=True is treated as "in progress" below); TODO confirm
        consulta = Consulta.objects.filter(historia=self.object).filter(estado=False).last()
        context['consulta'] = consulta
        context['antecedente'] = antecedente
        try:
            # consultation still in progress, if any
            context['proceso'] = Consulta.objects.get(estado=True, historia=self.object)
        except Exception as e:
            context['proceso'] = None
        return context
class DiagnosticoConsultaCreate(TemplateView):
    """POST endpoint closing an in-progress consultation.

    Expects a JSON payload in POST['proceso'] containing the exam data,
    the treatment recommendation, the prescribed products ('medicamento')
    and the diagnosis ids ('diagnostico').
    """

    def post(self, request):
        # BUG FIX: parse the payload and resolve the Historia *before* the
        # try block — previously 'historia' could be unbound at the redirect
        # (NameError) when parsing or the lookup failed early.
        proceso = json.loads(request.POST.get('proceso'))
        historia = Historia.objects.get(id=proceso['historia'])
        # NOTE(review): this savepoint is created but never committed or
        # rolled back, so partial writes survive a failure — kept as-is to
        # preserve behaviour; consider transaction.atomic() here.
        sid = transaction.savepoint()
        try:
            # close the currently open consultation
            consulta = Consulta.objects.get(historia=historia, estado=True)
            consulta.examen_fisico = proceso['examen']
            consulta.enfermedad_actual = proceso['enfermedad']
            consulta.hecho = True
            consulta.estado = False
            consulta.save()
            tratamiento = Tratamiento()
            tratamiento.recomendacion = proceso['recomendacion']
            tratamiento.consulta = consulta
            tratamiento.save()
            # one DetalleReceta row per prescribed product
            for c in proceso['medicamento']:
                producto = Producto.objects.get(codigo=c['codigo'])
                presentacion = UnidadMedida.objects.get(id=c['presentacion'])
                receta = DetalleReceta()
                receta.tratamiento = tratamiento
                receta.producto = producto
                receta.cantidad = c['cantidad']
                receta.presentacion = presentacion
                receta.dosis = c['dosis']
                receta.periodo = c['periodo']
                receta.save()
            # link every selected diagnosis to the consultation
            for c in proceso['diagnostico']:
                diagnostico = Diagnostico.objects.get(id=c['pkey'])
                diag = DiagnosticoConsulta()
                diag.diagnostico = diagnostico
                diag.consulta = consulta
                diag.save()
        except Exception as e:
            # BUG FIX: log the failure instead of a bare print()
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(reverse('atencion:historia_detail', kwargs={'pk': historia.pk}))
class DiagnosticoBuscar(TemplateView):
    """Look up a Diagnostico by its code and return it serialized as JSON."""

    def get(self, request, *args, **kwargs):
        encontrado = Diagnostico.objects.get(codigo=request.GET.get('codigo'))
        payload = serializers.serialize('json', [encontrado, ])
        return HttpResponse(payload, content_type='application/json')
class AntecedenteCreateView(CreateView):
    """Create an AntecedenteMedico bound to the Historia given in POST."""

    model = AntecedenteMedico
    form_class = AntecedenteMedicoForm

    def get_success_url(self):
        return reverse('atencion:historia_detail',
                       kwargs={'pk': self.object.historia.pk})

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # bind the new record to its medical history before saving
        self.object.historia = Historia.objects.get(id=self.request.POST['historia'])
        return super(AntecedenteCreateView, self).form_valid(form)
# class Producto==============================================================================
class ProductoListView(ListView):
    """Paginated, filterable listing of Producto records."""

    model = Producto
    template_name = 'producto/producto_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # resource-level permission check wraps the whole request cycle
        return super(ProductoListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # '?all' in the query string disables pagination entirely
        return None if 'all' in self.request.GET else ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'codigo')
        self.q = empty(self.request, 'q', '')
        lookup = {u'%s__%s' % (self.f, 'contains'): self.q}
        return self.model.objects.filter(**lookup).order_by(self.o)

    def get_context_data(self, **kwargs):
        context = super(ProductoListView, self).get_context_data(**kwargs)
        context.update({
            'opts': self.model._meta,
            'cmi': 'producto',
            'title': _('Select %s to change') % capfirst(_('Producto')),
            'o': self.o,
            'f': self.f,
            'q': self.q.replace('/', '-'),
        })
        return context
class ProductoBuscarAjaxView(TemplateView):
    """Look up a Producto by its code and return it serialized as JSON."""

    def get(self, request, *args, **kwargs):
        codigo = request.GET.get('codigo')
        # CLEANUP: dropped leftover debug print() calls and renamed the
        # local 'object', which shadowed the builtin.
        producto = Producto.objects.get(codigo=codigo)
        data = serializers.serialize('json', [producto, ])
        return HttpResponse(data, content_type='application/json')
class ProductoCreateView(CreateView):
    """Create a new Producto, recording the acting user on the instance."""

    model = Producto
    form_class = ProductoForm
    template_name = 'producto/producto_add.html'
    success_url = reverse_lazy('atencion:producto_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(ProductoCreateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(ProductoCreateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'producto'
        context['title'] = ('Agregar %s') % ('Producto')
        return context

    def form_valid(self, form):
        # attach the acting user before ModelFormMixin saves the instance
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        # BUG FIX: the message was previously gated on self.object.id
        # *before* the save, so it never fired for newly created objects.
        response = super(ProductoCreateView, self).form_valid(form)
        msg = _(' %(name)s "%(obj)s" fue creado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return response
class ProductoUpdateView(UpdateView):
    """Edit an existing Producto; shares its template with the create view."""

    model = Producto
    template_name = 'producto/producto_add.html'
    form_class = ProductoForm
    success_url = reverse_lazy('atencion:producto_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(ProductoUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(ProductoUpdateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'producto'
        # NOTE(review): title says 'Add' although this is the update view
        context['title'] = _('Add %s') % _('Producto')
        return context

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # record who performed the change
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        # on update the instance already has an id, so the message fires
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(ProductoUpdateView, self).form_valid(form)
class ProductoDeleteView(DeleteView):
    """Delete a Producto after checking for dependent objects."""

    model = Producto
    success_url = reverse_lazy('atencion:producto_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # bail out early (with a flash message) when the object cannot be loaded
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(ProductoDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            d = self.get_object()
            # refuse deletion when other objects still reference this one
            deps, msg = get_dep_objects(d)
            print(deps)
            if deps:
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(d) + '"'
                })
                raise Exception(msg)
            d.delete()
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(d)
            }
            # after delete() the in-memory instance has id None, so this fires
            if not d.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # GET deletes directly — there is no confirmation page
        return self.delete(request, *args, **kwargs)
# class Laboratorio==============================================================================
class LaboratorioListView(ListView):
    """Paginated, filterable listing of Laboratorio records."""

    model = Laboratorio
    template_name = 'laboratorio/laboratorio_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # resource-level permission check wraps the whole request cycle
        return super(LaboratorioListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # '?all' in the query string disables pagination entirely
        return None if 'all' in self.request.GET else ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'hemoglobina')
        self.q = empty(self.request, 'q', '')
        lookup = {u'%s__%s' % (self.f, 'contains'): self.q}
        return self.model.objects.filter(**lookup).order_by(self.o)

    def get_context_data(self, **kwargs):
        context = super(LaboratorioListView, self).get_context_data(**kwargs)
        context.update({
            'opts': self.model._meta,
            'cmi': 'laboratorio',
            'title': _('Select %s to change') % capfirst(_('Laboratorio')),
            'o': self.o,
            'f': self.f,
            'q': self.q.replace('/', '-'),
        })
        return context
class LaboratorioCreateView(CreateView):
    """Create a new Laboratorio record, recording the acting user."""

    model = Laboratorio
    form_class = LaboratorioForm
    template_name = 'laboratorio/laboratorio_add.html'
    success_url = reverse_lazy('atencion:laboratorio_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(LaboratorioCreateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(LaboratorioCreateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'laboratorio'
        context['title'] = ('Agregar %s') % ('Laboratorio')
        return context

    def form_valid(self, form):
        # attach the acting user before ModelFormMixin saves the instance
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        # BUG FIX: the message was previously gated on self.object.id
        # *before* the save, so it never fired for newly created objects.
        response = super(LaboratorioCreateView, self).form_valid(form)
        msg = _(' %(name)s "%(obj)s" fue creado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return response
class LaboratorioUpdateView(UpdateView):
    """Edit an existing Laboratorio; shares its template with the create view."""

    model = Laboratorio
    template_name = 'laboratorio/laboratorio_add.html'
    form_class = LaboratorioForm
    success_url = reverse_lazy('atencion:laboratorio_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(LaboratorioUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(LaboratorioUpdateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'laboratorio'
        # NOTE(review): title says 'Add' although this is the update view
        context['title'] = _('Add %s') % _('Laboratorio')
        return context

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # record who performed the change
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        # on update the instance already has an id, so the message fires
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(LaboratorioUpdateView, self).form_valid(form)
class LaboratorioDeleteView(DeleteView):
    """Delete a Laboratorio after checking for dependent objects."""

    model = Laboratorio
    success_url = reverse_lazy('atencion:laboratorio_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # bail out early (with a flash message) when the object cannot be loaded
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(LaboratorioDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            d = self.get_object()
            # refuse deletion when other objects still reference this one
            deps, msg = get_dep_objects(d)
            print(deps)
            if deps:
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(d) + '"'
                })
                raise Exception(msg)
            d.delete()
            # BUG FIX: user-facing message typo "fuel eliminado" -> "fue eliminado"
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(d)
            }
            # after delete() the in-memory instance has id None, so this fires
            if not d.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # GET deletes directly — there is no confirmation page
        return self.delete(request, *args, **kwargs)
# class FuncionesVitales==============================================================================
class FuncionesVitalesListView(ListView):
    """Paginated, filterable listing of FuncionesVitales records."""

    model = FuncionesVitales
    template_name = 'funciones_vitales/funcionesvitales_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # resource-level permission check wraps the whole request cycle
        return super(FuncionesVitalesListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # '?all' in the query string disables pagination entirely
        return None if 'all' in self.request.GET else ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'peso')
        self.q = empty(self.request, 'q', '')
        lookup = {u'%s__%s' % (self.f, 'contains'): self.q}
        return self.model.objects.filter(**lookup).order_by(self.o)

    def get_context_data(self, **kwargs):
        context = super(FuncionesVitalesListView, self).get_context_data(**kwargs)
        context.update({
            'opts': self.model._meta,
            'cmi': 'funcionesvitales',
            'title': _('Select %s to change') % capfirst(_('FuncionesVitales')),
            'o': self.o,
            'f': self.f,
            'q': self.q.replace('/', '-'),
        })
        return context
class FuncionesVitalesCreateView(CreateView):
    """Record vital signs; saving them opens a new Consulta on the Historia."""

    model = FuncionesVitales
    form_class = FuncionesVitalesForm
    template_name = 'funciones_vitales/funcionesvitales_add.html'

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(FuncionesVitalesCreateView, self).dispatch(request, *args, **kwargs)

    def get_success_url(self):
        # back to the detail page of the history the consultation belongs to
        return reverse('atencion:historia_detail', kwargs={'pk': self.object.consulta.historia.pk})

    def form_valid(self, form):
        self.object = form.save(commit=False)
        historiaid = self.request.POST['historia']
        historia = Historia.objects.get(id=historiaid)
        # every set of vital signs starts a fresh consultation
        consulta = Consulta()
        consulta.historia = historia
        consulta.save()
        self.object.consulta = consulta
        return super(FuncionesVitalesCreateView, self).form_valid(form)
class FuncionesVitalesUpdateView(UpdateView):
    """Edit an existing FuncionesVitales record."""

    model = FuncionesVitales
    template_name = 'funciones_vitales/funcionesvitales_add.html'
    form_class = FuncionesVitalesForm
    success_url = reverse_lazy('atencion:funcionesvitales_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(FuncionesVitalesUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(FuncionesVitalesUpdateView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'funcionesvitales'
        # NOTE(review): title says 'Add' although this is the update view
        context['title'] = _('Add %s') % _('FuncionesVitales')
        return context

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # record who performed the change
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object)
        }
        # on update the instance already has an id, so the message fires
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(FuncionesVitalesUpdateView, self).form_valid(form)
class FuncionesVitalesDeleteView(DeleteView):
    """Delete a FuncionesVitales record after checking for dependent objects."""

    model = FuncionesVitales
    success_url = reverse_lazy('atencion:funcionesvitales_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # bail out early (with a flash message) when the object cannot be loaded
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(FuncionesVitalesDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            d = self.get_object()
            # refuse deletion when other objects still reference this one
            deps, msg = get_dep_objects(d)
            print(deps)
            if deps:
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(d) + '"'
                })
                raise Exception(msg)
            d.delete()
            # BUG FIX: user-facing message typo "fuel eliminado" -> "fue eliminado"
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(d)
            }
            # after delete() the in-memory instance has id None, so this fires
            if not d.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # GET deletes directly — there is no confirmation page
        return self.delete(request, *args, **kwargs)
# class Periodo==============================================================================
class PeriodoListView(ListView):
    """Paginated, filterable listing of Periodo records."""

    model = Periodo
    template_name = 'periodo/periodo_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(PeriodoListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # ?all in the query string disables pagination entirely.
        if 'all' in self.request.GET:
            return None
        return ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term.
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'ciclo')
        self.q = empty(self.request, 'q', '')
        lookup = u'%s__%s' % (self.f, 'contains')
        return self.model.objects.filter(**{lookup: self.q}).order_by(self.o)

    def get_context_data(self, **kwargs):
        ctx = super(PeriodoListView, self).get_context_data(**kwargs)
        ctx.update({
            'opts': self.model._meta,
            'cmi': 'periodo',
            'title': _('Select %s to change') % capfirst(_('Periodo')),
            'o': self.o,
            'f': self.f,
            'q': self.q.replace('/', '-'),
        })
        return ctx
class PeriodoCreateView(CreateView):
    """Create a new Periodo owned by the current user."""

    model = Periodo
    form_class = PeriodoForm
    template_name = 'periodo/periodo_add.html'
    success_url = reverse_lazy('atencion:periodo_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(PeriodoCreateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super(PeriodoCreateView, self).get_context_data(**kwargs)
        ctx['opts'] = self.model._meta
        ctx['cmi'] = 'periodo'
        ctx['title'] = ('Agregar %s') % ('Periodo')
        return ctx

    def form_valid(self, form):
        # Stamp the creating user on the record before the parent saves it.
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        msg = _(' %(name)s "%(obj)s" fue creado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object),
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(PeriodoCreateView, self).form_valid(form)
class PeriodoUpdateView(UpdateView):
    """Edit an existing Periodo record."""

    model = Periodo
    template_name = 'periodo/periodo_add.html'
    form_class = PeriodoForm
    success_url = reverse_lazy('atencion:periodo_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(PeriodoUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super(PeriodoUpdateView, self).get_context_data(**kwargs)
        ctx['opts'] = self.model._meta
        ctx['cmi'] = 'periodo'
        ctx['title'] = _('Add %s') % _('Periodo')
        return ctx

    def form_valid(self, form):
        # Stamp the editing user on the record before the parent saves it.
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object),
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(PeriodoUpdateView, self).form_valid(form)
class PeriodoDeleteView(DeleteView):
    """Delete a Periodo after checking it has no dependent objects."""

    model = Periodo
    success_url = reverse_lazy('atencion:periodo_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # Redirect with a friendly error instead of a 404 when missing.
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(PeriodoDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            obj = self.get_object()
            deps, msg = get_dep_objects(obj)
            print(deps)
            if deps:
                # Refuse to delete while other records still reference it.
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(obj) + '"'
                })
                raise Exception(msg)
            obj.delete()
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(obj)
            }
            if not obj.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # Allow deletion via plain GET links.
        return self.delete(request, *args, **kwargs)
# class Diagnostico==============================================================================
class DiagnosticoListView(ListView):
    """Paginated, filterable listing of Diagnostico records.

    Fix: the menu key ``cmi`` was misspelled ``'diagnostio'``; corrected to
    ``'diagnostico'`` to match DiagnosticoCreateView/DiagnosticoUpdateView.
    """

    model = Diagnostico
    template_name = 'diagnostico/diagnostico_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(DiagnosticoListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # ?all in the query string disables pagination entirely.
        if 'all' in self.request.GET:
            return None
        return ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term.
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'codigo')
        self.q = empty(self.request, 'q', '')
        column_contains = u'%s__%s' % (self.f, 'contains')
        return self.model.objects.filter(**{column_contains: self.q}).order_by(self.o)

    def get_context_data(self, **kwargs):
        context = super(DiagnosticoListView, self).get_context_data(**kwargs)
        context['opts'] = self.model._meta
        context['cmi'] = 'diagnostico'
        context['title'] = _('Select %s to change') % capfirst(_('Diagnostico'))
        context['o'] = self.o
        context['f'] = self.f
        context['q'] = self.q.replace('/', '-')
        return context
class DiagnosticoCreateView(CreateView):
    """Create a new Diagnostico owned by the current user."""

    model = Diagnostico
    form_class = DiagnosticoForm
    template_name = 'diagnostico/diagnostico_add.html'
    success_url = reverse_lazy('atencion:diagnostico_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(DiagnosticoCreateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super(DiagnosticoCreateView, self).get_context_data(**kwargs)
        ctx['opts'] = self.model._meta
        ctx['cmi'] = 'diagnostico'
        ctx['title'] = ('Agregar %s') % ('Diagnostico')
        return ctx

    def form_valid(self, form):
        # Stamp the creating user on the record before the parent saves it.
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        msg = _(' %(name)s "%(obj)s" fue creado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object),
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(DiagnosticoCreateView, self).form_valid(form)
class DiagnosticoUpdateView(UpdateView):
    """Edit an existing Diagnostico record."""

    model = Diagnostico
    template_name = 'diagnostico/diagnostico_add.html'
    form_class = DiagnosticoForm
    success_url = reverse_lazy('atencion:diagnostico_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(DiagnosticoUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super(DiagnosticoUpdateView, self).get_context_data(**kwargs)
        ctx['opts'] = self.model._meta
        ctx['cmi'] = 'diagnostico'
        ctx['title'] = _('Add %s') % _('Diagnostico')
        return ctx

    def form_valid(self, form):
        # Stamp the editing user on the record before the parent saves it.
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object),
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(DiagnosticoUpdateView, self).form_valid(form)
class DiagnosticoDeleteView(DeleteView):
    """Delete a Diagnostico after checking it has no dependent objects.

    Fix: ``success_url`` pointed at the misspelled route
    ``'atencion:pdiagnostico_list'``; corrected to
    ``'atencion:diagnostico_list'`` (the route the other Diagnostico views
    use), so redirects after delete no longer raise NoReverseMatch.
    """

    model = Diagnostico
    success_url = reverse_lazy('atencion:diagnostico_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # Redirect with a friendly error instead of a 404 when missing.
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(DiagnosticoDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            d = self.get_object()
            deps, msg = get_dep_objects(d)
            print(deps)
            if deps:
                # Refuse to delete while other records still reference it.
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(d) + '"'
                })
                raise Exception(msg)
            d.delete()
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(d)
            }
            if not d.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # Allow deletion via plain GET links.
        return self.delete(request, *args, **kwargs)
# UnidadMedida==============================================================================
class UnidadMedidaListView(ListView):
    """Paginated, filterable listing of UnidadMedida records."""

    model = UnidadMedida
    template_name = 'unidad_medida/unidadmedida_list.html'
    paginate_by = settings.PER_PAGE

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(UnidadMedidaListView, self).dispatch(request, *args, **kwargs)

    def get_paginate_by(self, queryset):
        # ?all in the query string disables pagination entirely.
        if 'all' in self.request.GET:
            return None
        return ListView.get_paginate_by(self, queryset)

    def get_queryset(self):
        # o = ordering, f = field to filter on, q = search term.
        self.o = empty(self.request, 'o', '-id')
        self.f = empty(self.request, 'f', 'codigo')
        self.q = empty(self.request, 'q', '')
        lookup = u'%s__%s' % (self.f, 'contains')
        return self.model.objects.filter(**{lookup: self.q}).order_by(self.o)

    def get_context_data(self, **kwargs):
        ctx = super(UnidadMedidaListView, self).get_context_data(**kwargs)
        ctx.update({
            'opts': self.model._meta,
            'cmi': 'unidadmedida',
            'title': _('Select %s to change') % capfirst(_('UnidadMedida')),
            'o': self.o,
            'f': self.f,
            'q': self.q.replace('/', '-'),
        })
        return ctx
class UnidadMedidaCreateView(CreateView):
    """Create a new UnidadMedida owned by the current user."""

    model = UnidadMedida
    form_class = UnidadMedidaForm
    template_name = 'unidad_medida/unidadmedida_add.html'
    success_url = reverse_lazy('atencion:unidadmedida_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(UnidadMedidaCreateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super(UnidadMedidaCreateView, self).get_context_data(**kwargs)
        ctx['opts'] = self.model._meta
        ctx['cmi'] = 'unidadmedida'
        ctx['title'] = ('Agregar %s') % ('UnidadMedida')
        return ctx

    def form_valid(self, form):
        # Stamp the creating user on the record before the parent saves it.
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        msg = _(' %(name)s "%(obj)s" fue creado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object),
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(UnidadMedidaCreateView, self).form_valid(form)
class UnidadMedidaUpdateView(UpdateView):
    """Edit an existing UnidadMedida record."""

    model = UnidadMedida
    template_name = 'unidad_medida/unidadmedida_add.html'
    form_class = UnidadMedidaForm
    success_url = reverse_lazy('atencion:unidadmedida_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        return super(UnidadMedidaUpdateView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super(UnidadMedidaUpdateView, self).get_context_data(**kwargs)
        ctx['opts'] = self.model._meta
        ctx['cmi'] = 'unidadmedida'
        ctx['title'] = _('Add %s') % _('UnidadMedida')
        return ctx

    def form_valid(self, form):
        # Stamp the editing user on the record before the parent saves it.
        self.object = form.save(commit=False)
        self.object.usuario = self.request.user
        msg = _('%(name)s "%(obj)s" fue cambiado satisfactoriamente.') % {
            'name': capfirst(force_text(self.model._meta.verbose_name)),
            'obj': force_text(self.object),
        }
        if self.object.id:
            messages.success(self.request, msg)
            log.warning(msg, extra=log_params(self.request))
        return super(UnidadMedidaUpdateView, self).form_valid(form)
class UnidadMedidaDeleteView(DeleteView):
    """Delete a UnidadMedida after checking it has no dependent objects.

    Fix: ``success_url`` redirected to ``'atencion:periodo_list'`` (a
    copy-paste leftover from PeriodoDeleteView); corrected to
    ``'atencion:unidadmedida_list'``, matching the other UnidadMedida views.
    """

    model = UnidadMedida
    success_url = reverse_lazy('atencion:unidadmedida_list')

    @method_decorator(permission_resource_required)
    def dispatch(self, request, *args, **kwargs):
        # Redirect with a friendly error instead of a 404 when missing.
        try:
            self.get_object()
        except Exception as e:
            messages.error(self.request, e)
            log.warning(force_text(e), extra=log_params(self.request))
            return HttpResponseRedirect(self.success_url)
        return super(UnidadMedidaDeleteView, self).dispatch(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        try:
            d = self.get_object()
            deps, msg = get_dep_objects(d)
            print(deps)
            if deps:
                # Refuse to delete while other records still reference it.
                messages.warning(self.request, ('No se puede Eliminar %(name)s') % {
                    "name": capfirst(force_text(self.model._meta.verbose_name))
                    + ' "' + force_text(d) + '"'
                })
                raise Exception(msg)
            d.delete()
            msg = _(' %(name)s "%(obj)s" fue eliminado satisfactoriamente.') % {
                'name': capfirst(force_text(self.model._meta.verbose_name)),
                'obj': force_text(d)
            }
            if not d.id:
                messages.success(self.request, msg)
                log.warning(msg, extra=log_params(self.request))
        except Exception as e:
            messages.error(request, e)
            log.warning(force_text(e), extra=log_params(self.request))
        return HttpResponseRedirect(self.success_url)

    def get(self, request, *args, **kwargs):
        # Allow deletion via plain GET links.
        return self.delete(request, *args, **kwargs)
# class reportes==============================================================================
from highcharts.views import HighChartsLineView, HighChartsBarView
class BarView(HighChartsLineView):
    """Highcharts view charting the number of Consulta records per day.

    NOTE(review): despite the name, this extends HighChartsLineView rather
    than HighChartsBarView -- confirm which chart type is intended.
    """
    # Hard-coded x-axis categories matching the [:3] slice below.
    categories = [1,2,3]

    @property
    def series(self):
        # Group consultas by calendar day (SQL date(fecha)) and count them;
        # only the first 3 groups are charted.
        consultas = Consulta.objects.extra({'atencion':"date(fecha)"}).values('atencion').annotate(count=Count('id'))[:3]
        result = []
        data = []
        names = []
        i = 0
        # NOTE(review): every dict appended below shares the SAME `names` and
        # `data` list objects, which keep growing across iterations -- all
        # series end up with the cumulative values. Presumably one series per
        # day was intended; confirm before relying on this output.
        while i<len(consultas):
            data.append(consultas[i]['count'])
            names.append(consultas[i]['atencion'])
            result.append({'name': names , "data":data })
            i = i+1
        """
        while i < len(consultas):
            data.append(consultas[i]['count'])
            names.append(consultas[i]['atencion'])
            result.append({'name':names , "data": data})
            i = i + 1
        """
        return result
def vista(request):
    """Render the plain 'atencion' report page."""
    template = 'reportes/atencion.html'
    return render(request, template)
| {
"content_hash": "ffc94da2d7674ec957d11358cad8c02a",
"timestamp": "",
"source": "github",
"line_count": 1427,
"max_line_length": 121,
"avg_line_length": 34.73791170287316,
"alnum_prop": 0.6069879566682133,
"repo_name": "upeu-jul-20161-epis-ads2/MedicFast",
"id": "5fa7776e304499a8961fb1bd5e5502a1de6dd155",
"size": "49571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/atencion/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "396107"
},
{
"name": "HTML",
"bytes": "754753"
},
{
"name": "JavaScript",
"bytes": "1847173"
},
{
"name": "PHP",
"bytes": "1097"
},
{
"name": "Python",
"bytes": "458608"
}
],
"symlink_target": ""
} |
from scrapy import signals
class CrawrcourseSpiderMiddleware(object):
    """Spider middleware whose hooks pass everything through unchanged.

    Not all methods need to be defined; Scrapy treats a missing hook as
    "do not modify the passed objects".
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy instantiates the middleware here and wires up signals.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Returning None lets the response continue into the spider.
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every item/request produced by the spider untouched.
        for produced in result:
            yield produced

    def process_spider_exception(self, response, exception, spider):
        # No special handling; Scrapy falls back to its defaults.
        pass

    def process_start_requests(self, start_requests, spider):
        # Pass the spider's initial requests through unchanged.
        for request in start_requests:
            yield request

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| {
"content_hash": "32113b20bcdb50e60cb585002dee04af",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 78,
"avg_line_length": 35.48979591836735,
"alnum_prop": 0.6595744680851063,
"repo_name": "qrsforever/workspace",
"id": "519c05e4ad34b65c57a028d2f1759afa2ab279f9",
"size": "1909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/test/crawl_r_course/crawRCourse/middlewares.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "208"
},
{
"name": "C",
"bytes": "591303"
},
{
"name": "C++",
"bytes": "98511"
},
{
"name": "CLIPS",
"bytes": "52178"
},
{
"name": "HTML",
"bytes": "1780"
},
{
"name": "HiveQL",
"bytes": "13"
},
{
"name": "Java",
"bytes": "381448"
},
{
"name": "Jupyter Notebook",
"bytes": "3148168"
},
{
"name": "Makefile",
"bytes": "108609"
},
{
"name": "Python",
"bytes": "991124"
},
{
"name": "R",
"bytes": "22072"
},
{
"name": "Ruby",
"bytes": "7046"
},
{
"name": "Shell",
"bytes": "119856"
},
{
"name": "TSQL",
"bytes": "5817"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Settings module selection is left to the DJANGO_SETTINGS_MODULE
    # environment variable; uncomment the line below to force the
    # environment-specific (development) settings file instead.
    # os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wishlister.settings.development")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| {
"content_hash": "e6310216e416d8068d2f89113cd70461",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 88,
"avg_line_length": 35.666666666666664,
"alnum_prop": 0.7383177570093458,
"repo_name": "DustinHolden/wishlister",
"id": "6a4836cf49f2a400cabc76ed9a25f680495e5a4d",
"size": "450",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/manage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3378"
},
{
"name": "HTML",
"bytes": "17221"
},
{
"name": "JavaScript",
"bytes": "363"
},
{
"name": "Python",
"bytes": "33826"
}
],
"symlink_target": ""
} |
from tempest.api.identity import base
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
class ServicesTestJSON(base.BaseIdentityV3AdminTest):
    """Test keystone services"""

    def _del_service(self, service_id):
        # Remove a service created by a test and verify it is really gone.
        self.services_client.delete_service(service_id)
        self.assertRaises(lib_exc.NotFound, self.services_client.show_service,
                          service_id)

    @decorators.attr(type='smoke')
    @decorators.idempotent_id('5193aad5-bcb7-411d-85b0-b3b61b96ef06')
    def test_create_update_get_service(self):
        """Test creating, updating and getting of keystone service"""
        # Create a service with random name/type/description.
        svc_name = data_utils.rand_name('service')
        svc_type = data_utils.rand_name('type')
        svc_desc = data_utils.rand_name('description')
        created = self.services_client.create_service(
            type=svc_type, name=svc_name, description=svc_desc)['service']
        self.addCleanup(self._del_service, created['id'])
        self.assertIsNotNone(created['id'])
        self.assertDictContainsSubset(
            {'name': svc_name, 'type': svc_type, 'description': svc_desc},
            created)
        # Update the description and make sure it actually changed.
        svc_id = created['id']
        new_desc = data_utils.rand_name('desc2')
        updated = self.services_client.update_service(
            svc_id, description=new_desc)['service']
        self.assertNotEqual(created['description'], updated['description'])
        # Fetch the service again and compare with the updated payload.
        fetched = self.services_client.show_service(svc_id)['service']
        self.assertEqual(updated['description'], fetched['description'])
        self.assertDictContainsSubset(updated, fetched)

    @decorators.idempotent_id('d1dcb1a1-2b6b-4da8-bbb8-5532ef6e8269')
    def test_create_service_without_description(self):
        """Create a keystone service only with name and type"""
        svc_name = data_utils.rand_name('service')
        svc_type = data_utils.rand_name('type')
        created = self.services_client.create_service(
            type=svc_type, name=svc_name)['service']
        self.addCleanup(self.services_client.delete_service, created['id'])
        self.assertDictContainsSubset(
            {'name': svc_name, 'type': svc_type}, created)

    @decorators.idempotent_id('e55908e8-360e-439e-8719-c3230a3e179e')
    def test_list_services(self):
        """Create, List, Verify and Delete Keystone Services"""
        created_ids = list()
        created_types = list()
        for _ in range(3):
            svc_name = data_utils.rand_name(self.__class__.__name__ + '-Service')
            svc_type = data_utils.rand_name(self.__class__.__name__ + '-Type')
            svc = self.services_client.create_service(
                type=svc_type, name=svc_name)['service']
            self.addCleanup(self.services_client.delete_service,
                            svc['id'])
            created_ids.append(svc['id'])
            created_types.append(svc_type)
        # Every service we created must appear in the listing.
        listed = self.services_client.list_services()['services']
        listed_ids = [entry['id'] for entry in listed]
        matched = [sid for sid in listed_ids if sid in created_ids]
        self.assertEqual(len(matched), len(created_ids))
        # Filtering by type must return exactly the matching service.
        for svc_type in created_types:
            by_type = self.services_client.list_services(
                type=svc_type)['services']
            self.assertEqual(1, len(by_type))
            self.assertEqual(svc_type, by_type[0]['type'])
| {
"content_hash": "8aa51518ce0daefe1c1b74a37b21b064",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 79,
"avg_line_length": 45.86363636363637,
"alnum_prop": 0.6479187314172448,
"repo_name": "cisco-openstack/tempest",
"id": "a649d2757021ee5df2b1983d8ea6920398103d1f",
"size": "4672",
"binary": false,
"copies": "1",
"ref": "refs/heads/proposed",
"path": "tempest/api/identity/admin/v3/test_services.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4431271"
},
{
"name": "Shell",
"bytes": "7435"
}
],
"symlink_target": ""
} |
import sys
import pytest
from fs import open_fs
from organize.filters import MacOSTags
@pytest.mark.skipif(sys.platform != "darwin", reason="runs only on macOS")
def test_macos_filter():
    """End-to-end check of the MacOSTags filter against real Finder tags."""
    import macos_tags

    tags = MacOSTags("Invoice (red)", "* (green)")
    with open_fs("temp://", writeable=True, create=True) as temp:

        def make_tagged_file(filename, label, color):
            # Create the file in the temp fs and attach a Finder tag to it.
            temp.touch(filename)
            syspath = temp.getsyspath(filename)
            macos_tags.add(macos_tags.Tag(label, color), file=syspath)

        make_tagged_file("My-Invoice.pdf", "Invoice", macos_tags.Color.RED)
        make_tagged_file("Another-File.pdf", "Urgent", macos_tags.Color.GREEN)
        make_tagged_file("Pic.jpg", "Pictures", macos_tags.Color.BLUE)

        assert tags.run(fs=temp, fs_path="My-Invoice.pdf").matches
        assert tags.run(fs=temp, fs_path="Another-File.pdf").matches
        assert not tags.run(fs=temp, fs_path="Pic.jpg").matches
def test_macos_tags_matching():
    """Pattern matching of 'Name (color)' strings, with '*' as a wildcard."""
    tags = MacOSTags("Invoice (*)", "* (red)", "Test (green)")
    cases = [
        (["Invoice (none)"], True),
        (["Invoice (green)"], True),
        (["Voice (green)"], False),
        (["Voice (red)"], True),
        (["Test (blue)"], False),
        (["Test (green)"], True),
    ]
    for tag_list, expected in cases:
        assert tags.matches(tag_list) == expected
| {
"content_hash": "3c82f397880a0148a7488541191580c5",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 84,
"avg_line_length": 36.68421052631579,
"alnum_prop": 0.6456241032998565,
"repo_name": "tfeldmann/organize",
"id": "26c5197fab580d5ee029dd034b07372d6e1ad672",
"size": "1394",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/filters/test_macos_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "207831"
}
],
"symlink_target": ""
} |
import sublime
from sublime_plugin import WindowCommand
from ..tools import sampy_manager
class upiotSampyHelpCommand(WindowCommand):
    """Sublime Text command that shows the sampy help screen."""

    def run(self):
        # Run asynchronously (0 ms delay) so the editor UI stays responsive.
        sublime.set_timeout_async(sampy_manager.help, 0)
| {
"content_hash": "87afe7232c9135fd48e466b14bde1892",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 56,
"avg_line_length": 21.4,
"alnum_prop": 0.7663551401869159,
"repo_name": "gepd/uPiotMicroPythonTool",
"id": "c0556a81636e21390295f4379e557af886dbb369",
"size": "1389",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "commands/sampy_help.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "414475"
}
],
"symlink_target": ""
} |
from JumpScale import j
from JumpScale.portal.portal import exceptions
from JumpScale.servers.serverbase.Exceptions import RemoteException
import urllib.request
import urllib.parse
import urllib.error
import requests
class PortalRest():
def __init__(self, webserver):
self.ws = webserver
self.logger = j.logger.get("j.portal.tools")
    def validate(self, auth, ctx):
        """Validate request parameters against the route spec and check auth.

        Fills defaults for missing optional params and coerces declared
        'int'/'bool' params in place on ``ctx.params``. Returns an
        ``(ok, message)`` tuple; on failure it also starts a 400/401
        response. Raises exceptions.BadRequest for missing or
        un-coercible required params.
        """
        if ctx.params == "":
            msg = 'No parameters given to actormethod.'
            ctx.start_response('400 Bad Request', [])
            return False, msg
        # A 'guest' session user means no valid auth key was presented.
        if auth and ctx.env['beaker.session']['user'] == 'guest':
            msg = 'NO VALID AUTHORIZATION KEY GIVEN, use get param called key (check key probably auth error).'
            ctx.start_response('401 Unauthorized', [])
            return False, msg
        params = self.ws.routes[ctx.path]['params']
        for key, param in params.items():
            if key not in ctx.params:
                if param['optional']:
                    # Optional param absent: fall back to its declared default.
                    ctx.params[key] = param['default']
                else:
                    raise exceptions.BadRequest('Param with name:%s is missing.' % key)
            elif param['type'] == 'int' and not isinstance(ctx.params[key], (int, type(None))):
                # Coerce string values to int; None passes through untouched.
                try:
                    ctx.params[key] = int(ctx.params[key])
                except ValueError:
                    raise exceptions.BadRequest(
                        'Value of param %s not correct needs to be of type %s' %
                        (key, param['type']))
            elif param['type'] == 'bool' and not isinstance(ctx.params[key], (bool, type(None))):
                # Coerce string values to bool via the JumpScale type system.
                try:
                    ctx.params[key] = j.data.types.bool.fromString(ctx.params[key])
                except ValueError:
                    raise exceptions.BadRequest(
                        'Value of param %s not correct needs to be of type %s' %
                        (key, param['type']))
        return True, ""
def restPathProcessor(self, path):
"""
Function which parse a path, returning True or False depending on
successfull parsing, a error message and a dict of parameters.
When successfull the params dict contains the path elements otherwise it
contains if provided the actorname and appname.
"""
# self.logger.info("Process path %s" % path, 9)
params = {}
while path != "" and path[0] == "/":
path = path[1:]
while path != "" and path[-1] == "/":
path = path[:-1]
if path.strip() == "":
return (False, "Bad input path was empty. Format of url need to be http://$ipaddr/rest/$appname/$actorname/$actormetho?...", {})
paths = path.split("/")
if len(paths) < 3:
msginfo = "Format of url need to be http://$ipaddr/rest/$appname/$actorname/$actormethod?...\n\n"
if len(paths) > 0:
appname = paths[0]
else:
appname = ""
actor = ""
if len(paths) > 1:
actor = paths[1]
else:
actor = ""
params["appname"] = appname
params["actorname"] = actor
return (False, msginfo, params)
params["paths"] = paths
return (True, "", params)
def restextPathProcessor(self, path):
self.logger.info("Process path %s" % path, 9)
params = {}
while path != "" and path[0] == "/":
path = path[1:]
while path != "" and path[-1] == "/":
path = path[:-1]
if path.strip() == "":
return (False, "Bad input path was empty. Format of url need to be http://$ipaddr/restext/$appname/$modelname/$args", {})
paths = path.split("/")
if len(paths) < 2:
msginfo = "Format of url need to be http://$ipaddr/restext/$appname/$modelname/$args...\n\n"
if len(paths) > 0:
appname = paths[0]
else:
appname = ""
modelname = ""
if len(paths) > 1:
modelname = paths[1]
else:
modelname = ""
params["appname"] = appname
params["modelname"] = modelname
return (False, msginfo, params)
params["paths"] = paths
return (True, "", params)
    def restRouter(self, env, start_response, path, paths, ctx, ext=False, routekey=None, human=False):
        """Resolve the route key for a parsed REST path, validating and authorizing.

        Returns ``(True, ctx, routekey)`` on success, or ``(False, ctx, body)``
        where ``body`` is the already-formatted error response.
        """
        if not routekey:
            # Default route key: <app>_<actor>_<method>.
            routekey = "%s_%s_%s" % (paths[0], paths[1], paths[2])
        routes = self.ws.routes
        if routekey not in routes:
            # Lazily activate the actor so its routes get registered.
            self.activateActor(paths[0], paths[1])
        if routekey not in routes:
            # Fall back to the HTTP-verb-prefixed route key.
            routekey = "GET_%s" % routekey
        if routekey in routes:
            # Pick the output format: human page, route default, or explicit.
            if human:
                ctx.fformat = "human"
            elif("format" not in ctx.params):
                ctx.fformat = routes[routekey]['returnformat']
            else:
                ctx.fformat = ctx.params["format"]
            ctx.path = routekey
            ctx.fullpath = path
            ctx.application = paths[0]
            ctx.actor = paths[1]
            ctx.method = paths[2]
            auth = routes[routekey]['auth']
            # Validation & authorization (user needs to be known already).
            resultcode, msg = self.validate(auth, ctx)
            if resultcode is False:
                if human:
                    # Render a human-readable validation-error page.
                    params = {}
                    params["error"] = "Incorrect Request: %s" % msg
                    params["appname"] = ctx.application
                    params["actorname"] = ctx.actor
                    params["method"] = ctx.method
                    page = self.ws.pageprocessor.returnDoc(ctx, start_response, "system",
                                                           "restvalidationerror", extraParams=params)
                    return (False, ctx, [str(page)])
                else:
                    return (False, ctx, msg)
            else:
                return (True, ctx, routekey)
        else:
            # No matching route at all: answer 404 with service info (human)
            # or the raw message (machine).
            msg = "Could not find method, path was %s" % (path)
            appname = paths[0]
            actor = paths[1]
            contentType, data = self.ws.pageprocessor.reformatOutput(ctx, msg, restreturn=not human)
            ctx.start_response("404 Not Found", [('Content-Type', contentType)])
            if human:
                page = self.getServicesInfo(appname, actor)
                return (False, ctx, self.ws.pageprocessor.raiseError(ctx=ctx, msg=msg, msginfo=str(page)))
            else:
                contentType, data = self.ws.pageprocessor.reformatOutput(ctx, msg, restreturn=False)
                return (False, ctx, data)
    def execute_rest_call(self, ctx, routekey, ext=False):
        """Invoke the actor method registered for *routekey*.

        Returns ``(True, result)`` on success or ``(False, error_page)`` on
        an unexpected failure; maps known remote/connection errors onto
        portal HTTP exceptions.
        """
        routes = self.ws.routes
        try:
            method = routes[routekey]['func']
            result = method(ctx=ctx, **ctx.params)
            return (True, result)
        except RemoteException as error:
            # A remote KeyError means a missing object: translate to 404.
            if error.eco.get('exceptionclassname') == 'KeyError':
                data = error.eco['data'] or {'categoryname': 'unknown', 'key': '-1'}
                raise exceptions.NotFound("Could not find %(key)s of type %(categoryname)s" % data)
            raise
        except requests.exceptions.ConnectionError as error:
            # Backend unreachable: surface the connection error message.
            message = error.args[0]
            raise exceptions.Error(message)
        except Exception as errorObject:
            # Anything else: log via the error condition handler and return
            # a rendered error page instead of raising.
            eco = j.errorconditionhandler.processPythonExceptionObject(errorObject)
            msg = "Execute method %s failed." % (routekey)
            return (False, self.ws.pageprocessor.raiseError(ctx=ctx, msg=msg, errorObject=eco))
    def processor_rest(self, env, start_response, path, human=True, ctx=False):
        """
        Original REST processor (GET statements).

        e.g. http://localhost/restmachine/system/contentmanager/notifySpaceModification?name=www_openvstorage&authkey=1234

        Parses *path*, routes it, executes the actor method and formats the
        result as either a human-readable page or raw REST output.
        """
        if ctx is False:
            raise RuntimeError("ctx cannot be empty")
        try:
            def respond(contentType, msg):
                # Send the 200 header only when a content type was resolved.
                if contentType:
                    ctx.start_response('200 OK', [('Content-Type', contentType)])
                return msg
            # 1) Parse /app/actor/method out of the URL.
            success, msg, params = self.restPathProcessor(path)
            if not success:
                params["error"] = msg
                if human:
                    page = self.ws.pageprocessor.returnDoc(ctx, start_response, "system", "rest",
                                                           extraParams=params)
                    return [str(page)]
                else:
                    httpcode = "404 Not Found"
                    contentType, data = self.ws.pageprocessor.reformatOutput(ctx, msg, restreturn=True)
                    ctx.start_response(httpcode, [('Content-Type', contentType)])
                    return data
            paths = params['paths']
            # 2) Resolve the route key (also validates params & auth).
            success, ctx, routekey = self.restRouter(env, start_response, path,
                                                     paths, ctx, human=human)
            if not success:
                # restRouter already produced the formatted error body.
                return routekey
            # 3) Execute the actor method.
            success, result = self.execute_rest_call(ctx, routekey)
            if not success:
                return result
            # 4) Format the result for the caller.
            if human:
                ctx.format = "json"
                params = {}
                params["result"] = result
                return [str(self.ws.pageprocessor.returnDoc(
                    ctx, start_response, "system", "restresult", extraParams=params))]
            else:
                contentType, result = self.ws.pageprocessor.reformatOutput(ctx, result)
                return respond(contentType, result)
        except Exception as errorObject:
            # Last-resort handler: turn any exception into an error page.
            eco = j.errorconditionhandler.processPythonExceptionObject(errorObject)
            if ctx is False:
                print("NO webserver context yet, serious error")
                eco.process()
                print(eco)
            else:
                return self.ws.pageprocessor.raiseError(ctx, errorObject=eco)
    def processor_restext(self, env, start_response, path, human=True, ctx=False):
        """
        rest processor gen 2 (not used by the original get code).

        Maps HTTP verbs onto OSIS model operations:
        GET -> get/list/search, POST/PUT -> create/update, DELETE -> delete.

        :param env: WSGI environment dict
        :param start_response: WSGI start_response callable
        :param path: request path; expected form <app>/<model>[/<objectid>]
        :param human: when True render human-readable pages, else json
        :param ctx: request context object; must always be provided
        """
        if ctx is False:
            raise RuntimeError("ctx cannot be empty")
        try:
            self.logger.info("Routing request to %s" % path, 9)
            def respond(contentType, msg):
                # Emit the 200 header (when a content type is known) and
                # return the body to the WSGI layer.
                if contentType:
                    start_response('200 OK', [('Content-Type', contentType)])
                return msg
            success, message, params = self.restextPathProcessor(path)
            if not success:
                params["error"] = message
                if human:
                    page = self.ws.pageprocessor.returnDoc(ctx, start_response, "system", "rest",
                                                           extraParams=params)
                    return [str(page)]
                else:
                    return self.ws.pageprocessor.raiseError(ctx, message)
            requestmethod = ctx.env['REQUEST_METHOD']
            paths = params['paths']
            appname = paths[0]
            model = paths[1]
            objectid = None
            if len(paths) > 2:
                # Optional third path segment is the numeric object id.
                objectid = int(paths[2])
                ctx.params['id'] = objectid
            osiscl = j.clients.osis.getCategory(self.ws.osis, appname, model)
            # NOTE(review): osismap is never used below -- presumably a
            # leftover; candidate for removal.
            osismap = {'GET': ['get', 'list', 'search'], 'POST': [''], 'DELETE': ['delete']}
            if requestmethod == 'GET':
                result = self._handle_get(ctx, osiscl, objectid)
            elif requestmethod in ('POST', 'PUT'):
                result = self._handle_post(ctx, osiscl, objectid)
            elif requestmethod == 'DELETE':
                result = self._handle_delete(ctx, osiscl, objectid)
            else:
                start_response('405 Method not allowed', [('Content-Type', 'text/html')])
                return 'Requested method is not allowed'
            if human:
                ctx.fformat = "json"
                params = {}
                params["result"] = result
                return [str(self.ws.pageprocessor.returnDoc(
                    ctx, start_response, "system", "restresult", extraParams=params))]
            else:
                ctx.fformat = 'jsonraw'
                contentType, result = self.ws.pageprocessor.reformatOutput(ctx, result)
                return respond(contentType, result)
        except Exception as errorObject:
            # Last-resort error handling: convert to an error condition
            # object; without a context we can only log it.
            eco = j.errorconditionhandler.processPythonExceptionObject(errorObject)
            if ctx is False:
                print("NO webserver context yet, serious error")
                eco.process()
                print(eco)
            else:
                return self.ws.pageprocessor.raiseError(ctx, errorObject=eco)
def _handle_get(self, ctx, osiscl, objectid):
if objectid: # get object
result = osiscl.get(objectid)
return result.dump()
else: # list or search
if ctx.env['QUERY_STRING']: # search
query = self._get_query_string(ctx)
return osiscl.search(query)[1:]
else: # list
return osiscl.search({})[1:]
    def _handle_delete(self, ctx, osiscl, objectid):
        # Serve a REST DELETE: delegate removal of the object with the
        # given id to the OSIS client and return its result.
        return osiscl.delete(objectid)
def _handle_post(self, ctx, osiscl, objectid):
fields = ctx.params
if objectid: # update
obj = osiscl.get(objectid)
else: # new
obj = osiscl.new()
if 'id' in fields:
fields.pop('id')
for field, value in fields.items():
setattr(obj, field, value)
return osiscl.set(obj)
def _get_query_string(self, ctx):
fields = dict()
if ctx.env['QUERY_STRING']:
queryparts = ctx.env['QUERY_STRING'].split('&')
for querypart in queryparts:
querypart = urllib.parse.unquote(querypart)
field, value = querypart.split('=')
fields[field] = int(value) if value.isdigit() else value
return fields
def activateActor(self, appname, actor):
if not "%s_%s" % (appname, actor) in list(self.ws.actors.keys()):
# need to activate
try:
result = self.ws.actorsloader.getActor(appname, actor)
except Exception as e:
eco = j.errorconditionhandler.processPythonExceptionObject(e)
eco.process()
print(eco)
return False
if result is None:
# there was no actor
return False
| {
"content_hash": "0aa8c96d5f57dcd9156d3cae7b176490",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 140,
"avg_line_length": 42.250696378830085,
"alnum_prop": 0.522745253164557,
"repo_name": "Jumpscale/jumpscale_portal8",
"id": "4ec96e1d7bf48a85479caa4ae3219968c0d915f0",
"size": "15168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/portal/portal/PortalRest.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "482591"
},
{
"name": "HTML",
"bytes": "313255"
},
{
"name": "JavaScript",
"bytes": "8815099"
},
{
"name": "PHP",
"bytes": "205758"
},
{
"name": "Python",
"bytes": "974012"
},
{
"name": "Ruby",
"bytes": "28925"
},
{
"name": "Shell",
"bytes": "291"
}
],
"symlink_target": ""
} |
import sys
import json
import archive_api
argl = len(sys.argv)
if argl > 3:
print "null"
else:
query = archive_api.archive(sys.argv[1],sys.argv[2])
query = json.dumps(query)
print query | {
"content_hash": "e20a9fc99c7e4453272c3b3b556728e4",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 56,
"avg_line_length": 16.833333333333332,
"alnum_prop": 0.6732673267326733,
"repo_name": "alexjstubbs/ignition-dev",
"id": "0c361389dd344b99753c36d773e0b2752b121f1f",
"size": "275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "local/api/database/py/archive_api_call.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "78"
},
{
"name": "CSS",
"bytes": "112738"
},
{
"name": "HTML",
"bytes": "529193"
},
{
"name": "JavaScript",
"bytes": "2536182"
},
{
"name": "Makefile",
"bytes": "4863"
},
{
"name": "Python",
"bytes": "1564"
},
{
"name": "Shell",
"bytes": "9835"
}
],
"symlink_target": ""
} |
import inspect
import os
import jinja2
import licenses
from class_definition import ClassDefinition, AttrDefinition, PackageDefinition, FunctionDefinition
from mcfw.consts import MISSING
from mcfw.properties import simple_types, get_members, object_factory
from .custom import filters, csharp_filters, java_filters, objc_filters
# Jinja environment loading templates from this package's local
# ``templates`` directory.
JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader([os.path.join(os.path.dirname(__file__), 'templates')]))
# Register every function of the custom filter modules as a Jinja filter.
for module in (filters, csharp_filters, java_filters, objc_filters):
    for name, func in inspect.getmembers(module, lambda x: inspect.isfunction(x)):
        JINJA_ENVIRONMENT.filters[name] = func
# Marker line separating the individual output files in the rendered
# template stream (consumed by _process_gen_file).
DELIMITER = '*' * 50
def _get_collection_type(type_):
return isinstance(type_, list) and list.__name__ or None
def _get_type_string(type_):
    """Return the qualified name (module.Name) of *type_*, or the bare
    name for simple builtin types."""
    real_type = _get_real_type(type_)
    if real_type in simple_types:
        return real_type.__name__
    return '%s.%s' % (real_type.__module__, real_type.__name__)
def _get_real_type(type_):
    """Unwrap list/tuple type specifications to the element type and
    reject plain ``str`` usage (this code base mandates ``unicode``)."""
    if type_ == str:
        raise RuntimeError('str type not allowed (use unicode)')
    if type_ in [(str, unicode), (unicode, str)]:
        raise RuntimeError('str+unicode tuple type found (use unicode)')
    return type_[0] if isinstance(type_, (list, tuple)) else type_
def _sort_values_by_keys(mapping):
return [mapping[k] for k in sorted(mapping.keys())]
def populate_class_def_fields(class_def, type_, prop, name):
    """Append an AttrDefinition describing property *prop* (named *name*,
    of type *type_*) to *class_def*."""
    attr_def = AttrDefinition(
        name=name,
        type_=_get_type_string(type_),
        collection_type=list.__name__ if prop.list else None,
        doc=prop.doc,
        default=prop.default)
    class_def.fields.append(attr_def)
def build_class_definition(type_, stash):
    """Build a ClassDefinition for *type_*, first registering any complex
    property types into *stash*.

    Properties whose type is an object_factory are skipped
    (XXX: implement when needed).
    """
    complex_props, simple_props = get_members(type_)
    # Ensure definitions of nested complex types exist before the class
    # that references them.
    for _, prop in complex_props:
        if not isinstance(prop.type, object_factory):
            process_type(prop.type, stash)
    class_def = ClassDefinition(package=type_.__module__, name=type_.__name__, doc=type_.__doc__)
    for name, prop in complex_props + simple_props:
        if not isinstance(prop.type, object_factory):
            populate_class_def_fields(class_def, prop.type, prop, name)
    return class_def
def process_type(type_, stash):
    """Register a ClassDefinition for *type_* in *stash* unless it is a
    simple type or already present."""
    type_ = _get_real_type(type_)
    # NOTE(review): stash is keyed by type *strings* (_get_type_string),
    # so ``type_ in stash`` with a raw type never matches -- confirm
    # whether that membership test is intended.
    if type_ not in simple_types and type_ not in stash:
        stash[_get_type_string(type_)] = build_class_definition(type_, stash)
def check_function_validity(f, max_argument_count):
    """Validate that *f* carries the meta information required for code
    generation and that its signature is supported.

    Raises ValueError on any violation.
    """
    if not hasattr(f, "meta") or "kwarg_types" not in f.meta or "return_type" not in f.meta:
        raise ValueError("Cannot inspect function %s. Meta data is missing" % f)
    return_type = f.meta.get('return_type')
    # Lists and simple types are not valid return types: only TOs.
    if _get_collection_type(return_type) == list.__name__:
        raise ValueError('List return type not supported')
    from custom.filters import SIMPLE_TYPES
    if return_type in SIMPLE_TYPES:
        raise ValueError("Only TOs are supported as return type")
    if len(f.meta.get("kwarg_types")) > max_argument_count:
        raise ValueError("Only %s argument(s) allowed" % max_argument_count)
def process_function(f, stash, max_argument_count):
    """Validate *f* and register its return type plus every argument type
    in *stash*."""
    check_function_validity(f, max_argument_count)
    # Return type first, then each keyword-argument type.
    process_type(f.meta["return_type"], stash)
    for arg_type in f.meta["kwarg_types"].itervalues():
        process_type(arg_type, stash)
def generate_TOs(mapping, client_mapping, max_argument_count):
    """Collect ClassDefinitions for every transfer object referenced by
    the server (*mapping*) and client (*client_mapping*) functions,
    returned sorted by qualified type name."""
    tos = dict()
    combined = dict(mapping)
    combined.update(client_mapping)
    for func in combined.itervalues():
        process_function(func, tos, max_argument_count)
    return _sort_values_by_keys(tos)
def generate_CAPI_packages(capi_functions, max_argument_count):
    # Thin alias kept for call-site readability: client (CAPI) packages.
    return generate_packages(capi_functions, max_argument_count)
def generate_API_packages(api_functions, max_argument_count):
    # Thin alias kept for call-site readability: server (API) packages.
    return generate_packages(api_functions, max_argument_count)
# TODO: should refactor and reuse the type analysis code in this method and in build_class_definition
def generate_packages(functions, max_argument_count):
    """Group *functions* (qualified name -> callable) into
    PackageDefinitions containing fully described FunctionDefinitions,
    returned sorted by package name."""
    stash = dict()
    for full_function_name in sorted(functions):
        f = functions[full_function_name]
        check_function_validity(f, max_argument_count)
        package, short_function_name = full_function_name.rsplit('.', 1)
        if package not in stash:
            stash[package] = PackageDefinition(package)
        func = FunctionDefinition(short_function_name)
        stash[package].functions.append(func)
        # fargs[0] appears to hold the argument names in declaration
        # order; kwarg_types maps each name to its type.
        arg_names = f.meta.get('fargs')[0]
        arg_types = f.meta.get('kwarg_types')
        for arg in arg_names:
            func.args.append(AttrDefinition(arg,
                                            _get_type_string(arg_types[arg]),
                                            _get_collection_type(arg_types[arg])))
        func.rtype = AttrDefinition(type_=_get_type_string(f.meta.get('return_type')))
    return _sort_values_by_keys(stash)
def render(tos, api_packages, capi_packages, target):
    """Render the Jinja template for *target* and split the output stream
    into the individual generated source files (paths relative to the
    repository root)."""
    # License header template lives in the repository's tools directory,
    # resolved relative to this source file.
    license_text = _read_file(
        os.path.join(os.path.dirname(__file__), "..", "..", "..", "tools", "change_license", "gig_apache_license_header.tmpl"))
    # Repository root: three levels up from this file.
    path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
    tmpl = '%s.tmpl' % target
    license_string = licenses.get_license(license_text, target)
    context = {'DELIMITER': DELIMITER, "LICENSE": license_string, 'tos': tos, 'CS_API_packages': api_packages,
               'SC_API_packages': capi_packages, 'path': path, 'MISSING': MISSING}
    jinja_template = JINJA_ENVIRONMENT.get_template(tmpl)
    gen_content = jinja_template.render(context)
    # _write_file(gen_content, "%s.gen.tmp" % os.path.join(os.path.dirname(__file__), target))
    _process_gen_file(gen_content, path)
def _process_gen_file(gen_content, path):
    """Split the rendered template stream into individual output files.

    DELIMITER lines separate sections; the first non-empty line of each
    section is the target file path (relative to *path*), the remaining
    lines are that file's content.
    """
    current_file = None
    current_content = []
    for line in gen_content.splitlines():
        if line == DELIMITER:
            # Section finished: flush the collected content, if any.
            if current_file:
                _write_file('\n'.join(current_content), current_file)
            current_file = None
            current_content = []
        elif current_file:
            current_content.append(line)
        elif line:
            # First non-empty line of a new section names the target file.
            current_file = os.path.join(path, line)
def _write_file(content, file_name):
path = os.path.dirname(file_name)
if path and not os.path.exists(path):
os.makedirs(path)
f = open(file_name, "w")
try:
line_sep = os.linesep
if content[-1] != line_sep:
content += line_sep
f.write(content)
finally:
f.close()
print "gen file: %s" % file_name
def _read_file(file_name):
if not os.path.exists(file_name):
raise RuntimeError("File '%s' does not exist" % os.path.abspath(file_name))
f = open(file_name, "r")
try:
return f.read()
finally:
f.close()
def generate(target, mapping, client_mapping, max_argument_count=1):
print "generating", target
render(generate_TOs(mapping, client_mapping, max_argument_count),
generate_API_packages(mapping, max_argument_count),
generate_CAPI_packages(client_mapping, max_argument_count),
target)
| {
"content_hash": "17e88f89ddff39fb355c1e62aeed4071",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 127,
"avg_line_length": 34.990566037735846,
"alnum_prop": 0.6528713939067134,
"repo_name": "rogerthat-platform/rogerthat-backend",
"id": "8b05c3e69a031cae8f2703cb3b8aa2f8682a3c81",
"size": "8051",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src-generator/generator/generator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "687088"
},
{
"name": "HTML",
"bytes": "948569"
},
{
"name": "Java",
"bytes": "521272"
},
{
"name": "JavaScript",
"bytes": "1830068"
},
{
"name": "Python",
"bytes": "4220314"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration 0001: creates the bazar ``Entity`` and
    ``Note`` tables (auto-generated; do not edit the frozen ORM dict by
    hand)."""
    def forwards(self, orm):
        # Adding model 'Entity'
        db.create_table(u'bazar_entity', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('kind', self.gf('django.db.models.fields.CharField')(default='customer', max_length=40)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('adress', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('phone', self.gf('django.db.models.fields.CharField')(max_length=15, blank=True)),
            ('town', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)),
            ('zipcode', self.gf('django.db.models.fields.CharField')(max_length=6, blank=True)),
        ))
        db.send_create_signal(u'bazar', ['Entity'])
        # Adding model 'Note'
        db.create_table(u'bazar_note', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('entity', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bazar.Entity'], null=True, blank=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=150)),
            ('content', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal(u'bazar', ['Note'])
    def backwards(self, orm):
        # Deleting model 'Entity'
        db.delete_table(u'bazar_entity')
        # Deleting model 'Note'
        db.delete_table(u'bazar_note')
    # Frozen ORM state: the model snapshot South uses to build the fake
    # ORM passed to forwards()/backwards().
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'bazar.entity': {
            'Meta': {'object_name': 'Entity'},
            'adress': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'default': "'customer'", 'max_length': '40'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            'town': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}),
            'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'})
        },
        u'bazar.note': {
            'Meta': {'object_name': 'Note'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'content': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bazar.Entity']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
        }
    }
    complete_apps = ['bazar']
"content_hash": "77834c33cd5d6c0fef02eb3db0705de9",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 187,
"avg_line_length": 67.82352941176471,
"alnum_prop": 0.5608970387808202,
"repo_name": "emencia/emencia-django-bazar",
"id": "70311f36ad9ee3855a7995eed62cc700d2720f90",
"size": "8095",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bazar/south_migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "340534"
},
{
"name": "HTML",
"bytes": "12135"
},
{
"name": "JavaScript",
"bytes": "11842"
},
{
"name": "Makefile",
"bytes": "995"
},
{
"name": "Python",
"bytes": "83916"
},
{
"name": "Ruby",
"bytes": "981"
}
],
"symlink_target": ""
} |
# Read two grades from stdin and print their weighted average
# (second grade counts weight 3, first grade weight 2).
first_grade = float(input('primeira nota'))
second_grade = float(input('segunda nota'))
weighted_average = (first_grade * 2 + second_grade * 3) / 5
print(weighted_average)
| {
"content_hash": "2f511e4e9d70ddc3b6e1e2bad79b7e13",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 37,
"avg_line_length": 26,
"alnum_prop": 0.6538461538461539,
"repo_name": "jucimarjr/IPC_2017-1",
"id": "7c6ce1f995faf902700a5d9eacfc62c9e757358b",
"size": "864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lista1.5/lista1.5_questao04.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "2978"
},
{
"name": "Python",
"bytes": "525677"
}
],
"symlink_target": ""
} |
""" 1d array of prediction values with properties (labels, reference to the predictor)
"""
import numpy
from pySPACE.resources.data_types import base
class PredictionVector(base.BaseData):
    """ Represents a prediction vector

    It contains a label, a prediction and a reference to the predictor.
    I doesn't matter if it uses one or multiple predictions.
    The object might be even used for regression, where no label is needed.

    In contrast to :class:`~pySPACE.resources.data_types.time_series.TimeSeries`
    or :class:`~pySPACE.resources.data_types.feature_vector.FeatureVector`
    objects, prediction vectors are currently generated in a node chain
    with classifiers for example and not loaded.
    For evaluation the
    :class:`~pySPACE.missions.nodes.sink.classification_performance_sink.PerformanceSinkNode`
    can be used to evaluate the predictions.
    For multiple predictions, nodes from the
    :mod:`~pySPACE.missions.nodes.classification.ensemble`
    module can be used.

    For creating a prediction vector, there are four

    **Parameters**

        :input_array:
            The prediction vector is (for historical reasons) a 2d numpy array
            with some additional (mode important parameters).
            The content of the input_array should be/is the same
            as used in the *prediction* parameter.
            If you do not specify this parameter, it is generated from
            the *prediction* and vice versa.
            Any object, which can be converted to a 2d-numpy array can be
            used to specify this parameter.

        :label:
            The label normally gives a semantic meaning to the prediction value
            and is a string, e.g., "ill" or "Target".
            For regression this parameter can be ignored and is set to None.
            For multiple predictions, it is a list.

        :prediction:
            For regression, this is the regression value and
            for binary classification it is the prediction value.
            For SVMs it can be any real value and for algorithms
            with probabilistic output it should be the probability
            of the respective data belonging to the second and not the first
            class or vice versa.
            For multiple predictions this is not a single number,
            but a list of floats.
            The prediction value is used to generate the *input_array*
            parameter or vice versa.

        :predictor:
            For accessing special parameters of the decision algorithm,
            this parameter is used (default: None).
            It is typically a pointer to the Node, which created the vector.
            For multiple predictions, a list might be used, which might be
            replaced during the processing by an ensemble classifier.
            One main usage is when reading out additional metrics in the
            evaluation process like convergence behaviour or weights of
            a linear classifier.

    The last 3 parameters are directly to object variables with the same name.
    Currently, the object is by default like an array, with access to
    the different other parameters.
    For future developments, only these parameters should be used.

    .. todo:: Implement a method _generate_tag for BaseData type (if desired)
    .. todo:: Eliminate 2d-array behaviour incl. modifications in some nodes

    :Author: Mario Micheal Krell
    :Created: 2010/07/28
    """
    def __new__(subtype, input_array=None, label=None, prediction=None,
                predictor=None, tag=None, **kwargs):
        """ Create the object including several type mappings """
        # Input array is not an already formed ndarray instance
        # We first cast to be our class type
        if input_array is None:
            # Derive the 2d input_array from the prediction parameter.
            if type(prediction) == list:
                input_array = [prediction]
            elif type(prediction) == numpy.ndarray:
                input_array = numpy.atleast_2d(prediction)
            elif prediction is None:
                raise TypeError(
                    "You should at least give a prediction value " +
                    "of 1 or -1 in the input array or the prediction component")
            else:
                # Scalar prediction: normalize the numeric type to float64.
                if type(prediction) == numpy.float64:
                    pass
                elif type(prediction) == float:
                    prediction = numpy.float64(prediction)
                elif type(prediction) == int or type(prediction) == numpy.int64:
                    prediction *= 1.0
                else:
                    import warnings
                    warnings.warn("Type mismatch in Prediction Vector: %s!"%type(prediction))
                    prediction = float(prediction)
                input_array = [[prediction]]
        # Replace non-finite predictions (NaN/inf) so downstream numpy
        # code keeps working.
        if not numpy.isfinite(input_array).all():
            if type(prediction) == list:
                # NOTE(review): this branch produces a flat 1-d list of
                # zeros while the other branches keep the 2-d layout, and
                # it zeroes every entry rather than only the non-finite
                # ones -- confirm intended.
                input_array = [0 for i in range(len(prediction))]
            elif prediction > 0:
                prediction = 10**9
                input_array = [[float(prediction)]]
            else:
                prediction = -10**9
                input_array = [[float(prediction)]]
        obj = base.BaseData.__new__(subtype, input_array)
        # add subclasses attributes to the created instance
        # obj.feature_names = ["prediction value"]
        obj.label = label
        obj.predictor = predictor
        # using the input array is not necessary any more
        if prediction is None:
            # Recover the prediction from the first row of the input
            # array: a scalar for a single value, else a list.
            l = list(input_array[0])
            if len(l) == 1:
                obj.prediction = l[0]
            else:
                obj.prediction = l
        else:
            obj.prediction = prediction
        if not tag is None:
            obj.tag = tag
        # Finally, we must return the newly created object:
        return obj
    def __array_finalize__(self, obj):
        # Numpy subclassing hook: propagate the custom attributes when a
        # view or slice of an existing PredictionVector is created.
        super(PredictionVector, self).__array_finalize__(obj)
        # set default values for attributes, since normally they are not needed
        # when taking just the values
        if not (obj is None) and not (type(obj) == numpy.ndarray):
            # reset the attributes from passed original object
            self.label = getattr(obj, 'label', None)
            self.predictor = getattr(obj, 'predictor', None)
            self.prediction = getattr(obj, 'prediction', None)
        else:
            self.label = None
            self.predictor = None
            self.prediction = None
    # which is a good printing format? "label, value"?
    def __str__(self):
        # One "label : value" pair per prediction (tab-separated for
        # multiple predictions).
        str_repr = ""
        if hasattr(self.label, "__iter__"):
            for label, prediction in zip(self.label, self.prediction):
                str_repr += "%s : %.4f \t" % (label, prediction)
        else:
            str_repr += "%s : %.4f \t" % (self.label, self.prediction)
        return str_repr
    def __reduce__(self):
        """ Refer to
        http://www.mail-archive.com/numpy-discussion@scipy.org/msg02446.html#
        for infos about pickling ndarray subclasses
        """
        # Append our extra attributes to the ndarray pickle state.
        object_state = list(super(PredictionVector, self).__reduce__())
        subclass_state = (self.label, self.predictor, self.prediction)
        object_state[2].append(subclass_state)
        object_state[2] = tuple(object_state[2])
        return tuple(object_state)
    def __setstate__(self, state):
        # Counterpart of __reduce__: split off our extra attributes,
        # then restore the ndarray part.
        nd_state, base_state, own_state = state
        super(PredictionVector, self).__setstate__((nd_state, base_state))
        (self.label, self.predictor, self.prediction) = own_state
    def __eq__(self, other):
        """ Same label and prediction value """
        if type(other) != type(self):
            return False
        return (self.label == other.label and
                numpy.allclose(self.prediction, other.prediction))
| {
"content_hash": "efa7757d00fd6b736043ee7c3bd3274c",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 93,
"avg_line_length": 42.58918918918919,
"alnum_prop": 0.6062952151288235,
"repo_name": "pyspace/pyspace",
"id": "814fc48a425306f4ad27028e9ef96893a4c09a89",
"size": "7879",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pySPACE/resources/data_types/prediction_vector.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11128"
},
{
"name": "C++",
"bytes": "309606"
},
{
"name": "Matlab",
"bytes": "3768"
},
{
"name": "Python",
"bytes": "3160853"
},
{
"name": "QMake",
"bytes": "3217"
},
{
"name": "Shell",
"bytes": "253"
}
],
"symlink_target": ""
} |
"""A filled polygon component"""
# Major package imports.
from numpy import array
# Enthought library imports.
from kiva.constants import EOF_FILL_STROKE, FILL, FILL_STROKE
from kiva.agg import points_in_polygon
from traits.api import Any, Event, Float, HasTraits, Instance, List, \
Property, Trait, Tuple
from traitsui.api import Group, View
# Local imports.
from enable.api import border_size_trait, Component
from enable.colors import ColorTrait
class PolygonModel(HasTraits):
    """ The data model for a Polygon: just its vertex list. """

    # The points that make up the vertices of this polygon.
    points = List(Tuple)

    def reset(self):
        """Discard all vertices."""
        self.points = []
class Polygon(Component):
""" A filled polygon component. """
#--------------------------------------------------------------------------
# Trait definitions.
#--------------------------------------------------------------------------
# The background color of this polygon.
background_color = ColorTrait("white")
# The color of the border of this polygon.
border_color = ColorTrait("black")
# The dash pattern to use for this polygon.
border_dash = Any
# The thickness of the border of this polygon.
border_size = Trait(1, border_size_trait)
# Event fired when the polygon is "complete".
complete = Event
# The rule to use to determine the inside of the polygon.
inside_rule = Trait('winding',
{'winding':FILL_STROKE, 'oddeven':EOF_FILL_STROKE })
# The points that make up this polygon.
model = Instance(PolygonModel, ())
# Convenience property to access the model's points.
points = Property
# The color of each vertex.
vertex_color = ColorTrait("black")
# The size of each vertex.
vertex_size = Float(3.0)
traits_view = View(Group('<component>', id = 'component'),
Group('<links>', id = 'links'),
Group('background_color', '_',
'border_color', '_',
'border_size',
id = 'Box',
style = 'custom'))
colorchip_map = {'color': 'color', 'alt_color': 'border_color'}
#--------------------------------------------------------------------------
# Traits property accessors
#--------------------------------------------------------------------------
    def _get_points(self):
        # Traits property getter for ``points``: expose the model's
        # vertex list directly.
        return self.model.points
#--------------------------------------------------------------------------
# 'Polygon' interface
#--------------------------------------------------------------------------
def reset(self):
"Reset the polygon to the initial state"
self.model.reset()
self.event_state = 'normal'
return
#--------------------------------------------------------------------------
# 'Component' interface
#--------------------------------------------------------------------------
def _draw_mainlayer(self, gc, view_bounds=None, mode="normal"):
"Draw the component in the specified graphics context"
self._draw_closed(gc)
return
#--------------------------------------------------------------------------
# Protected interface
#--------------------------------------------------------------------------
def _is_in(self, point):
    """Return whether *point* (an (x, y) tuple) lies inside this polygon.

    Delegates to kiva's ``points_in_polygon`` (winding-number algorithm,
    see the comp.graphics.algorithms FAQ), selecting the winding or
    odd-even variant according to ``inside_rule``.
    """
    candidates = array((point,))
    polygon_vertices = array(self.model.points)
    use_winding = self.inside_rule == 'winding'
    hits = points_in_polygon(candidates, polygon_vertices, use_winding)
    return hits[0]
#--------------------------------------------------------------------------
# Private interface
#--------------------------------------------------------------------------
def _draw_closed(self, gc):
    """Draw this polygon as a closed, filled outline plus its vertices.

    No-op unless the model holds at least three points.
    """
    pts = self.model.points
    if len(pts) <= 2:
        return
    # Stroke/fill parameters.
    gc.set_fill_color(self.background_color_)
    gc.set_stroke_color(self.border_color_)
    gc.set_line_width(self.border_size)
    gc.set_line_dash(self.border_dash)
    # Trace the outline in component-relative coordinates.
    gc.begin_path()
    gc.move_to(pts[0][0] - self.x, pts[0][1] + self.y)
    gc.lines([(px - self.x, py + self.y) for px, py in pts])
    gc.close_path()
    gc.draw_path(self.inside_rule_)
    # Vertex markers on top of the outline.
    self._draw_vertices(gc)
def _draw_open(self, gc):
    """Draw this polygon as an open outline (no closing segment).

    Identical to ``_draw_closed`` except that ``close_path`` is never
    called, so the last point is not joined back to the first.
    """
    pts = self.model.points
    if len(pts) <= 2:
        return
    # Stroke/fill parameters.
    gc.set_fill_color(self.background_color_)
    gc.set_stroke_color(self.border_color_)
    gc.set_line_width(self.border_size)
    gc.set_line_dash(self.border_dash)
    # Trace the outline in component-relative coordinates.
    gc.begin_path()
    gc.move_to(pts[0][0] - self.x, pts[0][1] + self.y)
    gc.lines([(px - self.x, py + self.y) for px, py in pts])
    gc.draw_path(self.inside_rule_)
    # Vertex markers on top of the outline.
    self._draw_vertices(gc)
def _draw_vertices(self, gc):
    """Draw a small filled square marker at each polygon vertex."""
    gc.set_fill_color(self.vertex_color_)
    gc.set_line_dash(None)
    half = self.vertex_size / 2.0
    # NOTE(review): vertices are offset by +self.x while the outline in
    # _draw_closed/_draw_open uses -self.x — presumably intentional, but
    # worth confirming against the component coordinate convention.
    markers = [(px + self.x, py + self.y) for px, py in self.model.points]
    if hasattr(gc, 'draw_path_at_points'):
        # Fast path: stamp one compiled rect path at every vertex.
        stamp = gc.get_empty_path()
        stamp.rect(-half, -half, self.vertex_size, self.vertex_size)
        gc.draw_path_at_points(markers, stamp, FILL_STROKE)
    else:
        # Fallback: draw each rect individually.
        for vx, vy in markers:
            gc.draw_rect((vx - half, vy - half,
                          self.vertex_size, self.vertex_size), FILL)
# EOF
| {
"content_hash": "3bbdff9c8f4a2f0fe6b9cc5f3086b6d9",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 85,
"avg_line_length": 33.45959595959596,
"alnum_prop": 0.4892075471698113,
"repo_name": "tommy-u/enable",
"id": "25bd10c88ff5357f5851cdef24e581d5435f24ce",
"size": "6625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enable/primitives/polygon.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "240"
},
{
"name": "C",
"bytes": "5526949"
},
{
"name": "C++",
"bytes": "3058044"
},
{
"name": "DIGITAL Command Language",
"bytes": "35819"
},
{
"name": "Groff",
"bytes": "236"
},
{
"name": "Makefile",
"bytes": "58238"
},
{
"name": "Objective-C",
"bytes": "16551"
},
{
"name": "Python",
"bytes": "2202660"
},
{
"name": "Shell",
"bytes": "6286"
}
],
"symlink_target": ""
} |
from datetime import datetime, timezone, tzinfo, timedelta
class TaipeiTimeZone(tzinfo):
    """Fixed UTC+8 time zone for Taiwan (no daylight-saving time).

    Bug fix: ``dst()`` previously returned the full 8-hour UTC offset,
    so aware datetimes in this zone wrongly reported 8 hours of daylight
    saving.  Taiwan observes no DST, so ``dst()`` must return a zero
    timedelta; with that fixed, UTC->local conversion is a plain shift
    by ``utcoffset()``.
    """

    # Constant offset from UTC, year-round.
    offset = timedelta(hours=8)

    def utcoffset(self, dt):
        """Total offset from UTC (standard time + DST): always +8h."""
        return self.offset

    def tzname(self, dt):
        """Name of the zone (Taipei Standard Time)."""
        return "台北標準時間"

    def dst(self, dt):
        """Daylight-saving adjustment: Taiwan has none, so zero."""
        return timedelta(0)

    def fromutc(self, dt):
        """Convert a UTC datetime (tagged with this tzinfo) to local time.

        With a constant offset and zero DST, this is simply dt + offset.
        """
        return dt + self.offset

    def __repr__(self):
        return "台北標準時間"
def local_datetime(utc_datetime, tz):
    """Parse an ISO-style UTC timestamp string and shift it into *tz*.

    *utc_datetime* must look like ``2015-01-19T23:55:14.965Z``.  Returns
    a naive datetime expressed in the target zone's local time (the
    parsed instant plus ``tz.offset``).
    """
    parsed = datetime.strptime(utc_datetime, "%Y-%m-%dT%H:%M:%S.%fZ")
    return parsed + tz.offset
def tz_object(utc_date, tz):
    """Parse a UTC timestamp string and convert it to an aware local time.

    Returns a pair ``(formatted_string, aware_datetime)`` where the
    string uses the ``YYYY/MM/DD HH:MM:SS`` format.
    """
    parsed = datetime.strptime(utc_date, "%Y-%m-%dT%H:%M:%S.%fZ")
    aware_local = parsed.replace(tzinfo=timezone.utc).astimezone(tz=tz)
    formatted = aware_local.strftime('%Y/%m/%d %H:%M:%S')
    return formatted, aware_local
def today_times(local_dates):
    """Count how many datetimes in *local_dates* fall on today (Taipei time).

    Fix: the original re-read the clock (``datetime.now``) on every loop
    iteration — wasted work, and the notion of "today" could change if
    the loop straddled midnight, giving an inconsistent count.  The
    current date is now computed once, up front.
    """
    today = datetime.now(TaipeiTimeZone()).date()
    return sum(1 for date in local_dates if date.date() == today)
def pprint(content):
    """Pretty-print *content* to stdout (thin wrapper over the stdlib)."""
    # Local import with an alias so this function can shadow the
    # module name without recursion.
    from pprint import pprint as _stdlib_pprint
    _stdlib_pprint(content)
if __name__ == '__main__':
    # Ad-hoc smoke test of the helpers above.
    # NOTE(review): the first two assignments to `t` are dead code —
    # each value is overwritten before it is ever used.
    t = datetime(2014, 12, 21, 12, 56, 39, 204000)
    t = '2015-1-19T23:55:14.965Z'
    t = local_datetime(t, TaipeiTimeZone())
    print(t)
    print(datetime.now(TaipeiTimeZone()))
    # `t` is a naive local datetime, so this counts it against today's date.
    c = today_times([t])
    print(c)
| {
"content_hash": "17fcb2aa45d711392d6afcdd98b8d238",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 73,
"avg_line_length": 29.24,
"alnum_prop": 0.6032831737346102,
"repo_name": "leVirve/lol-uccu",
"id": "0a18e8b10de551f7e0b0a5fbbbd5c58998346645",
"size": "1486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/util_method.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2191845"
},
{
"name": "JavaScript",
"bytes": "14660"
},
{
"name": "Python",
"bytes": "7879"
},
{
"name": "Shell",
"bytes": "139"
}
],
"symlink_target": ""
} |
# Public API of the pools subpackage.
__all__ = ["DefaultPool", "InterruptiblePool", "JoblibPool"]

# Re-export the concrete pool implementations at package level.
from .default import DefaultPool
from .interruptible import InterruptiblePool
from .jl import JoblibPool
| {
"content_hash": "6c31670e42a098eda729ed4ae7248e47",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 60,
"avg_line_length": 33.4,
"alnum_prop": 0.7844311377245509,
"repo_name": "jellis18/emcee3",
"id": "d6cce8f8bf1bdc625820c1077bca3bc2b836998d",
"size": "192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "emcee3/pools/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "914"
},
{
"name": "Python",
"bytes": "79045"
},
{
"name": "TeX",
"bytes": "181310"
}
],
"symlink_target": ""
} |
"""
"""
import sympy
from Chapter2.themes.lisp_list_structured_data import car, cdr, lisp_list, nil
from Chapter2.themes.mapping_over_lists import map
def quote(symbols):
    """Emulate Lisp's ``quote`` using sympy, which Python lacks natively.

    A string becomes a sympy atom: integer-looking text -> ``Integer``,
    float-looking text -> ``RealNumber``, anything else -> a symbolic
    variable.  Non-strings are treated as lisp-lists and quoted
    element-wise via the chapter's ``map``.
    """
    if not isinstance(symbols, str):
        return map(quote, symbols)
    try:
        int(symbols)
    except ValueError:
        pass
    else:
        return sympy.Integer(symbols)
    try:
        float(symbols)
    except ValueError:
        return sympy.var(symbols)
    return sympy.RealNumber(symbols)
def symbol_equal(symbol1, symbol2):
    """Return True iff both arguments are symbols with the same name.

    Anything lacking a ``name`` attribute compares unequal (False),
    mirroring the original's AttributeError fallback.
    """
    if hasattr(symbol1, "name") and hasattr(symbol2, "name"):
        return symbol1.name == symbol2.name
    return False
def memq(item, x):
    """Lisp ``memq``: scan lisp-list *x* for the symbol *item*.

    Returns the sublist starting at the first match, or False when the
    symbol does not occur.  Iterative rewrite of the original recursion.
    """
    rest = x
    while rest is not nil:
        if symbol_equal(item, car(rest)):
            return rest
        rest = cdr(rest)
    return False
def run_the_magic():
    """Demonstrate memq on the two examples from SICP section 2.3.1."""
    # 'apple' is not a top-level member -> prints False.
    print(memq(
        quote('apple'),
        quote(lisp_list('pear', 'banana', 'prune'))
    ))
    # memq only scans the top level: the nested ('apple' 'sauce') list
    # does not count, so the match is the later bare 'apple'.
    print(memq(
        quote('apple'),
        quote(lisp_list(
            'x',
            lisp_list(
                'apple',
                'sauce',
            ),
            'y',
            'apple',
            'pear',
        ))
    ))
if __name__ == '__main__':
    # Run the demo when executed as a script.
    run_the_magic()
| {
"content_hash": "26d20cf8bd4e048a2e941791ea286592",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 78,
"avg_line_length": 21.515151515151516,
"alnum_prop": 0.5232394366197183,
"repo_name": "aoyono/sicpy",
"id": "afc62aa9fe45d223465be96ee3334eaa8e22c5e9",
"size": "1444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Chapter2/themes/symbolic_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "229644"
}
],
"symlink_target": ""
} |
"""
homeassistant.components.sensor.tellduslive
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Shows sensor values from Tellstick Net/Telstick Live.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.tellduslive/
"""
import logging
from datetime import datetime
from homeassistant.components import tellduslive
from homeassistant.const import (
ATTR_BATTERY_LEVEL, DEVICE_DEFAULT_NAME, TEMP_CELCIUS)
from homeassistant.helpers.entity import Entity
# Extra state-attribute key exposed on each sensor entity.
ATTR_LAST_UPDATED = "time_last_updated"

_LOGGER = logging.getLogger(__name__)

# Sensor value-type identifiers as reported by the Telldus Live API.
SENSOR_TYPE_TEMP = "temp"
SENSOR_TYPE_HUMIDITY = "humidity"
SENSOR_TYPE_RAINRATE = "rrate"
SENSOR_TYPE_RAINTOTAL = "rtot"
SENSOR_TYPE_WINDDIRECTION = "wdir"
SENSOR_TYPE_WINDAVERAGE = "wavg"
SENSOR_TYPE_WINDGUST = "wgust"
SENSOR_TYPE_WATT = "watt"

# Map sensor type -> [friendly name, unit of measurement, mdi icon].
SENSOR_TYPES = {
    SENSOR_TYPE_TEMP: ['Temperature', TEMP_CELCIUS, "mdi:thermometer"],
    SENSOR_TYPE_HUMIDITY: ['Humidity', '%', "mdi:water"],
    SENSOR_TYPE_RAINRATE: ['Rain rate', 'mm', "mdi:water"],
    SENSOR_TYPE_RAINTOTAL: ['Rain total', 'mm', "mdi:water"],
    SENSOR_TYPE_WINDDIRECTION: ['Wind direction', '', ""],
    SENSOR_TYPE_WINDAVERAGE: ['Wind average', 'm/s', ""],
    SENSOR_TYPE_WINDGUST: ['Wind gust', 'm/s', ""],
    SENSOR_TYPE_WATT: ['Watt', 'W', ""],
}
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up Telldus Live sensors from discovery info.

    Each entry in *discovery_info* is a backend sensor id; nothing is
    added when the platform is loaded without discovery.
    """
    if discovery_info is not None:
        add_devices(TelldusLiveSensor(sensor) for sensor in discovery_info)
class TelldusLiveSensor(Entity):
    """ Represents a Telldus Live sensor. """

    def __init__(self, sensor_id):
        """Store the backend sensor id and fetch an initial reading."""
        self._id = sensor_id
        self.update()
        _LOGGER.debug("created sensor %s", self)

    def update(self):
        """ update sensor values """
        # Refresh the shared Telldus Live session, then re-read this
        # sensor's record from it.
        tellduslive.NETWORK.update_sensors()
        self._sensor = tellduslive.NETWORK.get_sensor(self._id)

    @property
    def _sensor_name(self):
        # Raw name as reported by the Telldus Live API.
        return self._sensor["name"]

    @property
    def _sensor_value(self):
        # Raw (string) value of the current reading.
        return self._sensor["data"]["value"]

    @property
    def _sensor_type(self):
        # One of the SENSOR_TYPE_* identifiers.
        return self._sensor["data"]["name"]

    @property
    def _battery_level(self):
        # The API reports battery on a 0-255 scale; convert to percent.
        # None when the sensor does not report battery at all.
        sensor_battery_level = self._sensor.get("battery")
        return round(sensor_battery_level * 100 / 255) \
            if sensor_battery_level else None

    @property
    def _last_updated(self):
        # NOTE(review): fromtimestamp renders in the host's local zone —
        # presumably intended; confirm against the dashboard display.
        sensor_last_updated = self._sensor.get("lastUpdated")
        return str(datetime.fromtimestamp(sensor_last_updated)) \
            if sensor_last_updated else None

    @property
    def _value_as_temperature(self):
        # Temperature rounded to one decimal place.
        return round(float(self._sensor_value), 1)

    @property
    def _value_as_humidity(self):
        # Humidity rounded to the nearest whole percent.
        return int(round(float(self._sensor_value)))

    @property
    def name(self):
        """ Returns the name of the device. """
        return "{} {}".format(self._sensor_name or DEVICE_DEFAULT_NAME,
                              self.quantity_name)

    @property
    def available(self):
        # The backend marks unreachable sensors with an "offline" flag.
        return not self._sensor.get("offline", False)

    @property
    def state(self):
        """ Returns the state of the device. """
        # Only temperature and humidity are converted; for every other
        # sensor type this property falls through and returns None.
        if self._sensor_type == SENSOR_TYPE_TEMP:
            return self._value_as_temperature
        elif self._sensor_type == SENSOR_TYPE_HUMIDITY:
            return self._value_as_humidity

    @property
    def device_state_attributes(self):
        # Expose battery level and last-update time when available.
        attrs = {}
        if self._battery_level is not None:
            attrs[ATTR_BATTERY_LEVEL] = self._battery_level
        if self._last_updated is not None:
            attrs[ATTR_LAST_UPDATED] = self._last_updated
        return attrs

    @property
    def quantity_name(self):
        """ name of quantity """
        return SENSOR_TYPES[self._sensor_type][0]

    @property
    def unit_of_measurement(self):
        # Unit string from the SENSOR_TYPES table ('' when undefined).
        return SENSOR_TYPES[self._sensor_type][1]

    @property
    def icon(self):
        # mdi icon name from the SENSOR_TYPES table ('' when undefined).
        return SENSOR_TYPES[self._sensor_type][2]
| {
"content_hash": "05ca5ec287d41e2fc56397d7f2a8efc9",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 74,
"avg_line_length": 30.203007518796994,
"alnum_prop": 0.6333084391336818,
"repo_name": "nnic/home-assistant",
"id": "001d20ee79222e39966f8f33f48c1f9f1250ef7f",
"size": "4017",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sensor/tellduslive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1482064"
},
{
"name": "Python",
"bytes": "1790232"
},
{
"name": "Shell",
"bytes": "3570"
}
],
"symlink_target": ""
} |
import roslib; roslib.load_manifest('ar_mapping_base')
import rospy
from geometry_msgs.msg import PointStamped
import tf
from tf.transformations import euler_from_quaternion
import numpy
from ar_mapping_base.mapping_kf import *
from ar_track_alvar_msgs.msg import AlvarMarkers
class RoverMapping:
    """ROS node that feeds AR-tag detections into a Kalman-filter mapper."""

    def __init__(self, name):
        """Initialise the node, read parameters and create the mapper."""
        self.name = name
        # NOTE(review): encoder_precision, last_cmd and connected are set
        # here but never read elsewhere in this class — confirm whether
        # they are used by subclasses or are dead state.
        self.encoder_precision = 0.05 # [m]
        self.ar_precision = 0.50 # [m]
        self.target_frame = "/world"
        rospy.init_node('rover_mapping')
        # ROS parameters may override the constructor defaults.
        self.name = rospy.get_param("~rover_name", self.name)
        self.target_frame = rospy.get_param("~target_frame", self.target_frame)
        self.ar_precision = rospy.get_param("~ar_precision", self.ar_precision)
        rospy.loginfo("Starting rover driver for rover '%s' " % (self.name))
        self.last_cmd = rospy.Time.now()
        self.listener = tf.TransformListener()
        self.connected = False
        # Instantiate the right filter based on launch parameters
        self.mapper = MappingKF()

    def ar_cb(self, markers):
        """AR-marker callback: update the map with each observed tag.

        For every marker: look up the rover pose in the target frame,
        transform the marker into the rover's ground frame, and feed the
        (observation, rover state, tag id) triple to the Kalman filter.
        """
        for m in markers.markers:
            # Block (up to 1 s) until both transforms are available at
            # the marker's timestamp.
            self.listener.waitForTransform(self.target_frame, '/%s/ground' % self.name, m.header.stamp, rospy.Duration(1.0))
            self.listener.waitForTransform("/%s/ground" % self.name, m.header.frame_id, m.header.stamp, rospy.Duration(1.0))
            # Rover state X = [x, y, heading] in the target frame.
            ((x, y, z), rot) = self.listener.lookupTransform(self.target_frame, '/%s/ground' % self.name, m.header.stamp)
            euler = euler_from_quaternion(rot)
            X = vstack([x, y, euler[2]])
            # Marker observation Z = [x, y] in the rover's ground frame.
            m_pose = PointStamped()
            m_pose.header = m.header
            m_pose.point = m.pose.pose.position
            m_pose = self.listener.transformPoint("/%s/ground" % self.name, m_pose)
            Z = vstack([m_pose.point.x, m_pose.point.y])
            self.mapper.update_ar(Z, X, m.id, self.ar_precision)
        # Publish the updated map once per callback batch.
        self.mapper.publish(self.target_frame, markers.header.stamp)

    def run(self):
        """Subscribe to AR detections and spin until shutdown."""
        # NOTE(review): `timeout` is assigned but never used.
        timeout = True
        rate = rospy.Rate(2)
        rospy.sleep(1.0)
        self.ar_sub = rospy.Subscriber("/ar_pose_marker", AlvarMarkers, self.ar_cb)
        while not rospy.is_shutdown():
            rate.sleep()
if __name__ == '__main__':
    # Standard ROS entry point: build the node and spin until shutdown.
    try:
        rd = RoverMapping("rover")
        rd.run()
    except rospy.ROSInterruptException:
        # Raised by rospy on Ctrl-C / node shutdown; exit quietly.
        pass
| {
"content_hash": "5175606fa9f034327f1262704d2ff398",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 122,
"avg_line_length": 38.0655737704918,
"alnum_prop": 0.6270456503014643,
"repo_name": "cedricpradalier/vrep_ros_ws",
"id": "4228ad0b3a7bb88d4ccb7b34d507c9d1e87defc5",
"size": "2344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ar_mapping_base/nodes/rover_mapping.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "133294"
},
{
"name": "C++",
"bytes": "612142"
},
{
"name": "CMake",
"bytes": "177096"
},
{
"name": "Makefile",
"bytes": "574"
},
{
"name": "Python",
"bytes": "292428"
},
{
"name": "Shell",
"bytes": "25"
}
],
"symlink_target": ""
} |
"""!
The main file with the class definitions
Core
==============
This file contains all of the classes for the module.
"""
from contextlib import contextmanager
import os.path as p
import re
from six import string_types
from builtins import int
import numpy as np
from scipy.io import FortranFile
from dask import delayed
import dask.array as dsa
import xarray as xr
class Grid(object):
    """Geometry of the model domain.

    Holds the axis arrays for an Arakawa C-grid:

    - ``x``, ``y``: tracer-point coordinates (cell centres)
    - ``xp1``, ``yp1``: u/v-velocity and vorticity point coordinates
      (cell edges; one more point than the tracer axes)

    :param int nx: number of grid points in the x direction
    :param int ny: number of grid points in the y direction
    :param int layers: number of active layers
    :param float dx: grid spacing in x (metres)
    :param float dy: grid spacing in y (metres)
    :param float x0: x value at the lower-left corner of the domain
    :param float y0: y value at the lower-left corner of the domain
    """

    def __init__(self, nx, ny, layers, dx, dy, x0=0, y0=0):
        """Build the axis arrays and record the grid dimensions."""
        # Edge (velocity/vorticity) axes: nx+1 / ny+1 points.
        self.xp1 = np.linspace(x0, nx * dx + x0, nx + 1)
        self.yp1 = np.linspace(y0, ny * dy + y0, ny + 1)
        # Cell-centre (tracer) axes: midpoints of consecutive edges.
        self.x = (self.xp1[:-1] + self.xp1[1:]) / 2.
        self.y = (self.yp1[:-1] + self.yp1[1:]) / 2.
        # Dimensions.
        self.nx = nx
        self.ny = ny
        self.layers = layers
        # Spacing.
        self.dx = dx
        self.dy = dy
@contextmanager
def fortran_file(*args, **kwargs):
    """Context manager around scipy's FortranFile.

    Opens the file with the given arguments and guarantees it is closed
    when the ``with`` block exits, even on error.
    """
    handle = FortranFile(*args, **kwargs)
    try:
        yield handle
    finally:
        handle.close()
def interpret_raw_file(name, nx, ny, layers):
    """Read one raw output array dumped by the Aronnax core.

    Each file holds a single flat float64 array; its true shape depends
    on which variable it is (encoded in the file name, which determines
    the grid staggering offsets) and on the simulation resolution.
    Fortran writes the fastest-varying index first, so the flat data is
    reshaped as (layers, ny+dy, nx+dx).
    """
    dx, dy, layers = __find_grid_offsets(name, nx, ny, layers)
    with fortran_file(name, 'r') as f:
        flat = f.read_reals(dtype=np.float64)
    return flat.reshape(layers, ny + dy, nx + dx)
def __find_grid_offsets(name, nx, ny, layers):
    """Internal: map an output file name to (dx, dy, layers).

    dx/dy are 1 when the variable lives on the u/v (or vorticity) points
    of the staggered grid, adding one point along that axis; fields
    flagged as single-layer always have exactly one layer.  Unrecognised
    names print a warning and return None, as before.
    """
    # prefix -> (dx, dy, single_layer).  The prefixes are mutually
    # exclusive (each ends in '.'), so lookup order does not matter.
    offsets_by_prefix = {
        "snap.BP.": (0, 0, False),
        "snap.eta.": (0, 0, True),
        "snap.eta_new.": (0, 0, True),
        "snap.eta_star.": (0, 0, True),
        "snap.h.": (0, 0, False),
        "snap.u.": (1, 0, False),
        "snap.ub.": (1, 0, True),
        "snap.v.": (0, 1, False),
        "snap.vb.": (0, 1, True),
        "snap.zeta.": (1, 1, False),
        "wind_x.": (1, 0, True),
        "wind_y.": (0, 1, True),
        "av.h.": (0, 0, False),
        "av.u.": (1, 0, False),
        "av.v.": (0, 1, False),
        "av.eta.": (0, 0, True),
        "debug.dhdt.": (0, 0, False),
        "debug.dudt.": (1, 0, False),
        "debug.dvdt.": (0, 1, False),
    }
    file_part = p.basename(name)
    for prefix, (dx, dy, single_layer) in offsets_by_prefix.items():
        if file_part.startswith(prefix):
            return dx, dy, (1 if single_layer else layers)
    print('File not recognised - no output returned')
    return
def interpret_raw_file_delayed(name, nx, ny, layers, dx, dy):
    """Lazily load a single output file as a dask array.

    Wraps ``interpret_raw_file`` in ``dask.delayed`` so the file is only
    read when the array is computed.  Intended as an internal helper for
    ``open_mfdataarray``, though it can be used directly.
    """
    shape = (layers, ny + dy, nx + dx)
    lazy_read = delayed(interpret_raw_file)(name, nx, ny, layers)
    return dsa.from_delayed(lazy_read, shape, float)
def open_mfdataarray(files, grid):
    """Open a number of output files into an xarray dataarray. All files
    must contain the same variable.

    Uses dask.delayed to lazily load data as required.

    :param list files: a list of file names to load
    :param grid: a grid object created by aronnax.Grid

    :return: xarray.DataArray of the desired variable, with an ``iter``
        coordinate taken from the numeric suffix of each file name
    """
    files = sorted(files)
    # Split each file name into "<variable>.<timestamp>": the numeric
    # suffix becomes the 'iter' coordinate, the rest identifies the
    # variable and must be the same for every file.
    output_variables = set()
    timestamps = []
    for file_name in files:
        variable_name = p.basename(file_name)
        timestamps.append(float(variable_name.split('.')[-1]))
        variable_name = '.'.join(variable_name.split('.')[:-1])
        output_variables.add(variable_name)
    output_variables = list(output_variables)
    if len(output_variables) > 1:
        raise ValueError\
        ('open_mfdataarray only supports loading multiple timestamps of a single variable.')
    # dx/dy from the first file decide which grid axes the variable
    # lives on (all files hold the same variable, so one lookup suffices).
    dx, dy, layers = __find_grid_offsets(files[0], grid.nx, grid.ny, grid.layers)
    datasets = [interpret_raw_file_delayed(file_name, grid.nx,
                                           grid.ny, layers, dx, dy)
                for file_name in files]
    # Stack the per-file lazy arrays along a new leading 'iter' axis.
    ds = dsa.stack(datasets, axis=0)
    if dx ==1 and dy == 1:
        # variable at vorticity location
        ds = xr.DataArray(ds, coords=dict(iter=timestamps,
                                          layers=np.arange(layers),
                                          yp1=grid.yp1, xp1=grid.xp1),
                          dims=['iter','layers','yp1','xp1'],
                          name=output_variables[0])
    elif dx == 1:
        # variable at u location
        ds = xr.DataArray(ds, coords=dict(iter=timestamps,
                                          layers=np.arange(layers),
                                          y=grid.y, xp1=grid.xp1),
                          dims=['iter','layers','y','xp1'],
                          name=output_variables[0])
    elif dy == 1:
        # variable at v location
        ds = xr.DataArray(ds, coords=dict(iter=timestamps,
                                          layers=np.arange(layers),
                                          yp1=grid.yp1, x=grid.x),
                          dims=['iter','layers','yp1','x'],
                          name=output_variables[0])
    elif dx == 0 and dy ==0:
        # variable at h location
        ds = xr.DataArray(ds, coords=dict(iter=timestamps,
                                          layers=np.arange(layers),
                                          y=grid.y, x=grid.x),
                          dims=['iter','layers','y','x'],
                          name=output_variables[0])
    else:
        # not able to determine where we are
        raise ValueError('Unable to determine grid location')
    return ds
### General input construction helpers
def tracer_point_variable(grid, field_layers, *funcs):
    """Input generator for a variable at the tracer (cell-centre) points.

    Each element of *funcs* fills one layer: a number fills the layer
    with that constant, a callable is evaluated as ``f(X, Y)`` on the
    tracer-point meshgrid.
    """
    X, Y = np.meshgrid(grid.x, grid.y)
    assert field_layers == len(funcs)
    field = np.ones((field_layers, grid.ny, grid.nx))
    for layer_idx, spec in enumerate(funcs):
        field[layer_idx, :, :] = spec if isinstance(spec, (int, float)) else spec(X, Y)
    return field
def u_point_variable(grid, field_layers, *funcs):
    """Input generator for a variable at the u (x-edge) points.

    Each element of *funcs* fills one layer: a number fills the layer
    with that constant, a callable is evaluated as ``f(X, Y)`` on the
    u-point meshgrid (xp1 by y).
    """
    X, Y = np.meshgrid(grid.xp1, grid.y)
    assert field_layers == len(funcs)
    field = np.ones((field_layers, grid.ny, grid.nx + 1))
    for layer_idx, spec in enumerate(funcs):
        field[layer_idx, :, :] = spec if isinstance(spec, (int, float)) else spec(X, Y)
    return field
def v_point_variable(grid, field_layers, *funcs):
    """Input generator for a variable at the v (y-edge) points.

    Each element of *funcs* fills one layer: a number fills the layer
    with that constant, a callable is evaluated as ``f(X, Y)`` on the
    v-point meshgrid (x by yp1).
    """
    X, Y = np.meshgrid(grid.x, grid.yp1)
    assert field_layers == len(funcs)
    field = np.ones((field_layers, grid.ny + 1, grid.nx))
    for layer_idx, spec in enumerate(funcs):
        field[layer_idx, :, :] = spec if isinstance(spec, (int, float)) else spec(X, Y)
    return field
def time_series_variable(n_time_steps, dt, func):
    """Input generator for a time-series forcing variable.

    *func* is a one-element list: a number fills the whole series with
    that constant; a callable is evaluated as ``f(n_time_steps, dt)``
    and must produce ``n_time_steps`` values.
    """
    assert len(func) == 1
    spec = func[0]
    series = np.zeros((n_time_steps))
    # Assign into the zeros array so shape/dtype coercion matches the
    # original behaviour for callables returning lists or arrays.
    series[:] = spec if isinstance(spec, (int, float)) else spec(n_time_steps, dt)
    return series
def u_wind(grid, wind_n_records, f):
    """Input generator for a wind field at the u (x-edge) points.

    A numeric *f* fills every record with that constant.  A callable is
    evaluated as ``f(X, Y)`` for a single record, or ``f(X, Y, n)`` when
    several records are requested; a record count below 1 is rejected.
    """
    X, Y = np.meshgrid(grid.xp1, grid.y)
    field = np.ones((wind_n_records, grid.ny, grid.nx + 1))
    if isinstance(f, (int, float)):
        return field * f
    if wind_n_records == 1:
        field[0, :, :] = f(X, Y)
        return field
    if wind_n_records > 1:
        return f(X, Y, wind_n_records)
    raise ValueError('wind_n_records should be 1 or greater')
def v_wind(grid, wind_n_records, f):
    """Input generator for a wind field at the v (y-edge) points.

    A numeric *f* fills every record with that constant.  A callable is
    evaluated as ``f(X, Y)`` for a single record, or ``f(X, Y, n)`` when
    several records are requested; a record count below 1 is rejected.
    """
    X, Y = np.meshgrid(grid.x, grid.yp1)
    field = np.ones((wind_n_records, grid.ny + 1, grid.nx))
    if isinstance(f, (int, float)):
        return field * f
    if wind_n_records == 1:
        field[0, :, :] = f(X, Y)
        return field
    if wind_n_records > 1:
        return f(X, Y, wind_n_records)
    raise ValueError('wind_n_records should be 1 or greater')
### Specific construction helpers
def f_plane_f_u(grid, field_layers, coeff):
    """Constant (f-plane) Coriolis parameter at the u points.

    NOTE(review): the returned shape is (nx+1, ny), transposed relative
    to beta_plane_f_u's (ny, nx+1) meshgrid output — presumably harmless
    because the array is written out flat, but worth confirming.
    """
    assert field_layers == 1
    return np.full((grid.nx + 1, grid.ny), coeff, dtype=np.float64)
def f_plane_f_v(grid, field_layers, coeff):
    """Constant (f-plane) Coriolis parameter at the v points.

    NOTE(review): the returned shape is (nx, ny+1), transposed relative
    to beta_plane_f_v's meshgrid output — presumably harmless because
    the array is written out flat, but worth confirming.
    """
    assert field_layers == 1
    return np.full((grid.nx, grid.ny + 1), coeff, dtype=np.float64)
def beta_plane_f_u(grid, field_layers, f0, beta):
    """Beta-plane Coriolis parameter f0 + beta*y at the u points."""
    assert field_layers == 1
    _, Y = np.meshgrid(grid.xp1, grid.y)
    return f0 + beta * Y
def beta_plane_f_v(grid, field_layers, f0, beta):
    """Beta-plane Coriolis parameter f0 + beta*y at the v points."""
    assert field_layers == 1
    _, Y = np.meshgrid(grid.x, grid.yp1)
    return f0 + beta * Y
def rectangular_pool(grid, field_layers):
    """Wet mask for a maximal rectangular pool: 1 inside, 0 on the rim."""
    assert field_layers == 1
    mask = np.ones((grid.nx, grid.ny), dtype=np.float64)
    # Zero out the first/last row and column in one fancy-index pass each.
    mask[[0, -1], :] = 0
    mask[:, [0, -1]] = 0
    return mask
# Pattern for string data specifiers of the form ":<name>:<arg1,arg2,...>".
specifier_rx = re.compile(r':(.*):(.*)')

# Whitelist of generator functions that a string specifier may invoke,
# keyed by the name that appears between the first pair of colons.
ok_generators = {
    'tracer_point_variable': tracer_point_variable,
    'u_point_variable': u_point_variable,
    'v_point_variable': v_point_variable,
    'time_series_variable': time_series_variable,
    'u_wind': u_wind,
    'v_wind': v_wind,
    'beta_plane_f_u': beta_plane_f_u,
    'beta_plane_f_v': beta_plane_f_v,
    'f_plane_f_u': f_plane_f_u,
    'f_plane_f_v': f_plane_f_v,
    'rectangular_pool': rectangular_pool,
}
def interpret_data_specifier(string):
    """Parse a ":name:a1,a2,..." specifier into (generator, args).

    Returns None when the string does not match the specifier pattern;
    the arguments (if any) are converted to floats.
    """
    match = specifier_rx.match(string)
    if not match:
        return None
    name = match.group(1)
    arg_str = match.group(2)
    args = [float(a) for a in arg_str.split(',')] if arg_str else []
    return (ok_generators[name], args)
def interpret_requested_data(requested_data, shape, config):
    """Interpret a flexible input data specification.

    The requested_data can be one of

    - TODO A string giving the path to a NetCDF file, whose content
      will be interpolated to match the desired grid specification;
    - A string giving the path to a raw Fortran array file, whose
      content will be used as-is;
    - TODO A numpy array in memory, whose content will be used as-is,
      or TODO interpolated; or
    - A string specifying auto-generation of the required data, in this format:
      :<generator_func_name>:arg1,arg2,...argn
    - Python objects specifying auto-generation of the required data.

    In this case, `interpret_requested_data` will construct the
    appropriate `Grid` instance and pass it, together with the
    `requested_data`, to an appropriate meta-generator for the array
    shape of the needful datum (determined by the `shape` argument).
    The exact API varies with the meta-generator, but they typically
    interpret numbers as that constant and functions as an analytic
    definition of the field, which is evaluated on appropriate numpy
    arrays to produce the needed numerical values.
    """
    # Build the grid and forcing parameters from the run configuration.
    grid = Grid(config.getint("grid", "nx"), config.getint("grid", "ny"),
                config.getint("grid", "layers"),
                config.getfloat("grid", "dx"), config.getfloat("grid", "dy"))
    wind_n_records = config.getint("external_forcing", "wind_n_records")
    # None for shapes without a layer dimension ("time", "windU", "windV").
    field_layers = find_field_layers(shape, grid)
    if isinstance(requested_data, string_types):
        candidate = interpret_data_specifier(requested_data)
        if candidate is not None:
            (func, args) = candidate
            return func(grid, field_layers, *args)
        else:
            # Assume Fortran file name
            with fortran_file(requested_data, 'r') as f:
                return f.read_reals(dtype=np.float64)
    else:
        # Dispatch on the declared array shape; each branch returns.
        if shape == "2dT" or shape == "3dT":
            return tracer_point_variable(grid, field_layers, *requested_data)
        if shape == "2dU" or shape == "3dU":
            return u_point_variable(grid, field_layers, *requested_data)
        if shape == "2dV" or shape == "3dV":
            return v_point_variable(grid, field_layers, *requested_data)
        if shape == "time":
            n_time_steps = config.getint("numerics", "n_time_steps")
            dt = config.getfloat("numerics", "dt")
            return time_series_variable(n_time_steps, dt, requested_data)
        if shape == 'windU':
            return u_wind(grid, wind_n_records, *requested_data)
        if shape == 'windV':
            return v_wind(grid, wind_n_records, *requested_data)
        else:
            # NOTE(review): this `else` pairs only with the 'windV' test
            # above; every earlier shape has already returned, so in
            # practice any unknown shape raises here.
            raise Exception("TODO implement custom generation for other input shapes")
def find_field_layers(shape, grid):
    """Return how many layers a field of the given shape should have.

    2-D shapes carry a single layer; 3-D shapes span the full layer
    stack.  Shapes with no layer dimension at all ("time", "windU",
    "windV", or anything unrecognised) yield None — the original fell
    off the end of an if-chain and returned None implicitly; this makes
    that behaviour explicit and collapses the duplication.
    """
    layers_by_shape = {
        "2dT": 1,
        "2dU": 1,
        "2dV": 1,
        "3dT": grid.layers,
        "3dU": grid.layers,
        "3dV": grid.layers,
    }
    return layers_by_shape.get(shape)
| {
"content_hash": "8eb41eedadca797c85825a53c0e9214d",
"timestamp": "",
"source": "github",
"line_count": 467,
"max_line_length": 173,
"avg_line_length": 34.942184154175585,
"alnum_prop": 0.597867385709033,
"repo_name": "edoddridge/aronnax",
"id": "1d017dfa0a288b320b732a5d3f01fd35780990a8",
"size": "16318",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aronnax/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Fortran",
"bytes": "190195"
},
{
"name": "Makefile",
"bytes": "2093"
},
{
"name": "Python",
"bytes": "115223"
},
{
"name": "TeX",
"bytes": "3934"
}
],
"symlink_target": ""
} |
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.trunking.v1.trunk.credential_list import CredentialListList
from twilio.rest.trunking.v1.trunk.ip_access_control_list import IpAccessControlListList
from twilio.rest.trunking.v1.trunk.origination_url import OriginationUrlList
from twilio.rest.trunking.v1.trunk.phone_number import PhoneNumberList
class TrunkList(ListResource):
def __init__(self, version):
"""
Initialize the TrunkList
:param Version version: Version that contains the resource
:returns: twilio.rest.trunking.v1.trunk.TrunkList
:rtype: twilio.rest.trunking.v1.trunk.TrunkList
"""
super(TrunkList, self).__init__(version)
# Path Solution
self._solution = {}
self._uri = '/Trunks'.format(**self._solution)
def create(self, friendly_name=values.unset, domain_name=values.unset,
disaster_recovery_url=values.unset,
disaster_recovery_method=values.unset, recording=values.unset,
secure=values.unset):
"""
Create a new TrunkInstance
:param unicode friendly_name: The friendly_name
:param unicode domain_name: The domain_name
:param unicode disaster_recovery_url: The disaster_recovery_url
:param unicode disaster_recovery_method: The disaster_recovery_method
:param unicode recording: The recording
:param bool secure: The secure
:returns: Newly created TrunkInstance
:rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
"""
data = values.of({
'FriendlyName': friendly_name,
'DomainName': domain_name,
'DisasterRecoveryUrl': disaster_recovery_url,
'DisasterRecoveryMethod': disaster_recovery_method,
'Recording': recording,
'Secure': secure,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return TrunkInstance(
self._version,
payload,
)
def stream(self, limit=None, page_size=None):
"""
Streams TrunkInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.trunking.v1.trunk.TrunkInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists TrunkInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.trunking.v1.trunk.TrunkInstance]
"""
return list(self.stream(
limit=limit,
page_size=page_size,
))
def page(self, page_token=values.unset, page_number=values.unset,
         page_size=values.unset):
    """
    Retrieve a single page of TrunkInstance records from the API.
    The request is executed immediately.

    :param str page_token: PageToken provided by the API
    :param int page_number: Page number, kept only for client state
    :param int page_size: Number of records to return, defaults to 50

    :returns: Page of TrunkInstance
    :rtype: twilio.rest.trunking.v1.trunk.TrunkPage
    """
    query = values.of({
        'PageToken': page_token,
        'Page': page_number,
        'PageSize': page_size,
    })

    response = self._version.page('GET', self._uri, params=query)

    return TrunkPage(self._version, response, self._solution)
def get(self, sid):
    """
    Construct a TrunkContext addressing a single trunk by sid.

    :param sid: The sid

    :returns: twilio.rest.trunking.v1.trunk.TrunkContext
    :rtype: twilio.rest.trunking.v1.trunk.TrunkContext
    """
    return TrunkContext(self._version, sid=sid)
def __call__(self, sid):
    """
    Constructs a TrunkContext.

    Delegates to :meth:`get` so the two access styles (``trunks(sid)`` and
    ``trunks.get(sid)``) cannot drift apart.

    :param sid: The sid

    :returns: twilio.rest.trunking.v1.trunk.TrunkContext
    :rtype: twilio.rest.trunking.v1.trunk.TrunkContext
    """
    return self.get(sid)
def __repr__(self):
    """
    Provide a friendly representation.

    :returns: Machine friendly representation
    :rtype: str
    """
    # The list resource carries no per-instance state worth printing.
    return '<Twilio.Trunking.V1.TrunkList>'
class TrunkPage(Page):
    """A single page of Trunk results returned by the Trunking v1 API."""

    def __init__(self, version, response, solution):
        """
        Initialize the TrunkPage.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API

        :returns: twilio.rest.trunking.v1.trunk.TrunkPage
        :rtype: twilio.rest.trunking.v1.trunk.TrunkPage
        """
        super(TrunkPage, self).__init__(version, response)

        # Path solution, kept so instances built from this page can be addressed.
        self._solution = solution

    def get_instance(self, payload):
        """
        Build a TrunkInstance from one record of the page payload.

        :param dict payload: Payload response from the API

        :returns: twilio.rest.trunking.v1.trunk.TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
        """
        return TrunkInstance(self._version, payload)

    def __repr__(self):
        """
        Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Trunking.V1.TrunkPage>'
class TrunkContext(InstanceContext):
    """Context addressing a single Trunk resource by sid."""

    def __init__(self, version, sid):
        """
        Initialize the TrunkContext.

        :param Version version: Version that contains the resource
        :param sid: The sid

        :returns: twilio.rest.trunking.v1.trunk.TrunkContext
        :rtype: twilio.rest.trunking.v1.trunk.TrunkContext
        """
        super(TrunkContext, self).__init__(version)

        # Path solution
        self._solution = {'sid': sid}
        self._uri = '/Trunks/{sid}'.format(**self._solution)

        # Dependent resource lists, built lazily on first access.
        self._origination_urls = None
        self._credentials_lists = None
        self._ip_access_control_lists = None
        self._phone_numbers = None

    def fetch(self):
        """
        Fetch a TrunkInstance.

        :returns: Fetched TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
        """
        payload = self._version.fetch('GET', self._uri, params=values.of({}))

        return TrunkInstance(self._version, payload, sid=self._solution['sid'])

    def delete(self):
        """
        Deletes the TrunkInstance.

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete('delete', self._uri)

    def update(self, friendly_name=values.unset, domain_name=values.unset,
               disaster_recovery_url=values.unset,
               disaster_recovery_method=values.unset, recording=values.unset,
               secure=values.unset):
        """
        Update the TrunkInstance.

        :param unicode friendly_name: The friendly_name
        :param unicode domain_name: The domain_name
        :param unicode disaster_recovery_url: The disaster_recovery_url
        :param unicode disaster_recovery_method: The disaster_recovery_method
        :param unicode recording: The recording
        :param bool secure: The secure

        :returns: Updated TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
        """
        form = values.of({
            'FriendlyName': friendly_name,
            'DomainName': domain_name,
            'DisasterRecoveryUrl': disaster_recovery_url,
            'DisasterRecoveryMethod': disaster_recovery_method,
            'Recording': recording,
            'Secure': secure,
        })

        payload = self._version.update('POST', self._uri, data=form)

        return TrunkInstance(self._version, payload, sid=self._solution['sid'])

    @property
    def origination_urls(self):
        """
        Access the origination_urls.

        :rtype: twilio.rest.trunking.v1.trunk.origination_url.OriginationUrlList
        """
        if self._origination_urls is None:
            self._origination_urls = OriginationUrlList(
                self._version,
                trunk_sid=self._solution['sid'],
            )
        return self._origination_urls

    @property
    def credentials_lists(self):
        """
        Access the credentials_lists.

        :rtype: twilio.rest.trunking.v1.trunk.credential_list.CredentialListList
        """
        if self._credentials_lists is None:
            self._credentials_lists = CredentialListList(
                self._version,
                trunk_sid=self._solution['sid'],
            )
        return self._credentials_lists

    @property
    def ip_access_control_lists(self):
        """
        Access the ip_access_control_lists.

        :rtype: twilio.rest.trunking.v1.trunk.ip_access_control_list.IpAccessControlListList
        """
        if self._ip_access_control_lists is None:
            self._ip_access_control_lists = IpAccessControlListList(
                self._version,
                trunk_sid=self._solution['sid'],
            )
        return self._ip_access_control_lists

    @property
    def phone_numbers(self):
        """
        Access the phone_numbers.

        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberList
        """
        if self._phone_numbers is None:
            self._phone_numbers = PhoneNumberList(
                self._version,
                trunk_sid=self._solution['sid'],
            )
        return self._phone_numbers

    def __repr__(self):
        """
        Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.TrunkContext {}>'.format(details)
class TrunkInstance(InstanceResource):
    """A single SIP Trunk resource returned by the Trunking v1 API."""

    def __init__(self, version, payload, sid=None):
        """
        Initialize the TrunkInstance.

        :returns: twilio.rest.trunking.v1.trunk.TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
        """
        super(TrunkInstance, self).__init__(version)

        # Marshaled properties, unpacked from the raw API payload.
        self._properties = {
            'account_sid': payload['account_sid'],
            'domain_name': payload['domain_name'],
            'disaster_recovery_method': payload['disaster_recovery_method'],
            'disaster_recovery_url': payload['disaster_recovery_url'],
            'friendly_name': payload['friendly_name'],
            'secure': payload['secure'],
            'recording': payload['recording'],
            'auth_type': payload['auth_type'],
            'auth_type_set': payload['auth_type_set'],
            'date_created': deserialize.iso8601_datetime(payload['date_created']),
            'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
            'sid': payload['sid'],
            'url': payload['url'],
            'links': payload['links'],
        }

        # Context is built lazily; the sid from the payload is the fallback.
        self._context = None
        self._solution = {'sid': sid or self._properties['sid']}

    @property
    def _proxy(self):
        """
        Lazily build the TrunkContext that all instance actions delegate to.

        :returns: TrunkContext for this TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkContext
        """
        if self._context is None:
            self._context = TrunkContext(self._version, sid=self._solution['sid'])
        return self._context

    @property
    def account_sid(self):
        """:returns: The account_sid  :rtype: unicode"""
        return self._properties['account_sid']

    @property
    def domain_name(self):
        """:returns: The domain_name  :rtype: unicode"""
        return self._properties['domain_name']

    @property
    def disaster_recovery_method(self):
        """:returns: The disaster_recovery_method  :rtype: unicode"""
        return self._properties['disaster_recovery_method']

    @property
    def disaster_recovery_url(self):
        """:returns: The disaster_recovery_url  :rtype: unicode"""
        return self._properties['disaster_recovery_url']

    @property
    def friendly_name(self):
        """:returns: The friendly_name  :rtype: unicode"""
        return self._properties['friendly_name']

    @property
    def secure(self):
        """:returns: The secure  :rtype: bool"""
        return self._properties['secure']

    @property
    def recording(self):
        """:returns: The recording  :rtype: dict"""
        return self._properties['recording']

    @property
    def auth_type(self):
        """:returns: The auth_type  :rtype: unicode"""
        return self._properties['auth_type']

    @property
    def auth_type_set(self):
        """:returns: The auth_type_set  :rtype: unicode"""
        return self._properties['auth_type_set']

    @property
    def date_created(self):
        """:returns: The date_created  :rtype: datetime"""
        return self._properties['date_created']

    @property
    def date_updated(self):
        """:returns: The date_updated  :rtype: datetime"""
        return self._properties['date_updated']

    @property
    def sid(self):
        """:returns: The sid  :rtype: unicode"""
        return self._properties['sid']

    @property
    def url(self):
        """:returns: The url  :rtype: unicode"""
        return self._properties['url']

    @property
    def links(self):
        """:returns: The links  :rtype: unicode"""
        return self._properties['links']

    def fetch(self):
        """
        Fetch a TrunkInstance.

        :returns: Fetched TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
        """
        return self._proxy.fetch()

    def delete(self):
        """
        Deletes the TrunkInstance.

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()

    def update(self, friendly_name=values.unset, domain_name=values.unset,
               disaster_recovery_url=values.unset,
               disaster_recovery_method=values.unset, recording=values.unset,
               secure=values.unset):
        """
        Update the TrunkInstance.

        :param unicode friendly_name: The friendly_name
        :param unicode domain_name: The domain_name
        :param unicode disaster_recovery_url: The disaster_recovery_url
        :param unicode disaster_recovery_method: The disaster_recovery_method
        :param unicode recording: The recording
        :param bool secure: The secure

        :returns: Updated TrunkInstance
        :rtype: twilio.rest.trunking.v1.trunk.TrunkInstance
        """
        return self._proxy.update(
            friendly_name=friendly_name,
            domain_name=domain_name,
            disaster_recovery_url=disaster_recovery_url,
            disaster_recovery_method=disaster_recovery_method,
            recording=recording,
            secure=secure,
        )

    @property
    def origination_urls(self):
        """
        Access the origination_urls.

        :rtype: twilio.rest.trunking.v1.trunk.origination_url.OriginationUrlList
        """
        return self._proxy.origination_urls

    @property
    def credentials_lists(self):
        """
        Access the credentials_lists.

        :rtype: twilio.rest.trunking.v1.trunk.credential_list.CredentialListList
        """
        return self._proxy.credentials_lists

    @property
    def ip_access_control_lists(self):
        """
        Access the ip_access_control_lists.

        :rtype: twilio.rest.trunking.v1.trunk.ip_access_control_list.IpAccessControlListList
        """
        return self._proxy.ip_access_control_lists

    @property
    def phone_numbers(self):
        """
        Access the phone_numbers.

        :rtype: twilio.rest.trunking.v1.trunk.phone_number.PhoneNumberList
        """
        return self._proxy.phone_numbers

    def __repr__(self):
        """
        Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Trunking.V1.TrunkInstance {}>'.format(details)
| {
"content_hash": "a37ee299eeeb75d5fb60de55cb47f104",
"timestamp": "",
"source": "github",
"line_count": 646,
"max_line_length": 94,
"avg_line_length": 30.758513931888544,
"alnum_prop": 0.5873678912934072,
"repo_name": "angadpc/Alexa-Project-",
"id": "bea1201b3fa10d3e3e68e0f630aca8d0b8fea853",
"size": "19885",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twilio/rest/trunking/v1/trunk/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3097013"
},
{
"name": "Shell",
"bytes": "93"
}
],
"symlink_target": ""
} |
from Bot import Bot
from State import Move, State
import random
class RandomBot(Bot):
    """Bot that plays a uniformly random legal move."""

    def __init__(self, name):
        # Display name reported via get_name().
        self.name = name

    def get_name(self):
        """Return the bot's display name."""
        return self.name

    def move(self, state, symbol):
        """Pick a uniformly random empty cell inside a still-active mini-game.

        Enumerating the legal cells and choosing one (instead of
        rejection-sampling random coordinates until one is legal) yields the
        same uniform distribution over legal cells, but cannot loop forever:
        if no legal cell exists, random.choice raises IndexError instead of
        hanging.
        """
        board = state.get_board()
        legal_cells = [
            (x, y)
            for y in range(9)
            for x in range(9)
            if board[y, x] == 0 and not state.is_finished_minigame(x // 3, y // 3)
        ]
        x, y = random.choice(legal_cells)
        return Move(x, y)
"content_hash": "253421709242312b5bb949784143c92f",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 102,
"avg_line_length": 25.444444444444443,
"alnum_prop": 0.5698689956331878,
"repo_name": "m3rik/nn",
"id": "7b6175da34c0765081e45df943d43b8971177c54",
"size": "458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UltimateTicTacToe/RandomBot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "5890"
},
{
"name": "Python",
"bytes": "79580"
}
],
"symlink_target": ""
} |
import numpy as np
import cv2
from .trans_tform import trans_tform
from .uvMat_from_uvMap import uvMat_from_uvMap
from .check_valid_uv import check_valid_uv
from .prep_source_patch import prep_source_patch
from .patch_cost import patch_cost
from .update_uvMap import update_uvMap
from .src_domain_tform import src_domain_tform
from .draw_plane_id import draw_plane_id
from .scale_tform import scale_tform
def debug(debug_info):
    """Print a debug message.

    Bug fix: the original also executed ``print(a)`` where ``a`` was never
    defined, so every call raised NameError after printing the message.
    """
    print(debug_info)
def shape(a):
    """Print the shape of an array-like object.

    Bug fix: the original also executed ``print(b)`` where ``b`` was never
    defined, so every call raised NameError after printing the shape.
    """
    print(a.shape)
def propagate(trgPatch, img, NNF, modelPlane, option, iDirect):
    """One PatchMatch propagation sweep in direction ``iDirect``.

    For each "active" target pixel, the source transform of its neighbor in
    direction ``option.propDir[iDirect]`` is shifted and tried as a candidate;
    the candidate replaces the current nearest-neighbor-field entry when its
    patch cost is lower.  The sweep repeats until no pixel improves.

    Returns the updated ``NNF`` and the total number of pixel updates.
    """
    nUpdateTotal = 0
    uvPixN = NNF.uvPixN[iDirect]
    # Start with every pixel whose neighbor in this direction is valid.
    # NOTE(review): np.int was removed in NumPy >= 1.24 — would need plain int there.
    uvPixActiveInd = uvPixN.validInd.astype(np.int)
    numUpdatePix = NNF.uvPix.numPix
    while numUpdatePix != 0:
        # Slice out the data of the currently active pixels.  The throwaway
        # type(...) objects are plain attribute bags (MATLAB-struct style).
        uvPix = type("uvPix", (), {})
        uvPix.sub = NNF.uvPix.sub[uvPixActiveInd == 1, :].copy()
        uvPix.ind = NNF.uvPix.ind[uvPixActiveInd == 1].copy()
        uvPixNCur = type("uvPixNCur", (), {})
        uvPixNCur.sub = uvPixN.sub[uvPixActiveInd == 1, :].copy()
        uvPixNCur.ind = uvPixN.ind[uvPixActiveInd == 1].copy()
        uvDtBdPixPosCur = NNF.uvDtBdPixPos[uvPixActiveInd == 1].copy()
        trgPatchCur = trgPatch[:, :, uvPixActiveInd == 1].copy()
        srcPosCur = NNF.uvTform.data[uvPixActiveInd == 1, 6:8].copy()
        uvCostCur = NNF.uvCost.data[uvPixActiveInd == 1].copy()
        uvPlaneIDCur = NNF.uvPlaneID.data[uvPixActiveInd == 1].copy()
        srcPosMapCur = NNF.uvTform.map[:, :, 6:8].copy()
        uvPixActivePos = np.where(uvPixActiveInd == 1)[0]
        # Candidate transforms: the neighbor's transform shifted by the
        # propagation direction.
        uvTformCand = uvMat_from_uvMap(NNF.uvTform.map, uvPixNCur.ind)
        uvTformCand = trans_tform(uvTformCand, option.propDir[iDirect, :])
        # Keep candidates whose source position is valid and differs from the
        # current source by more than one pixel in x or y.
        uvValidSrcInd = check_valid_uv(uvTformCand[:, 6:8], NNF.validPix.mask)
        diff = np.abs(uvTformCand[:, 6:8] - srcPosCur)
        uvValidDistInd = (diff[:, 0:1] > 1) | (diff[:, 1:2] > 1)
        uvValidInd = (uvValidSrcInd == 1) & (uvValidDistInd == 1)
        numUvValid = np.sum(uvValidInd)
        if numUvValid != 0:
            uvPixValid = type("uvPixValid", (), {})
            uvPixValid.sub = uvPix.sub[uvValidInd.squeeze() == 1, :].copy()
            uvPixValid.ind = uvPix.ind[uvValidInd.squeeze()].copy()
            uvDtBdPixPosCur = uvDtBdPixPosCur[uvValidInd.squeeze()].copy()
            trgPatchCur = trgPatchCur[:, :, uvValidInd.squeeze()].copy()
            uvTformCand = uvTformCand[uvValidInd.squeeze(), :].copy()
            uvCostCur = uvCostCur[uvValidInd.squeeze()].copy()
            uvPlaneIDCand = uvPlaneIDCur[uvValidInd.squeeze()].copy()
            uvPixUpdatePos = uvPixActivePos[uvValidInd.squeeze()].copy()
            # Score candidate source patches against the target patches.
            srcPatch = prep_source_patch(img, uvTformCand, option)
            costPatchCandAll, uvBiasCand = patch_cost(trgPatchCur, srcPatch, modelPlane, uvPlaneIDCand,
                                                      uvPixValid.sub, uvTformCand, srcPosMapCur, uvDtBdPixPosCur,
                                                      option)
            costPatchCand = np.sum(costPatchCandAll, axis=1)
            # A candidate wins when its total cost beats the current cost.
            updateInd = costPatchCand < uvCostCur.squeeze()
            uvPixUpdatePos = uvPixUpdatePos[updateInd.squeeze()]
            numUpdatePix = uvPixUpdatePos.shape[0]
        else:
            numUpdatePix = 0
        if numUpdatePix != 0:
            nUpdateTotal += numUpdatePix
            # Commit the winning candidates into the flat arrays and the maps.
            NNF.uvTform.data[uvPixUpdatePos, :] = uvTformCand[updateInd, :]
            NNF.uvCost.data[uvPixUpdatePos] = costPatchCand[updateInd][..., None]
            NNF.uvPlaneID.data[uvPixUpdatePos] = uvPlaneIDCand[updateInd]
            if option.useBiasCorrection:
                NNF.uvBias.data[:, uvPixUpdatePos] = uvBiasCand[:, updateInd]
            uvPixValidInd = uvPixValid.ind[updateInd]
            NNF.uvTform.map = update_uvMap(NNF.uvTform.map, uvTformCand[updateInd, :], uvPixValidInd)
            NNF.uvCost.map = update_uvMap(NNF.uvCost.map, costPatchCand[updateInd][..., None], uvPixValidInd)
            if len(NNF.uvPlaneID.map.shape) == 2:
                NNF.uvPlaneID.map = NNF.uvPlaneID.map[..., None]
            NNF.uvPlaneID.map = update_uvMap(NNF.uvPlaneID.map,
                                             uvPlaneIDCand[updateInd][..., None], uvPixValidInd)
            # Updated pixels re-activate their downstream neighbors for the
            # next sweep of the while-loop.
            uvPixNextSub = uvPixValid.sub[updateInd, :].copy()
            uvPixNextSub = uvPixNextSub + option.propDir[iDirect, :][None, ...]
            updateMap = NNF.uvPix.mask
            updateMap[uvPixNextSub[:, 1], uvPixNextSub[:, 0]] = 0
            uvPixActiveInd = updateMap[NNF.uvPix.sub[:, 1], NNF.uvPix.sub[:, 0]] == 0
            uvPixActiveInd = (uvPixActiveInd == 1) & (uvPixN.validInd == 1)
    return NNF, nUpdateTotal
def random_search(trgPatch, img, NNF, modelPlane, option):
    """PatchMatch random-search step.

    For each unknown pixel, draws a random source position within a search
    radius that starts at half the image size and halves every iteration,
    fits a plane-induced transform to it, and accepts the candidate when its
    patch cost beats the current NNF entry.

    Returns the updated ``NNF`` and the total number of updates.
    """
    H, W, Ch = img.shape
    uvPix = NNF.uvPix
    numUvPix = uvPix.sub.shape[0]
    searchRad = max(H, W) / 2
    nUpdateTotal = 0
    # NOTE(review): `iter` shadows the builtin; it is only an (unused) counter here.
    iter = 0
    while searchRad > 1:
        iter += 1
        searchRad = searchRad / 2
        if searchRad < 1:
            break
        srcPosMapCur = NNF.uvTform.map[:, :, 6:8]
        uvTformCandCur = uvMat_from_uvMap(NNF.uvTform.map, uvPix.ind)
        # Uniform random offset in [-searchRad, searchRad] around the current source.
        srcPos = uvTformCandCur[:, 6:8] + 2 * searchRad * (np.random.rand(numUvPix, 2) - 0.5)
        # Draw a plane hypothesis per pixel and derive the candidate transform.
        uvPlaneIDCand = draw_plane_id(NNF.uvPlaneID.planeProbAcc)
        uvTformCand = src_domain_tform(uvPlaneIDCand.squeeze(), modelPlane, [], srcPos, NNF.uvPix.sub, 1)
        # Reject candidates with out-of-range scale or invalid source position.
        uvTformScale = scale_tform(uvTformCand)
        uvValidScaleInd = (uvTformScale.squeeze() > option.minScale) & (uvTformScale.squeeze() < option.maxScale)
        uvValidSrcInd = check_valid_uv(uvTformCand[:, 6:8], NNF.validPix.mask)
        uvValidInd = (uvValidSrcInd == 1).squeeze() & (uvValidScaleInd.squeeze() == 1).squeeze()
        # NOTE(review): if exactly one pixel is valid, .squeeze() yields a 0-d
        # array and .shape[0] raises IndexError — verify upstream guarantees.
        uvPixActivePos = np.array(np.where(uvValidInd.squeeze())).squeeze()
        numActPix = uvPixActivePos.shape[0]
        if numActPix != 0:
            # Slice out data for the valid candidates only.
            trgPatchCur = trgPatch[:, :, uvValidInd.squeeze()]
            uvCostDataCur = NNF.uvCost.data[uvValidInd]
            uvTformCandCur = uvTformCand[uvValidInd, :].copy()
            uvPlaneIDCandCur = uvPlaneIDCand[uvValidInd].squeeze()
            uvPixValid = type("uvPixValid", (), {})
            uvPixValid.sub = uvPix.sub[uvValidInd, :]
            uvPixValid.ind = uvPix.ind[uvValidInd]
            uvDtBdPixPosCur = NNF.uvDtBdPixPos[uvValidInd].copy()
            # Score the candidate source patches.
            srcPatch = prep_source_patch(img, uvTformCandCur, option)
            [costPatchCandAll, uvBiasCand] = patch_cost(trgPatchCur, srcPatch, modelPlane, uvPlaneIDCandCur,
                                                        uvPixValid.sub, uvTformCandCur, srcPosMapCur, uvDtBdPixPosCur, option)
            costPatchCand = np.sum(costPatchCandAll, axis=1)
            updateInd = (costPatchCand.squeeze() < uvCostDataCur.squeeze())
            nUpdate = np.sum(updateInd)
            if nUpdate != 0:
                # Commit winners into both the flat arrays and the maps.
                uvPixActivePos = uvPixActivePos[updateInd]
                nUpdateTotal = nUpdateTotal + nUpdate
                NNF.uvTform.data[uvPixActivePos, :] = uvTformCandCur[updateInd, :].copy()
                NNF.uvPlaneID.data[uvPixActivePos] = uvPlaneIDCandCur[updateInd].copy()
                NNF.uvCost.data[uvPixActivePos] = costPatchCand[updateInd][..., None].copy()
                if option.useBiasCorrection:
                    NNF.uvBias.data[:, uvPixActivePos] = uvBiasCand[:, updateInd]
                uvPixValidInd = uvPixValid.ind[updateInd].copy()
                NNF.uvTform.map = update_uvMap(NNF.uvTform.map, uvTformCandCur[updateInd, :], uvPixValidInd)
                NNF.uvPlaneID.map = update_uvMap(NNF.uvPlaneID.map, uvPlaneIDCandCur[updateInd][..., None],
                                                 uvPixValidInd)
                NNF.uvCost.map = update_uvMap(NNF.uvCost.map, costPatchCand[updateInd][..., None],
                                              uvPixValidInd)
    return NNF, nUpdateTotal
def update_NNF(trgPatch, img, NNF, modelPlane, modelReg, option):
    """Run one NNF optimization iteration: per pass, propagation in all four
    directions followed by one random-search step.

    Fixes: renamed the misspelled local ``iDierct`` to ``iDirect`` (matching
    the parameter name in ``propagate``) and dropped the unused pass index.
    ``modelReg`` is accepted for interface compatibility but is unused here.

    Returns the updated ``NNF`` and a 3-vector of update counts
    (``[propagation, random search, unused]``).
    """
    nUpdate = np.zeros((3))
    for _ in range(option.numPassPerIter):
        for iDirect in range(4):
            NNF, n = propagate(trgPatch, img, NNF, modelPlane, option, iDirect)
            nUpdate[0] += n
        NNF, n = random_search(trgPatch, img, NNF, modelPlane, option)
        nUpdate[1] += n
    return NNF, nUpdate
"content_hash": "6045dd8e88a35388963a85cb10b9c930",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 113,
"avg_line_length": 40.231884057971016,
"alnum_prop": 0.6101104707012488,
"repo_name": "takahiromorita/heroku-python-flask5",
"id": "37f4191b541f15fd44c1424df244981cfa53918a",
"size": "8328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/synthesis/update_NNF.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "4982"
},
{
"name": "JavaScript",
"bytes": "1419"
},
{
"name": "Python",
"bytes": "66604"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import with_statement
from ..base import ServerFrame
class Connected(ServerFrame):
    """Abstract base for the server-sent CONNECTED frame; concrete
    per-protocol-version subclasses follow below."""
    # pylint: disable=no-init
    abstract = True
class Connected10(Connected):
    """CONNECTED frame for protocol version 1.0.

    Header-value escaping is disabled for this version.
    """
    # pylint: disable=no-init
    version = '1.0'
    verb = 'CONNECTED'
    escape_headers = False
class Connected11(Connected10):
    """CONNECTED frame for protocol version 1.1; the 'version' header is
    required."""
    # pylint: disable=no-init
    version = '1.1'
    headers_required = ('version',)
class Connected12(Connected11):
    """CONNECTED frame for protocol version 1.2; inherits all behavior
    from the 1.1 frame."""
    # pylint: disable=no-init
    version = '1.2'
| {
"content_hash": "a616e954afab9f0c3b61ee46c2ad13e3",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 39,
"avg_line_length": 21.233333333333334,
"alnum_prop": 0.6907378335949764,
"repo_name": "skippyprime/stimpi",
"id": "a6d48706bfa206b65c5081f40ebef67a3c4785de",
"size": "1217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stimpi/frames/impl/connected.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "107556"
}
],
"symlink_target": ""
} |
from supriya.tools.ugentools.PureUGen import PureUGen
class Filter(PureUGen):
    r'''Abstract base class for filter ugens.

    Filter ugens must run at the same calculation rate as their first
    input; `_validate_inputs` enforces this invariant.
    '''

    ### CLASS VARIABLES ###

    __documentation_section__ = 'Filter UGens'

    __slots__ = ()

    ### PRIVATE METHODS ###

    def _validate_inputs(self):
        # A filter's rate must match the rate of the signal it filters.
        self._check_rate_same_as_first_input_rate()
"content_hash": "14275386b8745028e254b271fcc321aa",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 53,
"avg_line_length": 20.235294117647058,
"alnum_prop": 0.627906976744186,
"repo_name": "andrewyoung1991/supriya",
"id": "54b44a592106fc55ef939b122b031f77ed2e0260",
"size": "370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "supriya/tools/ugentools/Filter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6712"
},
{
"name": "CSS",
"bytes": "446"
},
{
"name": "HTML",
"bytes": "1083"
},
{
"name": "JavaScript",
"bytes": "6163"
},
{
"name": "Makefile",
"bytes": "6775"
},
{
"name": "Python",
"bytes": "2693776"
}
],
"symlink_target": ""
} |
"""add post
Revision ID: f661af5e4d2a
Revises: 49c5959628c4
Create Date: 2017-07-01 11:14:18.714676
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f661af5e4d2a'
down_revision = '49c5959628c4'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``posts`` table and an index on its timestamp column."""
    op.create_table(
        'posts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_posts_timestamp'), 'posts', ['timestamp'], unique=False)
def downgrade():
    """Revert upgrade(): drop the timestamp index, then the posts table."""
    op.drop_index(op.f('ix_posts_timestamp'), table_name='posts')
    op.drop_table('posts')
| {
"content_hash": "b9e7542b1decad5a6c681358cbdb91af",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 85,
"avg_line_length": 28.45945945945946,
"alnum_prop": 0.6628679962013295,
"repo_name": "keer2345/flasky",
"id": "f72d53d9337d5a2321f3d4fc6c781e88052de61f",
"size": "1053",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/versions/f661af5e4d2a_add_post.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1952"
},
{
"name": "HTML",
"bytes": "22007"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "55640"
}
],
"symlink_target": ""
} |
import argparse
import sys
import logging
import os
import gzip
import subprocess
DEBUG=False
NotDEBUG=not DEBUG

parser = argparse.ArgumentParser(description="Combine gCNV from GATK4 cohort pipeline",
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)

parser.add_argument('-i', '--input', action='store', nargs='?', help='Input gCNV files', required=NotDEBUG)
parser.add_argument('-o', '--output', action='store', nargs='?', help="Output file file", required=NotDEBUG)
parser.add_argument('-b', '--bedfile', action='store', nargs='?', help="Interval file in bed format", required=NotDEBUG)
# Bug fix: without type=int, values supplied on the command line arrive as
# strings and break the numeric comparisons / multiplication performed later
# (e.g. `diffScore < args.minimumScoreDifference`); the defaults were ints,
# so the breakage only showed when a user actually passed -s or -f.
parser.add_argument('-s', '--minimumScoreDifference', action='store', type=int, nargs='?', help="The minimum phred-scaled log posterior score difference between CNV event and normal event", default=30)
parser.add_argument('-f', '--minimumDuplicationFold', action='store', type=int, nargs='?', help="The minimum copy number fold change as duplication", default=2)
parser.add_argument('-p', '--percentage', action='store', default=0.9, type=float, nargs='?', help='Max sample percentage allowed')
parser.add_argument('--annovar_db', action='store', nargs='?', help='Annovar database folder')
parser.add_argument('--annovar_buildver', action='store', nargs='?', help='Annovar genome buildver')

args = parser.parse_args()
# Hard-coded local paths used only when DEBUG is enabled above.
if DEBUG:
    args.input = "T:/Shared/Labs/Linton Lab/20180913_linton_exomeseq_2118_human_cutadapt/GATK4_CNV_Germline_CombineGCNV/result/linton_exomeseq_2118__fileList1.list"
    args.output = "T:/Shared/Labs/Linton Lab/20180913_linton_exomeseq_2118_human_cutadapt/GATK4_CNV_Germline_CombineGCNV/result/linton_exomeseq_2118.tsv"
    args.bedfile = "T:/Shared/Labs/Linton Lab/20180913_linton_exomeseq_2118_human_cutadapt/xgen-exome-research-panel-targetsae255a1532796e2eaa53ff00001c1b3c.nochr.bed"

logger = logging.getLogger('combineGCNV')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')

# Input list file: one "path<TAB>sample name" per line -> sample name to VCF path.
fileMap = {}
with open(args.input) as fh:
    for line in fh:
        filepath, name = line.strip().split('\t', 1)
        fileMap[name] = filepath.strip()

# Read each sample's gzipped gCNV VCF.  vcf1 keeps the full records of the
# FIRST sample (one per interval, shared interval layout assumed across
# samples); vcfMap keeps only the genotype column (10th field) per sample.
samples = []
vcf1 = []
vcfMap = {}
bFirst = True
for name in fileMap.keys():
    filepath=fileMap[name]
    logger.info("reading " + name + " ...")
    samples.append(name)
    with gzip.open(filepath, "rt") as fh:
        lines = []
        for line in fh:
            if line.startswith('#'):
                continue
            parts = line.rstrip().split('\t')
            if bFirst:
                vcf1.append(parts)
            lines.append(parts[9])
        vcfMap[name] = lines
    bFirst = False
    #if len(samples) == 2:
    #  break

# BED file -> per-chromosome list of [start, end, name]; a locus name is
# synthesized as "chrom:start-end" when the 4th column is absent.
logger.info("reading " + args.bedfile + " ...")
annotationMap = {}
with open(args.bedfile, "r") as fin:
    for line in fin:
        parts = line.rstrip().split('\t')
        if not parts[0] in annotationMap:
            chrList = []
            annotationMap[parts[0]] = chrList
        else:
            chrList = annotationMap[parts[0]]
        if(len(parts) >= 4):
            chrList.append([int(parts[1]), int(parts[2]), parts[3]])
        else:
            chrList.append([int(parts[1]), int(parts[2]), "%s:%s-%s" % (parts[0], parts[1], parts[2])])
samples = sorted(samples)
# samples = sorted(fileMap.keys())

# Two outputs: the combined per-interval CNV table and an ANNOVAR input file.
combinedFile = args.output + ".combined.txt"
annovarInputFile = args.output + ".avinput"
logger.info("outputing to " + combinedFile + " ...")
with open(annovarInputFile, "w") as fav:
    with open(combinedFile, "w") as fout:
        fout.write("#Chrom\tStart\tEnd\tName\tGene\tcytoBand\t%s\n" % "\t".join(samples))
        intervalCount = len(vcf1)
        for idx in range(0, intervalCount):
            chrom = vcf1[idx][0]
            # One cell per sample: "" for no call, else "TYPE,GT,CN,score".
            values = []
            for sample in samples:
                gt = vcfMap[sample][idx]
                if (gt.startswith("0:")):
                    values.append("")
                else:
                    # Genotype field layout assumed: GT:CN:scores (comma list).
                    parts = gt.split(":")
                    cn = int(parts[1])
                    # Expected copy number: 2 for autosomes, 1 for X/Y.
                    # NOTE(review): chrX in females would also be 2 — the
                    # sex of the sample is not considered here.
                    expectCN = 2
                    if chrom == 'X' or chrom == 'Y':
                        expectCN = 1
                    scores = parts[2].split(",")
                    cnScore = int(scores[cn])
                    expectScore = int(scores[expectCN])
                    diffScore = expectScore - cnScore
                    if cn > expectCN:
                        # Duplications must reach the minimum fold change.
                        if cn < expectCN * args.minimumDuplicationFold:
                            values.append("")
                            continue
                        cnType = "DUP"
                    else:
                        cnType = "DEL"
                    # Drop weak calls below the score-difference threshold.
                    if expectScore - cnScore < args.minimumScoreDifference:
                        values.append("")
                    else:
                        values.append("%s,%s,%s,%d" % (cnType, parts[0], cn, diffScore))
            # Skip intervals where no sample has a retained CNV call.
            if (all(gt == "" for gt in values)):
                continue
            chrom = vcf1[idx][0]
            start = int(vcf1[idx][1])
            # INFO field assumed to be "END=<pos>"; strip the 4-char prefix.
            end = int(vcf1[idx][7][4:])
            # Pick the BED locus nearest to [start, end] as the interval name.
            annotation = "";
            annList = annotationMap[vcf1[idx][0]]
            for idx, ann in enumerate(annList):
                if ann[1] < start:
                    continue
                if ann[0] > end:
                    if idx == 0:
                        annotation = ann[2]
                    elif (ann[0] - end) < (start - annList[idx-1][1]):
                        annotation = ann[2]
                    else:
                        annotation = annList[idx-1][2]
                else:
                    annotation = ann[2]
                break
            # Only report intervals called in fewer than `percentage` of
            # samples (ubiquitous events are treated as artifacts).
            cnvs = [v for v in values if v != ""]
            if len(cnvs) < len(values) * args.percentage:
                fout.write("%s\t%d\t%d\t%s\t\t\t%s\n" % (chrom, start, end, annotation, "\t".join(values)))
                fav.write("%s\t%d\t%d\t0\t0\n" % (chrom, start, end))
# Without an ANNOVAR database the combined table IS the final output;
# otherwise annotate gene / cytoband columns via table_annovar.pl.
if args.annovar_db == None:
    if os.path.isfile(args.output):
        os.remove(args.output)
    os.rename(combinedFile, args.output)
else:
    logger.info("performing annovar ...")
    annovarOutput = annovarInputFile + ".annovar"
    subprocess.call(['table_annovar.pl', annovarInputFile, args.annovar_db, '-buildver', args.annovar_buildver, '-protocol', 'refGene,cytoBand','-operation', 'g,r', '--remove', '--outfile', annovarOutput])
    annovarOutputFile = annovarOutput + ".%s_multianno.txt" % args.annovar_buildver
    # Merge line-by-line: the ANNOVAR output is assumed to have exactly one
    # row per data row of the combined file, in the same order.
    with open(args.output, "wt") as fout:
        with open(combinedFile, "rt") as fin:
            fout.write(fin.readline())
            with open(annovarOutputFile, "rt") as fann:
                fann.readline()
                for line in fin:
                    lineAnno = fann.readline()
                    annoParts = lineAnno.split('\t')
                    parts = line.split('\t')
                    # Fill the Gene (col 5) and cytoBand (col 6) placeholders.
                    parts[4] = annoParts[6]
                    parts[5] = annoParts[10].rstrip()
                    fout.write("\t".join(parts))
    os.remove(annovarOutputFile)
    os.remove(combinedFile)

logger.info("done.")
| {
"content_hash": "1f7cb87935997d875bee150a175107dc",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 203,
"avg_line_length": 38.576470588235296,
"alnum_prop": 0.6001829826166514,
"repo_name": "shengqh/ngsperl",
"id": "4d3b057ff87ced704765eb219b0bff0566cd284f",
"size": "6558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/GATK4/combineGCNV.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "639"
},
{
"name": "Jupyter Notebook",
"bytes": "34901"
},
{
"name": "Perl",
"bytes": "2299329"
},
{
"name": "Python",
"bytes": "629212"
},
{
"name": "R",
"bytes": "993768"
},
{
"name": "Shell",
"bytes": "6043"
},
{
"name": "wdl",
"bytes": "5959"
}
],
"symlink_target": ""
} |
import pathlib
import re
from typing import Any, BinaryIO, cast, Dict, List, Optional, Tuple, Union
import numpy as np
from torchdata.datapipes.iter import Demultiplexer, Filter, IterDataPipe, IterKeyZipper, LineReader, Mapper
from torchvision.prototype.datasets.utils import Dataset, EncodedImage, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
getitem,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
path_accessor,
path_comparator,
read_categories_file,
read_mat,
)
from torchvision.prototype.features import _Feature
from .._api import register_dataset, register_info
NAME = "sbd"
@register_info(NAME)
def _info() -> Dict[str, Any]:
    """Static dataset info: the SBD category names read from disk."""
    return {"categories": read_categories_file(NAME)}
@register_dataset(NAME)
class SBD(Dataset):
"""
- **homepage**: http://home.bharathh.info/pubs/codes/SBD/download.html
- **dependencies**:
- <scipy `https://scipy.org`>_
"""
def __init__(
    self,
    root: Union[str, pathlib.Path],
    *,
    split: str = "train",
    skip_integrity_check: bool = False,
) -> None:
    """Create the SBD dataset.

    Args:
        root: Root directory where the dataset is stored / downloaded.
        split: One of ``"train"``, ``"val"`` or ``"train_noval"``.
        skip_integrity_check: Forwarded to the base ``Dataset``.
    """
    self._split = self._verify_str_arg(split, "split", ("train", "val", "train_noval"))
    self._categories = _info()["categories"]
    # scipy is required to read the .mat annotation files (see _prepare_sample).
    super().__init__(root, dependencies=("scipy",), skip_integrity_check=skip_integrity_check)
def _resources(self) -> List[OnlineResource]:
resources = [
HttpResource(
"https://www2.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/semantic_contours/benchmark.tgz",
sha256="6a5a2918d5c73ce032fdeba876574d150d9d04113ab87540a1304cbcc715be53",
)
]
if self._split == "train_noval":
resources.append(
HttpResource(
"http://home.bharathh.info/pubs/codes/SBD/train_noval.txt",
sha256="0b2068f7a359d2907431803e1cd63bf6162da37d7d503b589d3b08c6fd0c2432",
)
)
return resources # type: ignore[return-value]
def _classify_archive(self, data: Tuple[str, Any]) -> Optional[int]:
path = pathlib.Path(data[0])
parent, grandparent, *_ = path.parents
if grandparent.name == "dataset":
if parent.name == "img":
return 0
elif parent.name == "cls":
return 1
if parent.name == "dataset" and self._split != "train_noval":
return 2
return None
def _prepare_sample(self, data: Tuple[Tuple[Any, Tuple[str, BinaryIO]], Tuple[str, BinaryIO]]) -> Dict[str, Any]:
split_and_image_data, ann_data = data
_, image_data = split_and_image_data
image_path, image_buffer = image_data
ann_path, ann_buffer = ann_data
anns = read_mat(ann_buffer, squeeze_me=True)["GTcls"]
return dict(
image_path=image_path,
image=EncodedImage.from_file(image_buffer),
ann_path=ann_path,
# the boundaries are stored in sparse CSC format, which is not supported by PyTorch
boundaries=_Feature(np.stack([raw_boundary.toarray() for raw_boundary in anns["Boundaries"].item()])),
segmentation=_Feature(anns["Segmentation"].item()),
)
def _datapipe(self, resource_dps: List[IterDataPipe]) -> IterDataPipe[Dict[str, Any]]:
if self._split == "train_noval":
archive_dp, split_dp = resource_dps
images_dp, anns_dp = Demultiplexer(
archive_dp,
2,
self._classify_archive,
buffer_size=INFINITE_BUFFER_SIZE,
drop_none=True,
)
else:
archive_dp = resource_dps[0]
images_dp, anns_dp, split_dp = Demultiplexer(
archive_dp,
3,
self._classify_archive,
buffer_size=INFINITE_BUFFER_SIZE,
drop_none=True,
)
split_dp = Filter(split_dp, path_comparator("name", f"{self._split}.txt"))
split_dp = LineReader(split_dp, decode=True)
split_dp = hint_shuffling(split_dp)
split_dp = hint_sharding(split_dp)
dp = split_dp
for level, data_dp in enumerate((images_dp, anns_dp)):
dp = IterKeyZipper(
dp,
data_dp,
key_fn=getitem(*[0] * level, 1),
ref_key_fn=path_accessor("stem"),
buffer_size=INFINITE_BUFFER_SIZE,
)
return Mapper(dp, self._prepare_sample)
def __len__(self) -> int:
return {
"train": 8_498,
"val": 2_857,
"train_noval": 5_623,
}[self._split]
def _generate_categories(self) -> Tuple[str, ...]:
resources = self._resources()
dp = resources[0].load(self._root)
dp = Filter(dp, path_comparator("name", "category_names.m"))
dp = LineReader(dp)
dp = Mapper(dp, bytes.decode, input_col=1)
lines = tuple(zip(*iter(dp)))[1]
pattern = re.compile(r"\s*'(?P<category>\w+)';\s*%(?P<label>\d+)")
categories_and_labels = cast(
List[Tuple[str, ...]],
[
pattern.match(line).groups() # type: ignore[union-attr]
# the first and last line contain no information
for line in lines[1:-1]
],
)
categories_and_labels.sort(key=lambda category_and_label: int(category_and_label[1]))
categories, _ = zip(*categories_and_labels)
return categories
| {
"content_hash": "a067ce9aaafa77dcf9524cd6816d6cc1",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 118,
"avg_line_length": 34.644171779141104,
"alnum_prop": 0.570391358243315,
"repo_name": "pytorch/vision",
"id": "01dd1d888f5bcb7acb3c717e940d6e8b192fd0f4",
"size": "5647",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "torchvision/prototype/datasets/_builtin/sbd.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "20242"
},
{
"name": "C",
"bytes": "930"
},
{
"name": "C++",
"bytes": "366825"
},
{
"name": "CMake",
"bytes": "18266"
},
{
"name": "Cuda",
"bytes": "90174"
},
{
"name": "Dockerfile",
"bytes": "1608"
},
{
"name": "Java",
"bytes": "21833"
},
{
"name": "Objective-C",
"bytes": "2715"
},
{
"name": "Objective-C++",
"bytes": "3284"
},
{
"name": "PowerShell",
"bytes": "2874"
},
{
"name": "Python",
"bytes": "3952070"
},
{
"name": "Ruby",
"bytes": "1086"
},
{
"name": "Shell",
"bytes": "35660"
}
],
"symlink_target": ""
} |
import random
from collections import defaultdict
from giant.giant_base.giant_base import GiantError
def raise_(ex):
    """Raise *ex*; lets a ``raise`` appear where only an expression is allowed (e.g. inside a lambda)."""
    raise ex
# Maps a swagger type (outer key) and format (inner defaultdict key, with a
# per-type fallback) to a callable that renders a C# example expression for
# an *enum* schema. Every callable takes the enum's value list as its single
# argument.
swagger_to_csharp_enum_example_map = {
    'string': defaultdict(lambda: lambda enum: '"' + random.choice(enum) + '";',
        {
            'guid': lambda enum: 'new Guid(' + random.choice(enum) + ');',
            'date': lambda enum: 'DateTime.parse(' + random.choice(enum) + ');',
            'date-time': lambda enum: 'DateTime.parse(' + random.choice(enum) + ');',
            'byte': lambda enum: raise_(GiantError('Shiver me timbers, I can\'t parse a enum byte type. Implement it yerself!')),
            'binary': lambda enum: raise_(GiantError('Shiver me timbers, I can\'t parse a enum binary type. Implement it yerself!')),
            'password': lambda enum: random.choice(enum)
        }
    ),
    'integer': defaultdict(lambda: lambda enum: str(random.choice(enum)) + ';',
        {
            'int32': lambda enum: str(random.choice(enum)) + ';',
            'int64': lambda enum: str(random.choice(enum)) + ';'
        }
    ),
    'number': defaultdict(lambda: lambda enum: str(random.choice(enum)) + ';',
        {
            'float': lambda enum: str(random.choice(enum)) + ';',
            'double': lambda enum: str(random.choice(enum)) + ';'
        }
    ),
    # Bug fix: the boolean default factory used to produce a zero-argument
    # lambda that referenced an undefined name ``enum``; it now takes the
    # enum list like every other entry in this map.
    'boolean': defaultdict(lambda: lambda enum: str(random.choice(enum)) + ';')
}
def example_integer(schema):
    """Pick a random integer consistent with the schema's minimum/maximum/multipleOf."""
    lo = schema.get('minimum', 1)
    hi = schema.get('maximum', lo + 100)
    step = schema.get('multipleOf', 1)
    candidates = range(lo, hi, step)
    return random.choice(candidates)
def example_float(schema):
    """Return a random float honoring the schema, snapped to multipleOf, as a string."""
    lo = schema.get('minimum', 0.0)
    hi = schema.get('maximum', 100.0)
    step = schema.get('multipleOf', 0.01)
    raw = random.uniform(lo, hi)
    snapped = round(raw / step) * step
    return str(snapped)
# Maps a swagger type (outer key) and format (inner defaultdict key, with a
# per-type fallback) to a callable that renders a C# example expression for a
# plain (non-enum) schema. Each callable receives the schema dict.
swagger_to_csharp_example_map = {
    'string': defaultdict(lambda: lambda schema: '"ExampleString";',
        {
            'guid': lambda schema: 'new Guid();',
            'date': lambda schema: 'new DateTime();',
            'date-time': lambda schema: 'new DateTime();',
            'byte': lambda schema: 'new byte[10];',
            'binary': lambda schema: 'new byte[10];',
            'password': lambda schema: '"thepasswordispassword"'
        }
    ),
    'integer': defaultdict(lambda: lambda schema: str(example_integer(schema)) + ';',
        {
            'int32': lambda schema: str(example_integer(schema)) + ';',
            'int64': lambda schema: str(example_integer(schema)) + ';'
        }
    ),
    'number': defaultdict(lambda: lambda schema: str(example_float(schema)) + ';',
        {
            'float': lambda schema: str(example_float(schema)) + ';',
            'double': lambda schema: str(example_float(schema)) + ';'
        }
    ),
    'boolean': defaultdict(lambda: lambda schema: random.choice(('true;', 'false;')))
}
"content_hash": "1de3c086db23af4642736103c5b75902",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 133,
"avg_line_length": 41.140845070422536,
"alnum_prop": 0.5809654228004109,
"repo_name": "lixar/giant",
"id": "a657d3e0eb2647f88f53c91cb00f82cb8f4e7c5b",
"size": "2944",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "giant/plugins/servers/web_api_2/web_api_2/examples.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "158865"
},
{
"name": "Python",
"bytes": "80985"
}
],
"symlink_target": ""
} |
"""empty message
Revision ID: f6eab383c7de
Revises: None
Create Date: 2016-07-22 11:01:28.783123
"""
# revision identifiers, used by Alembic.
revision = 'f6eab383c7de'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial schema: ``book``, ``borrow`` and ``user`` tables."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): several column names look misspelled ('genter' ~ 'genre',
    # 'id_borrowe' ~ 'id_borrower', 'birtday' ~ 'birthday'). They are kept
    # as-is: renaming columns in an already-applied migration requires a new
    # migration, not an edit of this one.
    op.create_table('book',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('title', sa.String(), nullable=True),
    sa.Column('author', sa.String(), nullable=True),
    sa.Column('publisher', sa.String(), nullable=True),
    sa.Column('genter', sa.String(), nullable=True),
    sa.Column('isbn', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Join table recording who lent which book to whom and the due date.
    op.create_table('borrow',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('id_lender', sa.Integer(), nullable=False),
    sa.Column('id_borrowe', sa.Integer(), nullable=False),
    sa.Column('id_book', sa.Integer(), nullable=False),
    sa.Column('final_date', sa.Date(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(), nullable=False),
    sa.Column('password', sa.String(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.Column('cpf', sa.String(), nullable=True),
    sa.Column('gender', sa.String(), nullable=True),
    sa.Column('birtday', sa.Date(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email')
    )
    ### end Alembic commands ###
def downgrade():
    """Revert the initial schema by dropping the tables in reverse creation order."""
    ### commands auto generated by Alembic - please adjust! ###
    for table_name in ('user', 'borrow', 'book'):
        op.drop_table(table_name)
    ### end Alembic commands ###
| {
"content_hash": "5319e280168c614df3348c3e102bc148",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 63,
"avg_line_length": 31.854545454545455,
"alnum_prop": 0.6478310502283106,
"repo_name": "leonardowolf/bookfree",
"id": "819fe017a56431f82b495f3ddf7ca385e6bfda37",
"size": "1752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/versions/f6eab383c7de_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "11878"
},
{
"name": "CSS",
"bytes": "12462"
},
{
"name": "HTML",
"bytes": "478"
},
{
"name": "JavaScript",
"bytes": "12374"
},
{
"name": "Mako",
"bytes": "18186"
},
{
"name": "Python",
"bytes": "15929562"
},
{
"name": "Shell",
"bytes": "3269"
}
],
"symlink_target": ""
} |
from contextlib import contextmanager
import logging
from robot.utils import get_error_details, safe_str
from . import librarylogger
# Mapping from Robot Framework log level names to stdlib ``logging`` levels.
# TRACE has no stdlib counterpart, so it maps to NOTSET (log everything).
LEVELS = {'TRACE': logging.NOTSET,
          'DEBUG': logging.DEBUG,
          'INFO': logging.INFO,
          'WARN': logging.WARNING,
          'ERROR': logging.ERROR}
@contextmanager
def robot_handler_enabled(level):
    """Context manager that routes stdlib ``logging`` records to Robot Framework.

    Installs a RobotHandler on the root logger for the duration of the block.
    If one is already installed (nested use), this is a no-op.
    """
    root = logging.getLogger()
    if any(isinstance(h, RobotHandler) for h in root.handlers):
        yield
        return
    handler = RobotHandler()
    # Silence logging's own error reporting while our handler is installed,
    # and restore the previous setting on exit.
    old_raise = logging.raiseExceptions
    root.addHandler(handler)
    logging.raiseExceptions = False
    set_level(level)
    try:
        yield
    finally:
        root.removeHandler(handler)
        logging.raiseExceptions = old_raise
def set_level(level):
    """Set the root logger's level from a Robot Framework level name.

    Unknown level names are silently ignored.
    """
    numeric_level = LEVELS.get(level.upper())
    if numeric_level is None:
        return
    logging.getLogger().setLevel(numeric_level)
class RobotHandler(logging.Handler):
    """``logging`` handler that forwards records to Robot Framework's log."""
    def __init__(self, level=logging.NOTSET, library_logger=librarylogger):
        # ``library_logger`` is injectable, mainly for testing.
        super().__init__(level)
        self.library_logger = library_logger
    def emit(self, record):
        """Write the formatted record via the Robot method matching its level."""
        message, error = self._get_message(record)
        method = self._get_logger_method(record.levelno)
        method(message)
        if error:
            self.library_logger.debug(error)
    def _get_message(self, record):
        # Formatting can fail (e.g. bad %-args); degrade to a diagnostic
        # message plus the traceback instead of raising from the handler.
        try:
            return self.format(record), None
        except:
            message = 'Failed to log following message properly: %s' \
                      % safe_str(record.msg)
            error = '\n'.join(get_error_details())
            return message, error
    def _get_logger_method(self, level):
        # Map stdlib numeric levels to Robot's logger methods; anything
        # below DEBUG becomes TRACE.
        if level >= logging.ERROR:
            return self.library_logger.error
        if level >= logging.WARNING:
            return self.library_logger.warn
        if level >= logging.INFO:
            return self.library_logger.info
        if level >= logging.DEBUG:
            return self.library_logger.debug
        return self.library_logger.trace
| {
"content_hash": "7542200459252a46c2f70a91a0d1d787",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 75,
"avg_line_length": 27.863013698630137,
"alnum_prop": 0.6170108161258604,
"repo_name": "HelioGuilherme66/robotframework",
"id": "fdccb16329dd920a1dfd74058a9d4edecbf5761d",
"size": "2678",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/robot/output/pyloggingconf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "44706"
},
{
"name": "HTML",
"bytes": "86409"
},
{
"name": "JavaScript",
"bytes": "162950"
},
{
"name": "Python",
"bytes": "2671114"
},
{
"name": "RobotFramework",
"bytes": "1231105"
}
],
"symlink_target": ""
} |
"""
GOOGLE PLUS APP
This module describes the GooglePlusUser model.
Classes:
GooglePlusUser
Functions:
n/a
Created on 08 Nov 2013
@author: michael
"""
from django.conf import settings
from django.db import models
class GooglePlusUser(models.Model):
    """Stores Google+ OAuth credentials, linked one-to-one to a Django user."""
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL, related_name='google_plus_user'
    )
    # Google's identifier for the account.
    google_user_id = models.CharField(max_length=255)
    access_token = models.TextField()
    # A refresh token is only issued on the first authorization, hence optional.
    refresh_token = models.TextField(blank=True, null=True)
    id_token = models.TextField()
    # Raw token-endpoint response, kept for debugging/auditing.
    token_response = models.TextField()
    access_token_expiry_timestamp = models.DateTimeField()
    def __unicode__(self):
        """Return the linked user's unicode representation."""
        return u'%s' % self.user
    def update_access_token(self, access_token, access_token_expiry_timestamp,
                            refresh_token, id_token, token_response):
        """Replace all stored OAuth tokens/expiry and persist the model."""
        self.access_token = access_token
        self.access_token_expiry_timestamp = access_token_expiry_timestamp
        self.refresh_token = refresh_token
        self.id_token = id_token
        self.token_response = token_response
        self.save()
| {
"content_hash": "a0258dc379afecebdcecce7e26eb10eb",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 79,
"avg_line_length": 27.543478260869566,
"alnum_prop": 0.665351223362273,
"repo_name": "unomena/tunobase",
"id": "be614c2985ced64e19be5ca91bb458a3d356116a",
"size": "1267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tunobase/social_media/google_plus/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "47213"
},
{
"name": "Python",
"bytes": "780960"
}
],
"symlink_target": ""
} |
import os
import logging
import hiplogging
# Demo script: attach a HipChat handler next to a console handler and emit
# a few messages at each level. Requires HIPCHAT_ACCESS_TOKEN and
# HIPCHAT_ROOM in the environment.
# Set up a standard logger
logger = logging.getLogger('hipchat')
logger.setLevel(logging.DEBUG)
# Add the standard logging to stderr
logger.addHandler(logging.StreamHandler())
# Add the hipchat handler
# Get an access token from: https://<YOUR_HIPCHAT_NAME>.hipchat.com/admin/api
handler = hiplogging.HipChatHandler(os.environ['HIPCHAT_ACCESS_TOKEN'],
                                    os.environ['HIPCHAT_ROOM'])
# Additionally, you can add an optional third argument `environment`, if your
# host your HipChat server.
# i.e.: https//hipchat.yourdomain.com
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
# Try it out: messages will be visible both in the console and on hipchat.
# NOTE(review): logger.warn and logger.fatal are deprecated aliases for
# warning() and critical(); kept here as-is for the demo.
logger.debug('debug - we are approaching the anomaly')
logger.info('info - shields up, red alert!')
logger.warn('warn - shield down to 15%')
logger.fatal('fatal - what shields?')
# You can set your own colors and the sender name using extra with logging.log.
# Valid colors are ["purple", "gray", "green", "yellow", "red"]
# The first argument is the log level:
# https://docs.python.org/2/library/logging.html#logging-levels
# https://docs.python.org/2/library/logging.html#logging.log
logger.log(20, "Another message", extra={'color':'purple', "sender":"FOOBAR"})
| {
"content_hash": "fcac0e361208d1c348ade804bdfc6e96",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 41.03125,
"alnum_prop": 0.7281035795887281,
"repo_name": "invernizzi/hiplogging",
"id": "2d34ee2e15e3eee9d18071a020618d7447247252",
"size": "1313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3688"
},
{
"name": "Shell",
"bytes": "303"
}
],
"symlink_target": ""
} |
class BasePermission(object):
    """Base class for channel permissions; subclasses override has_permission."""

    def has_permission(self, user, action, pk):
        """Return a truthy value to allow *action*; the default allows nothing (None)."""
        return None
class AllowAny(BasePermission):
    """Permission that grants every request, authenticated or not."""
    def has_permission(self, user, action, pk):
        return True
class IsAuthenticated(BasePermission):
    """Permission that grants only logged-in users with a saved primary key."""
    def has_permission(self, user, action, pk):
        # Returns a truthy/falsy value, not necessarily a bool: it is
        # user.is_authenticated when user.pk is truthy, else user.pk.
        return user.pk and user.is_authenticated
| {
"content_hash": "ff3e9a28ac6e021a120d46bd2962c41e",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 48,
"avg_line_length": 20.875,
"alnum_prop": 0.688622754491018,
"repo_name": "linuxlewis/channels-api",
"id": "19904665cad9566d0afcf447e7b89f850993914b",
"size": "336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "channels_api/permissions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "694"
},
{
"name": "Python",
"bytes": "29661"
}
],
"symlink_target": ""
} |
import rospy
import actionlib
import sys
#move_base_msgs
from move_base_msgs.msg import *
def simple_move(x,w):
    """Send a single move_base goal (position x, orientation w) and wait for the result.

    x and w arrive as strings (from sys.argv) and are converted to float.
    """
    rospy.init_node('simple_move')
    #Simple Action Client
    sac = actionlib.SimpleActionClient('move_base', MoveBaseAction )
    #create goal
    goal = MoveBaseGoal()
    #use self?
    #set goal
    rospy.loginfo("Set X = "+x)
    rospy.loginfo("Set W = "+w)
    goal.target_pose.pose.position.x = float(x)
    goal.target_pose.pose.orientation.w = float(w)
    # NOTE(review): 'first_move' is an unusual frame_id — move_base goals are
    # normally expressed in 'map' or 'base_link'; confirm against the TF tree.
    goal.target_pose.header.frame_id = 'first_move'
    goal.target_pose.header.stamp = rospy.Time.now()
    #start listner
    rospy.loginfo("Waiting for server")
    sac.wait_for_server()
    rospy.loginfo("Sending Goals")
    #send goal
    sac.send_goal(goal)
    rospy.loginfo("Waiting for server")
    #finish (blocks until move_base reports the goal outcome)
    sac.wait_for_result()
    #print result
    print sac.get_result()
if __name__ == '__main__':
    try:
        # argv[1] = x position, argv[2] = orientation w (both strings)
        simple_move(sys.argv[1],sys.argv[2])
    except rospy.ROSInterruptException:
        print "Keyboard Interrupt"
| {
"content_hash": "19405c5598e95cba96242e6c8aec1a85",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 68,
"avg_line_length": 19.25925925925926,
"alnum_prop": 0.6442307692307693,
"repo_name": "Rayal/ROS_proov",
"id": "887d35f8e079dc59230cb791573c5edfc0f33897",
"size": "1063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ros_robotics_projects/chapter_9_codes/chefbot_code/chefbot_bringup/scripts/simple_navig_goals.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "27515"
},
{
"name": "C",
"bytes": "21824"
},
{
"name": "C++",
"bytes": "715939"
},
{
"name": "CMake",
"bytes": "96789"
},
{
"name": "HTML",
"bytes": "8942"
},
{
"name": "Java",
"bytes": "21506"
},
{
"name": "Matlab",
"bytes": "27202"
},
{
"name": "Python",
"bytes": "239933"
},
{
"name": "Shell",
"bytes": "262"
}
],
"symlink_target": ""
} |
"""Test case and runner for :func:`aglyph._importable`."""
__author__ = "Matthew Zipay <mattz@ninthtest.info>"
import logging
import unittest
from aglyph import _importable, __version__
from test import assertRaisesWithMessage, dummy
__all__ = [
"ImportableTest",
"suite",
]
# don't use __name__ here; can be run as "__main__"
_log = logging.getLogger("test.test_importable")
class ImportableTest(unittest.TestCase):
    """Test the :func:`aglyph._importable` function."""
    # Only module-level objects (the module itself, top-level functions and
    # classes) should be importable; nested definitions, instances, bound
    # methods and literals should not.
    def test_module_is_importable(self):
        self.assertTrue(_importable(dummy))
    def test_module_level_function_is_importable(self):
        self.assertTrue(_importable(dummy.factory_function))
    def test_nested_function_is_not_importable(self):
        nested_function = dummy.factory_function(None)
        self.assertFalse(_importable(nested_function))
    def test_module_level_class_is_importable(self):
        self.assertTrue(_importable(dummy.ModuleClass))
    def test_nested_class_is_not_importable(self):
        self.assertFalse(_importable(dummy.ModuleClass.NestedClass))
    def test_instance_is_not_importable(self):
        self.assertFalse(_importable(dummy.ModuleClass(None)))
    def test_method_is_not_importable(self):
        self.assertFalse(_importable(dummy.ModuleClass.method))
    def test_literal_is_not_importable(self):
        self.assertFalse(_importable(79))
    def test_None_is_not_importable(self):
        self.assertFalse(_importable(None))
def suite():
    """Build the test suite for this module.

    ``unittest.makeSuite`` has been deprecated since Python 3.11 and was
    removed in 3.13; ``TestLoader.loadTestsFromTestCase`` is the supported
    equivalent and returns the same suite.
    """
    return unittest.TestLoader().loadTestsFromTestCase(ImportableTest)
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.TextTestRunner().run(suite())
| {
"content_hash": "33f0665965fcf979e772d042a2ccffc0",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 68,
"avg_line_length": 27.47457627118644,
"alnum_prop": 0.6952498457742134,
"repo_name": "mzipay/Aglyph",
"id": "479d8a284345cfdeff7902bde44323103d59dd90",
"size": "2763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_importable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "317262"
}
],
"symlink_target": ""
} |
import os
from time import time
from time import sleep
import datetime
from optparse import OptionParser
import pickle
from threading import Thread, Lock
import numpy as np
from scipy.misc import imresize
import led
#import keras
import keras.backend as K
import model_keras
import tensorflow as tf
try:
import config
except:
print( "Driver.py failed to load config file config.py." )
print( "Try copying config_empty.py to config.py and re-running." )
exit()
def log(message, options):
    """Append *message*, prefixed with a microsecond timestamp, to the episode log file.

    The file is <options.dir_ep>/<options.episode>.log, opened in append mode.
    """
    log_path = os.path.join(options.dir_ep, options.episode) + ".log"
    stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S.%f')
    with open(log_path, 'a') as handle:
        handle.write(stamp + ": " + message + "\n")
def linear_unbin(arr, N=15, offset=-1, R=2.0):
    """Invert ``linear_bin``: recover the scalar encoded by a one-hot array.

    The index of the maximum entry of ``arr`` is rescaled back onto the
    range starting at ``offset`` with span ``R`` over ``N + offset`` steps.
    """
    hot_index = np.argmax(arr)
    step = R / (N + offset)
    return hot_index * step + offset
class Driver:
    """Drives the robot autonomously: grabs camera frames, runs a Keras model,
    and sends the predicted action to the controller on a background thread."""
    def __init__(self, model_path, camera, controller, model_name=None):
        """ model_path: location of the saved keras model
            camera: A instance of PiVideoStream
            controller: DriveDaemon which handles creating the camera and sending motor commands
        """
        K.set_learning_phase(False)
        self.stopped = False
        self.model_path = model_path
        self.camera = camera
        self.controller = controller
        #self.embedding = { "stop":0, "forward":1, "left":2, "right":3, "backward":4 }
        # Index-to-action names; a categorical model's argmax indexes this list.
        self.embedding = [ "stop", "forward", "left", "right", "backward" ]
        # Seconds to sleep between inference steps in the drive loop.
        self.image_delay = 0.01
        # LEDs 11/13 signal "model loading" on the robot.
        led.turnLEDOn( True, 11 )
        led.turnLEDOn( True, 13 )
        # Hard-coded model-architecture switches; only the read_model branch
        # is active with the current False/False settings.
        self.isRNN = False
        self.continuous = False
        if self.isRNN:
            self.model = model_keras.make_model_lstm( len(self.embedding), (120,120,3), batch_size=1, timesteps=1, stateful=True, dropouts=[0.25,0.25,0.25,0.25,0.25] )
        else:
            #self.model = model_keras.make_model_test( len(self.embedding), (120,120,3), dropouts=[0.25,0.25,0.25,0.25,0.25] )
            if self.continuous:
                self.model = model_keras.make_model_fc( 1, (120,120,3), dkconv=True, dropouts=[0.25,0.25,0.25,0.25,0.25], categorical=False )
                #self.model = model_keras.read_model( model_path, model_name )
            else:
                self.model = model_keras.read_model( model_path, model_name )
                #self.model = model_keras.make_model_fc( len(self.embedding), (120,120,3), dkconv=True, dropouts=[0.25,0.25,0.25,0.25,0.25] )
        self.model.load_weights( os.path.join( model_path, model_name+"_weights.h5" ) )
        # Pre-build the predict function so it is thread-safe later.
        self.model._make_predict_function()
        self.graph = tf.get_default_graph()
        #self.graph = K.get_session().graph # This should work, too
        led.turnAllLEDOn( False )
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # I'm not 100% sure this will result in everything being closed. Best to also call stop().
        self.stopped = True
    def stop(self,signum=0,frame=0):
        # indicate that the thread should be stopped
        # (signum/frame defaults let this double as a signal handler)
        self.stopped = True
        led.turnAllLEDOn( False )
    def _step(self):
        """Run one frame through the model and dispatch the resulting action."""
        if self.stopped:
            return
        (image,pre_image) = self.camera.read()
        # Use the pre-processed frame from the camera, not the raw one.
        image = pre_image
        if image is not None:
            t2 = time()
            image = self.pre_process(image, image_norm=False)
            t3 = time()
            actions = self.model.predict_on_batch(image)
            if self.isRNN:
                actions = actions[0][0]
            else:
                actions = actions[0]
            t4 = time()
            #print( "Actions: {}".format( actions ) )
            #if action == 0:
            # action = np.argmax( actions[1:] ) + 1
            # #print( "skipping stop action" )
            # Output-shape dispatch: 15 bins -> binned steering,
            # continuous -> throttle/steering values, else categorical action.
            if len(actions) == 15:
                steering = linear_unbin(actions)
                action = 'dk {} {}'.format( steering, 1.0 )
                self.controller.do_action( action )
            elif self.continuous:
                if len(actions) == 2:
                    action = 'throttles {} {}'.format( actions[0], actions[1] )
                else:
                    action = 'dk {} {}'.format( actions[0], 1.0 )
                self.controller.do_action( action )
            else:
                action = np.argmax(actions) # No exploration, just choose the best
                self.controller.do_action( self.embedding[action] )
            #print( "Times; {} {}".format( t3-t2, t4-t3 ) )
    def _drive( self ):
        """Background loop: step until stopped, then command a stop."""
        led.turnLEDOn( True, 11 )
        # The graph context is required because inference runs off the
        # thread that created the TF session.
        with self.graph.as_default():
            while not self.stopped:
                sleep(self.image_delay)
                self._step()
        self.controller.do_action( "stop" )
        led.turnAllLEDOn( False )
    def startDriving( self ):
        """Start the drive loop on a daemon-less background thread."""
        self.stopped = False
        Thread(target=self._drive, args=()).start()
    def endDriving( self ):
        """Signal the drive loop to finish."""
        self.stop()
    def softmax(self, x):
        # Numerically stable softmax (subtract the max before exponentiating).
        probs = np.exp(x - np.max(x))
        probs /= np.sum(probs )
        return probs
    def pre_process(self, image, image_norm=True ):
        """Resize to 120x120, normalize (per-channel z-score or /255), and
        reshape to the model's expected batch (and, for RNNs, time) dims."""
        image = image.astype(np.float) # / 255.0
        if image.shape[0] > 120 or image.shape[1] > 120:
            image = imresize(image, (120,120), interp='nearest' ) # This is slow, 0.3 - 0.4 seconds
        if image_norm:
            image[:,:,0] -= np.mean(image[:,:,0])
            image[:,:,1] -= np.mean(image[:,:,1])
            image[:,:,2] -= np.mean(image[:,:,2])
            image[:,:,0] /= np.std(image[:,:,0])
            image[:,:,1] /= np.std(image[:,:,1])
            image[:,:,2] /= np.std(image[:,:,2])
        else:
            image /= 255.0
        if self.isRNN:
            image = image.reshape( 1, 1, 120, 120, 3 )
        else:
            image = image.reshape( 1, 120, 120, 3 )
        return image
def getOptions():
    """Parse command-line options for a driving episode.

    Returns:
        (options, args): optparse results. ``options.episode`` defaults to a
        date/time stamp, ``options.dir_ep`` resolves to the episode directory,
        and ``options.im_format`` is currently hard-coded to "numpy".
    """
    usage = "Usage: python ./drive.py [options]"
    # Bug fix: the usage string was built but never handed to OptionParser,
    # so --help printed a generic usage line instead of this one.
    parser = OptionParser(usage)
    parser.add_option("-e","--episode", help="Episode Name. Used for episode related file names. Defaults to date/time.")
    parser.add_option("--dir_ep", help="Directory for saving all episode related files. Defaults to episode name.")
    parser.add_option("--test_only", action="store_true", default=False, help="Run tests, then exit.")
    parser.add_option("--video", action="store_true", default=False, help="Record video during an episode.")
    parser.add_option("-s","--steps", type="int", default=300, help="Number of steps to run. Defaults to 300.")
    (options, args) = parser.parse_args()
    if not options.episode:
        # Default the episode name to the current timestamp.
        n = datetime.datetime.now()
        options.episode = n.strftime('%Y%m%d_%H%M%S.drive')
    if not options.dir_ep:
        options.dir_ep = options.episode
    else:
        options.dir_ep = os.path.join( options.dir_ep, options.episode )
    options.dir_ep = os.path.expanduser(options.dir_ep)
    # Hard code this for now
    options.im_format = "numpy"
    return (options, args)
def test(options):
    """Debug helper: print cwd and the resolved actions-file path, then exit the process."""
    cwd = os.getcwd()
    print( cwd )
    actions_path = os.path.join( options.dir_ep, options.episode+"_actions.text" )
    print( actions_path )
    actions_path = os.path.abspath(actions_path)
    print( actions_path )
    exit()
if __name__ == "__main__":
    (options, args) = getOptions()
    if options.test_only:
        test(options)
    # Smoke run: drive for 20 seconds with a hard-coded model, then stop.
    # No camera/controller are passed (None), so this exercises model loading.
    #def __init__(self, drive_dir, video_path=None, camera=None, image_delay=None):
    #with Driver( os.path.expanduser("~/models/default.h5"), None, None ) as adrive:
    with Driver( os.path.expanduser("~/models"), None, None, model_name="Binned1" ) as adrive:
        adrive.startDriving()
        sleep(20)
        adrive.endDriving()
| {
"content_hash": "88df2178e1e39274d42db4088ac30d3d",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 167,
"avg_line_length": 36.54883720930233,
"alnum_prop": 0.5754644947823874,
"repo_name": "Bleyddyn/malpi",
"id": "e709708e5cdd09bc899ea3e2e96490578184d496",
"size": "7858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "robot/Driver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "4982"
},
{
"name": "Jupyter Notebook",
"bytes": "16321"
},
{
"name": "Python",
"bytes": "947508"
},
{
"name": "Shell",
"bytes": "1444"
}
],
"symlink_target": ""
} |
""" namedtuple""" | {
"content_hash": "a7ee812ca3195c401dbbebf994906083",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 17,
"avg_line_length": 17,
"alnum_prop": 0.5882352941176471,
"repo_name": "Aneesh540/python-projects",
"id": "672bdafc2c2500dd0e99f43b937e03e1a208a570",
"size": "17",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NEW/ch6.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "29202"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from django.contrib.admindocs import views
# URLconf for django.contrib.admindocs. Each route maps a documentation page
# (bookmarklets, tags, filters, views, models, templates) to its class-based
# view; URL names follow the 'django-admindocs-*' convention used by reverse().
urlpatterns = [
    url(r'^$',
        views.BaseAdminDocsView.as_view(template_name='admin_doc/login.html'),
        name='django-admindocs-docroot'),
    url(r'^bookmarklets/$',
        views.BookmarkletsView.as_view(),
        name='django-admindocs-bookmarklets'),
    url(r'^tags/$',
        views.TemplateTagIndexView.as_view(),
        name='django-admindocs-tags'),
    url(r'^filters/$',
        views.TemplateFilterIndexView.as_view(),
        name='django-admindocs-filters'),
    url(r'^views/$',
        views.ViewIndexView.as_view(),
        name='django-admindocs-views-index'),
    url(r'^views/(?P<view>[^/]+)/$',
        views.ViewDetailView.as_view(),
        name='django-admindocs-views-detail'),
    url(r'^models/$',
        views.ModelIndexView.as_view(),
        name='django-admindocs-models-index'),
    url(r'^models/(?P<app_label>[^\.]+)\.(?P<model_name>[^/]+)/$',
        views.ModelDetailView.as_view(),
        name='django-admindocs-models-detail'),
    url(r'^templates/(?P<template>.*)/$',
        views.TemplateDetailView.as_view(),
        name='django-admindocs-templates'),
]
| {
"content_hash": "03f93d09e2fea53dd7aabbdb9ae6ee86",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 78,
"avg_line_length": 37.25,
"alnum_prop": 0.6124161073825504,
"repo_name": "scifiswapnil/Project-LoCatr",
"id": "bff0222df7ff2511dc9fc6a9c7fc8777ca821d56",
"size": "1192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/django/contrib/admindocs/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47175"
},
{
"name": "HTML",
"bytes": "79071"
},
{
"name": "JavaScript",
"bytes": "115482"
},
{
"name": "Python",
"bytes": "6329362"
},
{
"name": "Shell",
"bytes": "3282"
}
],
"symlink_target": ""
} |
__author__ = 'ray'
__date__ = '4/10/15'
import re
import uuid
from collections import OrderedDict
import six
from stonemason.pyramid import Pyramid
from stonemason.formatbundle import MapType, TileFormat, FormatBundle
from stonemason.renderer import MasonRenderer
from stonemason.storage.tilestorage import NullClusterStorage, ClusterStorage, \
MetaTileStorageConcept, DiskClusterStorage, S3ClusterStorage, DiskMetaTileStorage, \
S3MetaTileStorage
from .theme import Theme, SchemaTheme
from .mapbook import MapBook
from .metadata import Metadata
from .mapsheet import MapSheet, ClusterMapSheet, MetaTileMapSheet
from .exceptions import UnknownStorageType, InvalidMapSheetTag
class MapSheetBuilder(object):
    """Builds a MapSheet from theme configuration, one component at a time."""
    def __init__(self):
        # Sensible defaults so build() works even for a sparse theme.
        self._tag = ''
        self._map_type = MapType('image')
        self._tile_format = TileFormat('PNG')
        self._pyramid = Pyramid()
        self._storage = NullClusterStorage()
        self._renderer = MasonRenderer({})
    def build(self):
        """Validate the tag and assemble the concrete MapSheet variant."""
        if re.match('^[0-9].*', self._tag):
            raise InvalidMapSheetTag(
                'Tag of TileMatrix should not start with a number')
        bundle = FormatBundle(self._map_type, self._tile_format)
        # The storage kind decides which MapSheet implementation to create.
        if isinstance(self._storage, ClusterStorage):
            map_sheet = ClusterMapSheet(
                self._tag, bundle, self._pyramid, self._storage, self._renderer)
        elif isinstance(self._storage, MetaTileStorageConcept):
            map_sheet = MetaTileMapSheet(
                self._tag, bundle, self._pyramid, self._storage, self._renderer)
        else:
            # Should not reach here
            raise NotImplementedError
        return map_sheet
    def build_tag(self, tag):
        """Set the sheet tag (must be a string)."""
        assert isinstance(tag, six.string_types)
        self._tag = tag
    def build_pyramid(self, **config):
        """Replace the pyramid with one built from keyword arguments."""
        self._pyramid = Pyramid(**config)
    def build_map_type(self, t):
        """Set the map type from its string name (e.g. 'image')."""
        assert isinstance(t, six.string_types)
        self._map_type = MapType(t)
    def build_tile_format(self, **config):
        """Replace the tile format with one built from keyword arguments."""
        self._tile_format = TileFormat(**config)
    def build_storage(self, **config):
        """Create the tile storage named by config['prototype'].

        Raises UnknownStorageType for unrecognized prototypes.
        """
        bundle = FormatBundle(self._map_type, self._tile_format)
        prototype = config.pop('prototype', 'null')
        if prototype == 'null':
            self._storage = NullClusterStorage()
        elif prototype == 'disk':
            self._storage = DiskClusterStorage(format=bundle, **config)
        elif prototype == 's3':
            self._storage = S3ClusterStorage(format=bundle, **config)
        elif prototype == 'disk.metatile':
            self._storage = DiskMetaTileStorage(format=bundle, **config)
        elif prototype == 's3.metatile':
            self._storage = S3MetaTileStorage(format=bundle, **config)
        else:
            raise UnknownStorageType(prototype)
    def build_renderer(self, **config):
        """Create the renderer from the 'layers' expression (empty if missing)."""
        expression = config.get('layers')
        if expression is None:
            expression = dict()
        self._renderer = MasonRenderer(expression)
class MapBookBuilder(object):
    """Step-by-step builder that assembles a :class:`MapBook`."""

    def __init__(self):
        # Accumulated state; sheets keep their insertion order.
        self._name = ''
        self._metadata = Metadata()
        self._map_sheets = OrderedDict()

    def build(self):
        """Create a MapBook from everything collected so far."""
        book = MapBook(name=self._name,
                       metadata=self._metadata,
                       map_sheets=self._map_sheets)
        return book

    def build_name(self, name):
        """Set the book name (must be a string)."""
        assert isinstance(name, six.string_types)
        self._name = name

    def build_metadata(self, **config):
        """Replace the metadata with one built from keyword arguments."""
        self._metadata = Metadata(**config)

    def add_map_sheet(self, map_sheet):
        """Register a MapSheet under its tag, replacing any sheet with the same tag."""
        assert isinstance(map_sheet, MapSheet)
        self._map_sheets[map_sheet.tag] = map_sheet
def create_map_book_from_theme(theme):
    """Translate a Theme into a MapBook by driving the two builders."""
    assert isinstance(theme, Theme)
    book_builder = MapBookBuilder()
    if theme.name is not None:
        book_builder.build_name(theme.name)
    if theme.metadata is not None:
        book_builder.build_metadata(**theme.metadata)
    for schema in theme.schemas:
        assert isinstance(schema, SchemaTheme)
        sheet_builder = MapSheetBuilder()
        # Fall back to a random unique tag when the theme does not name one.
        tag = schema.tag if schema.tag is not None else 'tag-%s' % uuid.uuid4().hex
        sheet_builder.build_tag(tag)
        if schema.pyramid is not None:
            sheet_builder.build_pyramid(**schema.pyramid)
        if schema.maptype is not None:
            sheet_builder.build_map_type(schema.maptype)
        if schema.tileformat is not None:
            sheet_builder.build_tile_format(**schema.tileformat)
        if schema.storage is not None:
            sheet_builder.build_storage(**schema.storage)
        if schema.renderer is not None:
            sheet_builder.build_renderer(**schema.renderer)
        book_builder.add_map_sheet(sheet_builder.build())
    return book_builder.build()
| {
"content_hash": "2670ef62ecd991e0bc56801a3c39f734",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 88,
"avg_line_length": 32.30065359477124,
"alnum_prop": 0.6371914204775394,
"repo_name": "Kotaimen/stonemason",
"id": "086a8e5d39e3a8bc0a39bd38193670578ca0d5bc",
"size": "4969",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "stonemason/mason/builder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "188442"
},
{
"name": "Python",
"bytes": "448406"
}
],
"symlink_target": ""
} |
"""Implements contact form forms"""
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.conf import settings as django_settings
# Fail fast with a real ImportError when an optional dependency is missing.
# (The previous `raise '<string>'` form raises TypeError on Python 2.6+,
# hiding the intended message entirely.)
try:
    import bleach
except ImportError:
    raise ImportError('django-cbv-contact-form application required bleach package')
try:
    from captcha.fields import CaptchaField
except ImportError:
    raise ImportError('django-cbv-contact-form application required django-simple-captcha package')
try:
    from crispy_forms.helper import FormHelper
    from crispy_forms.layout import Layout, Fieldset, Button, ButtonHolder, Submit
except ImportError:
    raise ImportError('django-cbv-contact-form application required django-crispy-forms package')
from contact_form.conf import settings
from contact_form.models import Message, Subject
class ContactForm(forms.ModelForm):
    """ContactForm form"""
    # Limit subject choices to the current site when the Django "sites"
    # framework is configured and CONTACT_FORM_USE_SITES is enabled.
    if hasattr(django_settings, 'SITE_ID') and settings.CONTACT_FORM_USE_SITES:
        queryset = Subject.objects.filter(site__id=django_settings.SITE_ID)
    else:
        queryset = Subject.objects.all()
    # NOTE(review): the subject field is currently disabled; `queryset`
    # above is only consumed by this commented-out field definition.
    '''
    subject = forms.ModelChoiceField(queryset=queryset,
                                     widget=forms.Select(),
                                     label=_('Message subject'),
                                     empty_label=_('Please select subject'),
                                     error_messages={'required': _('Please select subject')})
    '''
    sender_name = forms.CharField(label=_('Your name'),
                                  widget=forms.TextInput(
                                      attrs={'maxlength': settings.CONTACT_FORM_SENDER_NAME_MAX_LENGTH}
                                  ),
                                  error_messages={'required': _('Please enter your name')})
    # maxlength is 254 characters for compliant with RFCs 3696 and 5321
    sender_email = forms.EmailField(label=_('Your e-mail'),
                                    widget=forms.TextInput(attrs={'maxlength': 254}),
                                    error_messages={'required': _('Please enter your email.')})
    message = forms.CharField(label=_('Your message'),
                              widget=forms.Textarea(attrs={'maxlength': settings.CONTACT_FORM_MESSAGE_MAX_LENGTH}),
                              min_length=settings.CONTACT_FORM_MESSAGE_MIN_LENGTH,
                              help_text=_('({0} chars minimum)').format(
                                  settings.CONTACT_FORM_MESSAGE_MIN_LENGTH
                              ),
                              error_messages={'required': _('Please enter your message'),
                                              'min_length': _('Use at least {0} characters').format(
                                                  settings.CONTACT_FORM_MESSAGE_MIN_LENGTH
                                              )})

    def __init__(self, *args, **kwargs):
        """Form initialization method

        Builds the django-crispy-forms helper (layout, buttons and form
        attributes) before delegating to ModelForm initialization.

        :param args: form args
        :param kwargs: form keyword args
        """
        self.helper = FormHelper()
        layout = Layout(
            Fieldset(
                _('Contact form'),
                #'subject',
                'sender_name',
                'sender_email',
                'message',
            ),
            ButtonHolder(
                Button('cancel', _('Cancel'), css_class='secondaryAction'),
                Submit('submit', _('Submit'), css_class='primaryAction'),
            )
        )
        self.helper.add_layout(layout)
        self.helper.form_id = 'contact_form'
        self.helper.form_action = ''
        self.helper.form_method = 'POST'
        self.helper.form_style = 'inline'
        super(ContactForm, self).__init__(*args, **kwargs)

    def clean_sender_name(self):
        """Reject sender names that contain markup when filtering is on.

        bleach strips HTML tags; if stripping changed the length, the
        submitted name contained disallowed markup.
        """
        data = self.cleaned_data['sender_name']
        if settings.CONTACT_FORM_FILTER_SENDER_NAME:
            if len(data) != len(bleach.clean(data, tags=[], strip=True)):
                raise forms.ValidationError(_('Not allowed characters in your name'))
        return data

    class Meta:
        model = Message
        fields = (
            #'subject',
            'sender_name',
            'sender_email',
            'message',
        )
class ContactFormCaptcha(ContactForm):
    """ContactForm form with captcha"""
    captcha = CaptchaField(label=_('Protection Code'),
                           error_messages={'required': _('Please enter protection code'),
                                           'invalid': _('Invalid protection code')})

    def __init__(self, *args, **kwargs):
        """Form initialization method

        Rebuilds the crispy-forms helper so its layout includes the
        captcha field.

        :param args: form args
        :param kwargs: form keyword args
        """
        self.helper = FormHelper()
        layout = Layout(
            Fieldset(
                _('Contact form'),
                #'subject',
                'sender_name',
                'sender_email',
                'message',
                'captcha',
            ),
            ButtonHolder(
                Button('cancel', _('Cancel'), css_class='secondaryAction'),
                Submit('submit', _('Submit'), css_class='primaryAction'),
            )
        )
        self.helper.add_layout(layout)
        self.helper.form_id = 'contact_form'
        self.helper.form_action = ''
        self.helper.form_method = 'POST'
        self.helper.form_style = 'inline'
        # NOTE(review): super(ContactForm, ...) skips ContactForm.__init__,
        # which would otherwise overwrite self.helper with the captcha-less
        # layout -- presumably intentional, but worth confirming.
        super(ContactForm, self).__init__(*args, **kwargs)
| {
"content_hash": "0dde418d291dcaeb9133e636c431d7b7",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 115,
"avg_line_length": 38.23776223776224,
"alnum_prop": 0.5312728602779809,
"repo_name": "joebos/django-cbv-contact-form",
"id": "f34d7b5b6edb53400f4cc2fc24d85efe4a5c3a7d",
"size": "5468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contact_form/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "437"
},
{
"name": "Python",
"bytes": "31612"
}
],
"symlink_target": ""
} |
import itertools
def minmax(data):
    '''Computes the minimum and maximum values in one-pass using only 1.5*len(data) comparisons'''
    # izip_longest was renamed to zip_longest in Python 3; support both so
    # the recipe runs on either interpreter line.
    try:
        from itertools import izip_longest as zip_longest  # Python 2
    except ImportError:
        from itertools import zip_longest  # Python 3
    it = iter(data)
    try:
        lo = hi = next(it)
    except StopIteration:
        raise ValueError('minmax() arg is an empty sequence')
    # Consume the rest in pairs; padding an odd-length tail with `lo` is
    # harmless because `lo` can never raise either running extreme.
    for x, y in zip_longest(it, it, fillvalue=lo):
        if x > y:
            x, y = y, x
        if x < lo:
            lo = x
        if y > hi:
            hi = y
    return lo, hi
if __name__ == '__main__':
    # Smoke test on random data.  Single-argument print() parses
    # identically on Python 2 and Python 3.
    import random
    data = [random.random() for i in range(1000)]
    print(minmax(data))
| {
"content_hash": "b5168c56d5a4331d6752df0a91536839",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 94,
"avg_line_length": 26.90909090909091,
"alnum_prop": 0.5523648648648649,
"repo_name": "ActiveState/code",
"id": "9da30a10c04bd576bcfcab637074e0a1269ad5cb",
"size": "592",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/Python/577916_Fast_minmax_function/recipe-577916.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35894"
},
{
"name": "C",
"bytes": "56048"
},
{
"name": "C++",
"bytes": "90880"
},
{
"name": "HTML",
"bytes": "11656"
},
{
"name": "Java",
"bytes": "57468"
},
{
"name": "JavaScript",
"bytes": "181218"
},
{
"name": "PHP",
"bytes": "250144"
},
{
"name": "Perl",
"bytes": "37296"
},
{
"name": "Perl 6",
"bytes": "9914"
},
{
"name": "Python",
"bytes": "17387779"
},
{
"name": "Ruby",
"bytes": "40233"
},
{
"name": "Shell",
"bytes": "190732"
},
{
"name": "Tcl",
"bytes": "674650"
}
],
"symlink_target": ""
} |
from KMCLib import *
import numpy
# Define the unit cell. We're going for hcp here, with c/a set for sphere-packing ratio for now.
cell_vectors = [[1.0, 0.0, 0.0],
                [0.0, 1.0, 0.0],
                [0.0, 0.0, 1.0]]
# NOTE(review): the comment below describes Ti/O on multiple basis points,
# but only a single basis point is defined here -- confirm intent.
# I've set these up so that the Titaniums sit on the 1st and 3rd basis points, and the Oxygens on the others,
# where the octahedral interstitials are supposed to be
basis_points = [[0.0, 0.0, 0.0]]
unit_cell = KMCUnitCell(cell_vectors=cell_vectors,
                        basis_points=basis_points)

# Define the lattice.  Two extra layers on each z end hold the boundary
# markers set below.
xRep = 1
yRep = 1
zRep = 20
numPoints = xRep * (zRep + 4) * yRep
lattice = KMCLattice(unit_cell=unit_cell,
                     repetitions=(xRep, yRep, zRep + 4),
                     periodic=(False, False, True))

# Generate the initial types. I'm aiming to put Ti/Nb on the 1st and 3rd sites, and O/V on the 2nd and 4th,
# i.e. the Tis on the main sites with some substituted for Nbs, and vacancies on the interstitials except for a few Os.
types = ["V"] * numPoints
types[0] = "Bo"    # bottom boundary, two layers
types[1] = "Bo"
types[-2] = "To"   # top boundary, two layers
types[-1] = "To"

# Setup the configuration.
configuration = KMCConfiguration(lattice=lattice,
                                 types=types,
                                 possible_types=["O", "V", "To", "Bo"])

# Use the _script() function to get a script that can generate the configuration.
# print() with a single argument behaves identically on Python 2 and 3.
print("from KMCLib import *")
print(configuration._script())
| {
"content_hash": "a82169ae870317042a286d1bcc0d8ff8",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 119,
"avg_line_length": 35.275,
"alnum_prop": 0.6293408929836996,
"repo_name": "joshuahellier/PhDStuff",
"id": "c5aa0251967d66a06e4e38545bae105286e68207",
"size": "1411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codes/kmc/1dVersion/generateConfig.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "4177"
},
{
"name": "Fortran",
"bytes": "10357"
},
{
"name": "Mathematica",
"bytes": "729947"
},
{
"name": "Python",
"bytes": "786744"
},
{
"name": "Roff",
"bytes": "9"
},
{
"name": "Shell",
"bytes": "6313"
}
],
"symlink_target": ""
} |
import time
import string
import hashlib
import binascii
import logging.handlers
import codecs
import os.path
import datetime
import random
import mimetypes
import re
import magic
from itertools import groupby
import tg
import pylons
import webob.multidict
from formencode import Invalid
from tg.decorators import before_validate
from pylons import response
from pylons import tmpl_context as c
from paste.httpheaders import CACHE_CONTROL, EXPIRES
from webhelpers.html import literal
from webob import exc
from pygments.formatters import HtmlFormatter
from ew import jinja2_ew as ew
from ming.utils import LazyProperty
import pysvn
def permanent_redirect(url):
    """Issue a 301 (permanent) redirect to *url*.

    tg.redirect raises a 302 HTTPFound; intercept it and re-raise as
    HTTPMovedPermanently so clients may cache the new location.
    """
    try:
        tg.redirect(url)
    except exc.HTTPFound as err:  # 'as' form is valid on Python 2.6+ and 3.x
        raise exc.HTTPMovedPermanently(location=err.location)
def cache_forever():
    '''Mark the current response as cacheable for one year.'''
    # Keep every header except any stale caching directives.
    kept = [(name, value)
            for name, value in response.headers.items()
            if name.lower() not in ('pragma', 'cache-control')]
    one_year = 60 * 60 * 24 * 365
    delta = CACHE_CONTROL.apply(kept, public=True, max_age=one_year)
    EXPIRES.update(kept, delta=delta)
    # Drop the old directives from the live response, then re-apply the
    # rebuilt header list.
    response.headers.pop('cache-control', None)
    response.headers.pop('pragma', None)
    response.headers.update(kept)
class memoize_on_request(object):
    '''Decorator that caches a function's results in ``c.memoize_cache``
    for the duration of the current request.

    Cache keys combine ``key`` with the call's positional and keyword
    arguments; pass ``include_func_in_key=True`` to also distinguish
    different decorated functions sharing the same key.
    '''
    def __init__(self, *key, **kwargs):
        self.key = key
        self.include_func_in_key = kwargs.pop(
            'include_func_in_key', False)
        assert not kwargs, 'Extra args'

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            cache = c.memoize_cache
            # items() (not the Py2-only iteritems()) yields the same pairs
            # and also works on Python 3; tuple() makes kwargs hashable.
            if self.include_func_in_key:
                key = (func, self.key, args, tuple(kwargs.items()))
            else:
                key = (self.key, args, tuple(kwargs.items()))
            if key in cache:
                result = cache[key]
            else:
                result = cache[key] = func(*args, **kwargs)
            return result
        wrapper.__name__ = 'wrap(%s)' % func.__name__
        return wrapper
def guess_mime_type(filename):
    '''Guess MIME type based on filename.

    Applies heuristics, tweaks, and defaults in centralized manner.
    '''
    # Consider changing to strict=False
    guessed, _encoding = mimetypes.guess_type(filename, strict=True)
    if not guessed:
        # Unknown extensions fall back to the generic binary type.
        return 'application/octet-stream'
    return guessed
class ConfigProxy(object):
    '''Wrapper for loading config values at module-scope so we don't
    have problems when a module is imported before tg.config is initialized
    '''
    def __init__(self, **kw):
        # Map of attribute name -> tg.config key, resolved lazily.
        self._kw = kw

    def __getattr__(self, attr):
        # Only consulted for attributes not found normally, so config is
        # read at access time instead of import time.
        config_key = self._kw[attr]
        return tg.config[config_key]
class lazy_logger(object):
    '''Lazy instatiation of a logger, to ensure that it does not get
    created before logging is configured (which would make it disabled)'''
    def __init__(self, name):
        self._name = name

    @LazyProperty
    def _logger(self):
        return logging.getLogger(self._name)

    def __getattr__(self, name):
        # Refuse private lookups so proxying never recurses into the
        # LazyProperty machinery.  AttributeError(name) is the call form,
        # valid on Python 2 and 3 (the old `raise E, arg` is Py2-only).
        if name.startswith('_'):
            raise AttributeError(name)
        return getattr(self._logger, name)
class TimedRotatingHandler(logging.handlers.BaseRotatingHandler):
    """Rotating log handler whose target file name is a strftime pattern
    evaluated against the current UTC time (e.g. ``app-%Y-%m-%d.log``).

    Rolls over as soon as the clock moves the pattern to a new name.
    """

    def __init__(self, strftime_pattern):
        self.pattern = strftime_pattern
        self.last_filename = self.current_filename()
        logging.handlers.BaseRotatingHandler.__init__(self, self.last_filename, 'a')

    def current_filename(self):
        """Absolute path the handler should be writing to right now."""
        return os.path.abspath(datetime.datetime.utcnow().strftime(self.pattern))

    def shouldRollover(self, record):
        'Inherited from BaseRotatingFileHandler'
        return self.current_filename() != self.last_filename

    def doRollover(self):
        self.stream.close()
        self.baseFilename = self.current_filename()
        # Remember the new name -- without this, shouldRollover() keeps
        # returning True and the 'w' reopen below truncates the log file
        # on every subsequent record.
        self.last_filename = self.baseFilename
        if self.encoding:
            self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
        else:
            self.stream = open(self.baseFilename, 'w')
class StatsHandler(TimedRotatingHandler):
    """Rotating handler that serializes selected record attributes into a
    ``k=v,k=v`` string (``record.kwpairs``) for the rtstats log."""
    # Record attributes copied into the kwpairs string when present.
    fields = ('action', 'action_type', 'tool_type', 'tool_mount', 'project', 'neighborhood',
              'username', 'url', 'ip_address')

    def __init__(self,
                 strftime_pattern,
                 module='allura',
                 page=1,
                 **kwargs):
        self.page = page
        self.module = module
        TimedRotatingHandler.__init__(self, strftime_pattern)

    def emit(self, record):
        # Only records explicitly tagged with an action are stats records.
        if not hasattr(record, 'action'):
            return
        kwpairs = dict(
            module=self.module,
            page=self.page)
        for name in self.fields:
            kwpairs[name] = getattr(record, name, None)
        kwpairs.update(getattr(record, 'kwpairs', {}))
        # items() instead of the Py2-only iteritems() -- identical pairs,
        # and compatible with Python 3.
        record.kwpairs = ','.join(
            '%s=%s' % (k, v) for k, v in sorted(kwpairs.items())
            if v is not None)
        record.exc_info = None # Never put tracebacks in the rtstats log
        TimedRotatingHandler.emit(self, record)
def chunked_find(cls, query=None, pagesize=1024, sort_key=None, sort_dir=1):
    '''Yield successive pages (lists of at most ``pagesize`` documents)
    matching ``query``, optionally sorted.'''
    if query is None:
        query = {}
    page_number = 0
    while True:
        cursor = cls.query.find(query).skip(pagesize * page_number).limit(pagesize)
        if sort_key:
            cursor.sort(sort_key, sort_dir)
        page = cursor.all()
        # An empty page means the previous one was the last.
        if not page:
            return
        yield page
        page_number += 1
def lsub_utf8(s, n):
    '''Useful for returning n bytes of a UTF-8 string, rather than characters'''
    if len(s) <= n:
        return s
    # Back up from the cut point past any UTF-8 continuation bytes
    # (bit pattern 10xxxxxx) so we never split a multi-byte character.
    cut = n
    while (ord(s[cut]) & 0xc0) == 0x80:
        cut -= 1
    return s[:cut]
def chunked_list(l, n):
    """ Yield successive n-sized chunks from l.
    """
    # range() works on both Python 2 and 3 (xrange is Py2-only); the
    # number of chunk starts is small enough that this costs nothing.
    for i in range(0, len(l), n):
        yield l[i:i + n]
def chunked_iter(iterable, max_size):
    '''return iterable 'chunks' from the iterable of max size max_size'''
    eiter = enumerate(iterable)
    # Group consecutive items by index // max_size.  Indexing into the
    # (index, item) pair replaces the Py2-only tuple-unpacking lambda
    # signature, which is a syntax error on Python 3.
    keyfunc = lambda pair: pair[0] // max_size
    for _, chunk in groupby(eiter, keyfunc):
        yield (item for _, item in chunk)
class AntiSpam(object):
    '''Helper class for bot-protecting forms.

    Renders hidden timestamp/spinner fields plus CSS-hidden "honeypot"
    inputs, and obfuscates real field names by XORing them with a
    per-client "spinner" value; validate_request() reverses the process
    and rejects submissions that look automated.
    '''
    # $-substituted by extra_fields(); honeypots are hidden from humans
    # via the generated CSS class but remain visible to naive bots.
    honey_field_template=string.Template('''<p class="$honey_class">
<label for="$fld_id">You seem to have CSS turned off.
Please don't fill out this field.</label><br>
<input id="$fld_id" name="$fld_name" type="text"><br></p>''')

    def __init__(self, request=None, num_honey=2):
        self.num_honey = num_honey
        if request is None or request.method == 'GET':
            # Rendering a fresh form: stamp it with the current time and
            # derive a new spinner for this client.
            self.request = pylons.request
            self.timestamp = int(time.time())
            self.spinner = self.make_spinner()
            self.timestamp_text = str(self.timestamp)
            self.spinner_text = self._wrap(self.spinner)
        else:
            # Validating a submission: recover timestamp and spinner from
            # the posted hidden fields.
            self.request = request
            self.timestamp_text = request.params['timestamp']
            self.spinner_text = request.params['spinner']
            self.timestamp = int(self.timestamp_text)
            self.spinner = self._unwrap(self.spinner_text)
        self.spinner_ord = map(ord, self.spinner)
        self.random_padding = [ random.randint(0,255) for x in self.spinner ]
        self.honey_class = self.enc(self.spinner_text, css_safe=True)
        # The counter is to ensure that multiple forms in the same page
        # don't end up with the same id. Instead of doing:
        #
        # honey0, honey1
        # which just relies on 0..num_honey we include a counter
        # which is incremented every time extra_fields is called:
        #
        # honey00, honey 01, honey10, honey11
        self.counter = 0

    @staticmethod
    def _wrap(s):
        '''Encode a string to make it HTML id-safe (starts with alpha, includes
        only digits, hyphens, underscores, colons, and periods). Luckily, base64
        encoding doesn't use hyphens, underscores, colons, nor periods, so we'll
        use these characters to replace its plus, slash, equals, and newline.
        '''
        tx_tbl = string.maketrans('+/', '-_')
        s = binascii.b2a_base64(s)
        s = s.rstrip('=\n')
        s = s.translate(tx_tbl)
        s = 'X' + s
        return s

    @staticmethod
    def _unwrap(s):
        # Inverse of _wrap(): drop the leading 'X', undo the character
        # substitutions, restore base64 padding, then decode.
        tx_tbl = string.maketrans('-_', '+/')
        s = s[1:]
        s = str(s).translate(tx_tbl)
        i = len(s) % 4
        if i > 0:
            s += '=' * (4 - i)
        s = binascii.a2b_base64(s + '\n')
        return s

    def enc(self, plain, css_safe=False):
        '''Stupid fieldname encryption. Not production-grade, but
        hopefully "good enough" to stop spammers. Basically just an
        XOR of the spinner with the unobfuscated field name
        '''
        # Plain starts with its length, includes the ordinals for its
        # characters, and is padded with random data
        plain = ([ len(plain) ]
                 + map(ord, plain)
                 + self.random_padding[:len(self.spinner_ord) - len(plain) - 1])
        enc = ''.join(chr(p^s) for p, s in zip(plain, self.spinner_ord))
        enc = self._wrap(enc)
        if css_safe:
            # CSS class names: keep alphabetic characters only.
            enc = ''.join(ch for ch in enc if ch.isalpha())
        return enc

    def dec(self, enc):
        # Reverse of enc(): XOR against the spinner, then use the length
        # prefix to strip off the random padding.
        enc = self._unwrap(enc)
        enc = list(map(ord, enc))
        plain = [e^s for e,s in zip(enc, self.spinner_ord)]
        plain = plain[1:1+plain[0]]
        plain = ''.join(map(chr, plain))
        return plain

    def extra_fields(self):
        '''Yield the hidden timestamp/spinner inputs and the honeypot
        fields that must be rendered into the protected form.'''
        yield ew.HiddenField(name='timestamp', value=self.timestamp_text).display()
        yield ew.HiddenField(name='spinner', value=self.spinner_text).display()
        for fldno in range(self.num_honey):
            fld_name = self.enc('honey%d' % (fldno))
            fld_id = self.enc('honey%d%d' % (self.counter, fldno))
            yield literal(self.honey_field_template.substitute(
                honey_class=self.honey_class,
                fld_id=fld_id,
                fld_name=fld_name))
        self.counter += 1

    def make_spinner(self, timestamp=None):
        '''Derive the spinner: SHA-1 of timestamp, client IP and the
        configured secret, tying each form to its client and issue time.'''
        if timestamp is None: timestamp = self.timestamp
        try:
            client_ip = self.request.headers.get('X_FORWARDED_FOR', self.request.remote_addr)
            client_ip = client_ip.split(',')[0].strip()
        except (TypeError, AttributeError), err:
            client_ip = '127.0.0.1'
        plain = '%d:%s:%s' % (
            timestamp, client_ip, pylons.config.get('spinner_secret', 'abcdef'))
        return hashlib.sha1(plain).digest()

    @classmethod
    def validate_request(cls, request=None, now=None, params=None):
        '''De-obfuscate the posted field names and enforce the anti-bot
        invariants; returns the cleaned params or raises ValueError.'''
        if request is None: request = pylons.request
        if params is None: params = request.params
        new_params = dict(params)
        if not request.method == 'GET':
            new_params.pop('timestamp', None)
            new_params.pop('spinner', None)
        obj = cls(request)
        if now is None: now = time.time()
        # Reject timestamps from the future (clock skew or tampering)...
        if obj.timestamp > now + 5:
            raise ValueError, 'Post from the future'
        # ...and forms issued more than an hour ago.
        if now - obj.timestamp > 60*60:
            raise ValueError, 'Post from the 1hr+ past'
        if obj.spinner != obj.make_spinner(obj.timestamp):
            raise ValueError, 'Bad spinner value'
        for k in new_params.keys():
            new_params[obj.dec(k)] = new_params.pop(k)
        for fldno in range(obj.num_honey):
            value = new_params.pop('honey%s' % fldno)
            if value:
                # Humans never see honeypots; any value means a bot.
                raise ValueError, 'Value in honeypot field: %s' % value
        return new_params

    @classmethod
    def validate(cls, error_msg):
        '''Controller decorator to raise Invalid errors if bot protection is engaged'''
        def antispam_hook(remainder, params):
            '''Converts various errors in validate_request to a single Invalid message'''
            try:
                new_params = cls.validate_request(params=params)
                params.update(new_params)
            except (ValueError, TypeError, binascii.Error):
                raise Invalid(error_msg, params, None)
        return before_validate(antispam_hook)
class TruthyCallable(object):
    '''
    Wraps a callable to make it truthy in a boolean context.

    Assumes the callable returns a truthy value and can be called with no args.
    '''
    def __init__(self, callable):
        self.callable = callable

    def __call__(self, *args, **kw):
        return self.callable(*args, **kw)

    def __nonzero__(self):
        return self.callable()

    # Python 3 looks up __bool__ instead of __nonzero__; aliasing keeps
    # truth-testing working on both interpreter lines.
    __bool__ = __nonzero__
class CaseInsensitiveDict(dict):
    """dict subclass with case-insensitive string keys.

    Keys are stored lowercased; ``_index`` remembers the original casing
    so ``popitem`` can return it.
    """
    def __init__(self, *args, **kwargs):
        super(CaseInsensitiveDict, self).__init__(*args, **kwargs)
        self._reindex()

    def _reindex(self):
        # Materialize first: on Python 3, items() is a live view that
        # self.clear() would empty before anything could be re-inserted.
        items = list(self.items())
        self.clear()
        self._index = {}
        for k, v in items:
            self[k] = v
        assert len(self) == len(items), 'Duplicate (case-insensitive) key'

    def __getitem__(self, name):
        return super(CaseInsensitiveDict, self).__getitem__(name.lower())

    def __setitem__(self, name, value):
        lname = name.lower()
        super(CaseInsensitiveDict, self).__setitem__(lname, value)
        self._index[lname] = name

    def __delitem__(self, name):
        super(CaseInsensitiveDict, self).__delitem__(name.lower())

    def pop(self, k, *args):
        return super(CaseInsensitiveDict, self).pop(k.lower(), *args)

    def popitem(self):
        k, v = super(CaseInsensitiveDict, self).popitem()
        # Report the key with its original casing.
        return self._index[k], v

    def update(self, *args, **kwargs):
        super(CaseInsensitiveDict, self).update(*args, **kwargs)
        self._reindex()
def postmortem_hook(etype, value, tb): # pragma no cover
    """sys.excepthook replacement: drop into an interactive debugger at
    the point of the uncaught exception (IPython's debugger when
    available, stdlib pdb otherwise)."""
    import sys, pdb, traceback
    try:
        # Old IPython (<0.11) API -- newer releases removed these modules,
        # which lands us in the ImportError fallback below.
        from IPython.ipapi import make_session; make_session()
        from IPython.Debugger import Pdb
        sys.stderr.write('Entering post-mortem IPDB shell\n')
        p = Pdb(color_scheme='Linux')
        p.reset()
        p.setup(None, tb)
        p.print_stack_trace()
        sys.stderr.write('%s: %s\n' % ( etype, value))
        p.cmdloop()
        p.forget()
        # p.interaction(None, tb)
    except ImportError:
        # Fall back to the standard-library debugger.
        sys.stderr.write('Entering post-mortem PDB shell\n')
        traceback.print_exception(etype, value, tb)
        pdb.post_mortem(tb)
class LineAnchorCodeHtmlFormatter(HtmlFormatter):
    """Pygments HTML formatter that wraps every source line in a
    ``<div id="l<n>" class="code_block">`` so individual lines can be
    targeted by fragment links."""

    def _wrap_pre(self, inner):
        styles = []
        if self.prestyles:
            styles.append(self.prestyles)
        if self.noclasses:
            styles.append('line-height: 125%')
        style_attr = '; '.join(styles)
        if style_attr:
            yield 0, '<pre style="%s">' % style_attr
        else:
            yield 0, '<pre>'
        line_no = self.linenostart
        for is_code, text in inner:
            yield is_code, '<div id="l%s" class="code_block">%s</div>' % (line_no, text)
            line_no += 1
        yield 0, '</pre>'
def generate_code_stats(blob):
    """Return size/line statistics for a repository blob.

    ``data_line_count`` counts lines containing anything other than
    whitespace.
    """
    lines = blob.text.split('\n')
    blank = re.compile(r'^\s*$')
    return {
        'code_size': blob.size,
        'line_count': len(lines),
        'data_line_count': sum(1 for line in lines if not blank.match(line)),
    }
def svn_path_exists(path):
    """Return True if *path* exists in the subversion repository."""
    svn = pysvn.Client()
    try:
        svn.info2(path)
        return True
    # Bare except clause (the Py2-only `except E, e` bound a name that was
    # never used); a ClientError means the path is missing or unreachable.
    except pysvn.ClientError:
        return False
def is_text_file(file):
    """Heuristically decide whether *file* (a buffer of raw bytes) holds
    text, by asking libmagic about its first 1 KiB."""
    description = magic.from_buffer(file[:1024])
    return "text" in description or "empty" in description
| {
"content_hash": "18c8a76131dd5c1f62cc6fc9c28eb906",
"timestamp": "",
"source": "github",
"line_count": 448,
"max_line_length": 93,
"avg_line_length": 34.533482142857146,
"alnum_prop": 0.5920754960894576,
"repo_name": "pombredanne/SourceForge-Allura",
"id": "38383c507b0b78c5ef82d64975185f1360d38065",
"size": "15471",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Allura/allura/lib/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.