text stringlengths 4 1.02M | meta dict |
|---|---|
"""Windows platform implementation."""
import errno
import os
import sys
import platform
import warnings
import _psutil_mswindows
from _psutil_mswindows import ERROR_ACCESS_DENIED
from psutil._error import AccessDenied, NoSuchProcess, TimeoutExpired
from psutil._common import *
from psutil._compat import PY3, xrange, long, wraps
# Windows specific extended namespace
__extra__all__ = ["ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
"HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
"NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS"]
# --- module level constants (gets pushed up to psutil module)
# Since these constants get determined at import time we do not want to
# crash immediately; instead we'll set them to None and most likely
# we'll crash later as they're used for determining process CPU stats
# and creation_time
try:
    NUM_CPUS = _psutil_mswindows.get_num_cpus()
except Exception:
    NUM_CPUS = None
    warnings.warn("couldn't determine platform's NUM_CPUS", RuntimeWarning)
try:
    BOOT_TIME = _psutil_mswindows.get_system_boot_time()
except Exception:
    BOOT_TIME = None
    warnings.warn("couldn't determine platform's BOOT_TIME", RuntimeWarning)
try:
    # first field of get_virtual_mem() is total physical memory
    TOTAL_PHYMEM = _psutil_mswindows.get_virtual_mem()[0]
except Exception:
    TOTAL_PHYMEM = None
    warnings.warn("couldn't determine platform's TOTAL_PHYMEM", RuntimeWarning)

# return code of WaitForSingleObject() when the wait times out
WAIT_TIMEOUT = 0x00000102 # 258 in decimal
# errnos (plus the native Windows code) treated as "permission denied"
ACCESS_DENIED_SET = frozenset([errno.EPERM, errno.EACCES, ERROR_ACCESS_DENIED])
# process priority constants:
# http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx
from _psutil_mswindows import (ABOVE_NORMAL_PRIORITY_CLASS,
BELOW_NORMAL_PRIORITY_CLASS,
HIGH_PRIORITY_CLASS,
IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS,
REALTIME_PRIORITY_CLASS,
INFINITE)
@memoize
def _win32_QueryDosDevice(s):
    """Cached wrapper around the C-level QueryDosDevice() call; the
    device-name -> drive-letter mapping is stable, so memoizing avoids
    repeated native calls."""
    return _psutil_mswindows.win32_QueryDosDevice(s)
def _convert_raw_path(s):
    """Translate a native DOS device path such as
    "\\Device\\HarddiskVolume1\\Windows\\systemew\\file.txt"
    into its drive-letter form "C:\\Windows\\systemew\\file.txt".
    """
    if PY3 and not isinstance(s, str):
        s = s.decode('utf8')
    # the first three backslash-separated components form the device root
    device_root = '\\'.join(s.split('\\')[:3])
    drive = _win32_QueryDosDevice(device_root)
    remainder = s[len(device_root):]
    return os.path.join(drive, remainder)
# --- public functions

# boot time is implemented entirely in the C extension
get_system_boot_time = _psutil_mswindows.get_system_boot_time

# return type of virtual_memory()
nt_virtmem_info = namedtuple('vmem', ' '.join([
    # all platforms
    'total', 'available', 'percent', 'used', 'free']))
def virtual_memory():
    """System virtual memory as a namedtuple."""
    totphys, availphys = _psutil_mswindows.get_virtual_mem()[:2]
    used = totphys - availphys
    percent = usage_percent(used, totphys, _round=1)
    # on Windows "free" physical memory equals "available" physical memory
    return nt_virtmem_info(totphys, availphys, percent, used, availphys)
def swap_memory():
    """Swap system memory as a (total, used, free, sin, sout) tuple."""
    raw = _psutil_mswindows.get_virtual_mem()
    # fields 2 and 3 are total and available page-file sizes
    total, free = raw[2], raw[3]
    used = total - free
    percent = usage_percent(used, total, _round=1)
    # sin/sout (pages swapped in/out) are not available on Windows
    return nt_swapmeminfo(total, used, free, percent, 0, 0)
def get_disk_usage(path):
    """Return disk usage associated with *path* as a namedtuple of
    (total, used, free, percent).

    Raises a standard OSError with ENOENT when the path does not exist
    (the native call raises a Windows-specific error instead).
    """
    try:
        total, free = _psutil_mswindows.get_disk_usage(path)
    except WindowsError:
        # fix: the original assigned "err = sys.exc_info()[1]" here and
        # never used it; the dead assignment has been removed
        if not os.path.exists(path):
            raise OSError(errno.ENOENT, "No such file or directory: '%s'" % path)
        raise
    used = total - free
    percent = usage_percent(used, total, _round=1)
    return nt_diskinfo(total, used, free, percent)
def disk_partitions(all):
    """Return disk partitions."""
    # note: "all" shadows the builtin but is part of the public signature
    raw_partitions = _psutil_mswindows.get_disk_partitions(all)
    return [nt_partition(*fields) for fields in raw_partitions]
_cputimes_ntuple = namedtuple('cputimes', 'user system idle')

def get_system_cpu_times():
    """Return system CPU times as a named tuple."""
    # aggregate the per-processor samples into system-wide totals
    per_cpu = _psutil_mswindows.get_system_cpu_times()
    user = sum(t[0] for t in per_cpu)
    system = sum(t[1] for t in per_cpu)
    idle = sum(t[2] for t in per_cpu)
    return _cputimes_ntuple(user, system, idle)
def get_system_per_cpu_times():
    """Return system per-CPU times as a list of named tuples."""
    return [_cputimes_ntuple(user, system, idle)
            for (user, system, idle) in _psutil_mswindows.get_system_cpu_times()]
def get_system_users():
    """Return currently connected users as a list of namedtuples."""
    # the terminal field (second argument) is not available on Windows
    return [nt_user(user, None, hostname, tstamp)
            for (user, hostname, tstamp) in _psutil_mswindows.get_system_users()]
# thin aliases: these functions are implemented entirely in the C extension
get_pid_list = _psutil_mswindows.get_pid_list
pid_exists = _psutil_mswindows.pid_exists
network_io_counters = _psutil_mswindows.get_network_io_counters
disk_io_counters = _psutil_mswindows.get_disk_io_counters
# --- decorator
def wrap_exceptions(fun):
    """Decorator which translates bare OSError and WindowsError
    exceptions into NoSuchProcess and AccessDenied.
    """
    @wraps(fun)
    def wrapper(self, *args, **kwargs):
        try:
            return fun(self, *args, **kwargs)
        except OSError:
            # NOTE(review): sys.exc_info() is used instead of
            # "except OSError as err", presumably for compatibility with
            # very old Python versions — confirm before modernizing
            err = sys.exc_info()[1]
            if err.errno in ACCESS_DENIED_SET:
                raise AccessDenied(self.pid, self._process_name)
            if err.errno == errno.ESRCH:
                raise NoSuchProcess(self.pid, self._process_name)
            raise
    return wrapper
class Process(object):
    """Wrapper class around underlying C implementation."""
    # avoid a per-instance __dict__; only two attributes are ever stored
    __slots__ = ["pid", "_process_name"]

    def __init__(self, pid):
        self.pid = pid
        # used when building NoSuchProcess / AccessDenied exceptions
        # (see wrap_exceptions); stays None unless set by upper layers
        self._process_name = None

    @wrap_exceptions
    def get_process_name(self):
        """Return process name as a string of limited len (15)."""
        return _psutil_mswindows.get_process_name(self.pid)

    @wrap_exceptions
    def get_process_exe(self):
        """Return the path of the process executable, converted from the
        native DOS-device form to a drive-letter path."""
        # Note: os.path.exists(path) may return False even if the file
        # is there, see:
        # http://stackoverflow.com/questions/3112546/os-path-exists-lies
        return _convert_raw_path(_psutil_mswindows.get_process_exe(self.pid))

    @wrap_exceptions
    def get_process_cmdline(self):
        """Return process cmdline as a list of arguments."""
        return _psutil_mswindows.get_process_cmdline(self.pid)

    @wrap_exceptions
    def get_process_ppid(self):
        """Return process parent pid."""
        return _psutil_mswindows.get_process_ppid(self.pid)

    def _get_raw_meminfo(self):
        """Return raw memory counters, retrying via the alternative
        *_2 C implementation when access is denied."""
        try:
            return _psutil_mswindows.get_process_memory_info(self.pid)
        except OSError:
            err = sys.exc_info()[1]
            if err.errno in ACCESS_DENIED_SET:
                return _psutil_mswindows.get_process_memory_info_2(self.pid)
            raise

    @wrap_exceptions
    def get_memory_info(self):
        """Returns a tuple or RSS/VMS memory usage in bytes."""
        # on Windows RSS == WorkingSetSize and VSM == PagefileUsage
        # fields of PROCESS_MEMORY_COUNTERS struct:
        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms684877(v=vs.85).aspx
        t = self._get_raw_meminfo()
        return nt_meminfo(t[2], t[7])

    # return type of get_ext_memory_info(): one field per raw counter
    _nt_ext_mem = namedtuple('meminfo',
                             ' '.join(['num_page_faults',
                                       'peak_wset',
                                       'wset',
                                       'peak_paged_pool',
                                       'paged_pool',
                                       'peak_nonpaged_pool',
                                       'nonpaged_pool',
                                       'pagefile',
                                       'peak_pagefile',
                                       'private',]))

    @wrap_exceptions
    def get_ext_memory_info(self):
        """Return every raw memory counter as a namedtuple."""
        return self._nt_ext_mem(*self._get_raw_meminfo())

    nt_mmap_grouped = namedtuple('mmap', 'path rss')
    nt_mmap_ext = namedtuple('mmap', 'addr perms path rss')

    def get_memory_maps(self):
        """Yield (addr, perm, path, rss) tuples for the process's
        memory-mapped regions."""
        try:
            raw = _psutil_mswindows.get_process_memory_maps(self.pid)
        except OSError:
            # XXX - can't use wrap_exceptions decorator as we're
            # returning a generator; probably needs refactoring.
            err = sys.exc_info()[1]
            if err.errno in (errno.EPERM, errno.EACCES, ERROR_ACCESS_DENIED):
                raise AccessDenied(self.pid, self._process_name)
            if err.errno == errno.ESRCH:
                raise NoSuchProcess(self.pid, self._process_name)
            raise
        else:
            for addr, perm, path, rss in raw:
                path = _convert_raw_path(path)
                addr = hex(addr)
                yield (addr, perm, path, rss)

    @wrap_exceptions
    def kill_process(self):
        """Terminates the process with the given PID."""
        return _psutil_mswindows.kill_process(self.pid)

    @wrap_exceptions
    def process_wait(self, timeout=None):
        """Wait for the process to terminate; raise TimeoutExpired if
        *timeout* seconds elapse first. None means wait forever."""
        if timeout is None:
            timeout = INFINITE
        else:
            # WaitForSingleObject() expects time in milliseconds
            timeout = int(timeout * 1000)
        ret = _psutil_mswindows.process_wait(self.pid, timeout)
        if ret == WAIT_TIMEOUT:
            raise TimeoutExpired(self.pid, self._process_name)
        return ret

    @wrap_exceptions
    def get_process_username(self):
        """Return the name of the user that owns the process"""
        # special-cased kernel process PIDs
        if self.pid in (0, 4):
            return 'NT AUTHORITY\\SYSTEM'
        return _psutil_mswindows.get_process_username(self.pid)

    @wrap_exceptions
    def get_process_create_time(self):
        """Return the process creation time."""
        # special case for kernel process PIDs; return system boot time
        if self.pid in (0, 4):
            return BOOT_TIME
        try:
            return _psutil_mswindows.get_process_create_time(self.pid)
        except OSError:
            err = sys.exc_info()[1]
            if err.errno in ACCESS_DENIED_SET:
                # fall back on the alternative C implementation
                return _psutil_mswindows.get_process_create_time_2(self.pid)
            raise

    @wrap_exceptions
    def get_process_num_threads(self):
        """Return the number of threads used by the process."""
        return _psutil_mswindows.get_process_num_threads(self.pid)

    @wrap_exceptions
    def get_process_threads(self):
        """Return process threads as a list of
        (thread_id, user_time, system_time) namedtuples."""
        rawlist = _psutil_mswindows.get_process_threads(self.pid)
        retlist = []
        for thread_id, utime, stime in rawlist:
            ntuple = nt_thread(thread_id, utime, stime)
            retlist.append(ntuple)
        return retlist

    @wrap_exceptions
    def get_cpu_times(self):
        """Return process CPU times as a namedtuple."""
        try:
            ret = _psutil_mswindows.get_process_cpu_times(self.pid)
        except OSError:
            err = sys.exc_info()[1]
            if err.errno in ACCESS_DENIED_SET:
                # fall back on the alternative C implementation
                ret = _psutil_mswindows.get_process_cpu_times_2(self.pid)
            else:
                raise
        return nt_cputimes(*ret)

    @wrap_exceptions
    def suspend_process(self):
        """Suspend the process."""
        return _psutil_mswindows.suspend_process(self.pid)

    @wrap_exceptions
    def resume_process(self):
        """Resume a previously suspended process."""
        return _psutil_mswindows.resume_process(self.pid)

    @wrap_exceptions
    def get_process_cwd(self):
        """Return the process current working directory."""
        # kernel process PIDs: the native call cannot succeed
        if self.pid in (0, 4):
            raise AccessDenied(self.pid, self._process_name)
        # return a normalized pathname since the native C function appends
        # "\\" at the end of the path
        path = _psutil_mswindows.get_process_cwd(self.pid)
        return os.path.normpath(path)

    @wrap_exceptions
    def get_open_files(self):
        """Return regular files opened by the process as a list of
        namedtuples; duplicates are skipped."""
        if self.pid in (0, 4):
            return []
        retlist = []
        # Filenames come in in native format like:
        # "\Device\HarddiskVolume1\Windows\systemew\file.txt"
        # Convert the first part in the corresponding drive letter
        # (e.g. "C:\") by using Windows's QueryDosDevice()
        raw_file_names = _psutil_mswindows.get_process_open_files(self.pid)
        for file in raw_file_names:
            file = _convert_raw_path(file)
            if isfile_strict(file) and file not in retlist:
                # -1 is a placeholder: file descriptors are not exposed here
                ntuple = nt_openfile(file, -1)
                retlist.append(ntuple)
        return retlist

    @wrap_exceptions
    def get_connections(self, kind='inet'):
        """Return socket connections opened by the process, filtered by
        *kind* (a key of conn_tmap)."""
        if kind not in conn_tmap:
            raise ValueError("invalid %r kind argument; choose between %s"
                             % (kind, ', '.join([repr(x) for x in conn_tmap])))
        families, types = conn_tmap[kind]
        ret = _psutil_mswindows.get_process_connections(self.pid, families, types)
        return [nt_connection(*conn) for conn in ret]

    @wrap_exceptions
    def get_process_nice(self):
        """Return the process priority class."""
        return _psutil_mswindows.get_process_priority(self.pid)

    @wrap_exceptions
    def set_process_nice(self, value):
        """Set the process priority class."""
        return _psutil_mswindows.set_process_priority(self.pid, value)

    # available on Windows >= Vista
    if hasattr(_psutil_mswindows, "get_process_io_priority"):
        @wrap_exceptions
        def get_process_ionice(self):
            """Return the process I/O priority."""
            return _psutil_mswindows.get_process_io_priority(self.pid)

        @wrap_exceptions
        def set_process_ionice(self, value, _):
            """Set the process I/O priority to 2 (normal), 1 (low) or
            0 (very low)."""
            # NOTE(review): the second positional slot appears to exist
            # only for cross-platform signature compatibility; any truthy
            # value is rejected — confirm against the POSIX counterpart
            if _:
                raise TypeError("set_process_ionice() on Windows takes only " \
                                "1 argument (2 given)")
            if value not in (2, 1, 0):
                raise ValueError("value must be 2 (normal), 1 (low) or 0 " \
                                 "(very low); got %r" % value)
            return _psutil_mswindows.set_process_io_priority(self.pid, value)

    @wrap_exceptions
    def get_process_io_counters(self):
        """Return I/O statistics as a namedtuple."""
        try:
            ret = _psutil_mswindows.get_process_io_counters(self.pid)
        except OSError:
            err = sys.exc_info()[1]
            if err.errno in ACCESS_DENIED_SET:
                # fall back on the alternative C implementation
                ret = _psutil_mswindows.get_process_io_counters_2(self.pid)
            else:
                raise
        return nt_io(*ret)

    @wrap_exceptions
    def get_process_status(self):
        """Return STATUS_STOPPED when the process is suspended,
        STATUS_RUNNING otherwise."""
        suspended = _psutil_mswindows.is_process_suspended(self.pid)
        if suspended:
            return STATUS_STOPPED
        else:
            return STATUS_RUNNING

    @wrap_exceptions
    def get_process_cpu_affinity(self):
        """Return the CPUs the process is bound to as a list of indexes."""
        # expand the affinity bitmask into the CPU indexes it covers
        from_bitmask = lambda x: [i for i in xrange(64) if (1 << i) & x]
        bitmask = _psutil_mswindows.get_process_cpu_affinity(self.pid)
        return from_bitmask(bitmask)

    @wrap_exceptions
    def set_process_cpu_affinity(self, value):
        """Bind the process to the given list of CPU indexes."""
        def to_bitmask(l):
            # fold a list of CPU indexes into an affinity bitmask
            if not l:
                raise ValueError("invalid argument %r" % l)
            out = 0
            for b in l:
                out |= 2**b
            return out
        # SetProcessAffinityMask() states that ERROR_INVALID_PARAMETER
        # is returned for an invalid CPU but this seems not to be true,
        # therefore we check CPUs validity beforehand.
        allcpus = list(range(len(get_system_per_cpu_times())))
        for cpu in value:
            if cpu not in allcpus:
                raise ValueError("invalid CPU %r" % cpu)
        bitmask = to_bitmask(value)
        _psutil_mswindows.set_process_cpu_affinity(self.pid, bitmask)

    @wrap_exceptions
    def get_num_handles(self):
        """Return the number of handles opened by the process."""
        try:
            return _psutil_mswindows.get_process_num_handles(self.pid)
        except OSError:
            err = sys.exc_info()[1]
            if err.errno in ACCESS_DENIED_SET:
                # fall back on the alternative C implementation
                return _psutil_mswindows.get_process_num_handles_2(self.pid)
            raise

    @wrap_exceptions
    def get_num_ctx_switches(self):
        """Return the number of context switches as a namedtuple."""
        return nt_ctxsw(*_psutil_mswindows.get_process_num_ctx_switches(self.pid))
| {
"content_hash": "8ccd37ac133dac4f2f7ec25a340f7648",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 88,
"avg_line_length": 35.51893095768374,
"alnum_prop": 0.6100451467268623,
"repo_name": "CaiZhongda/psutil",
"id": "f640d492ac2e3150852e54128f7e387f8c22b591",
"size": "16147",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "psutil/_psmswindows.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "261753"
},
{
"name": "C++",
"bytes": "10088"
},
{
"name": "Python",
"bytes": "303481"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Invert the relation between the two SCIM models: the
    # "segment_settings" field is dropped from SCIMPlatformSettings and
    # replaced by a "settings" foreign key on SCIMSegmentSetting.

    dependencies = [
        ('scim', '0003_auto_20221031_1713'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='scimplatformsettings',
            name='segment_settings',
        ),
        migrations.AddField(
            model_name='scimsegmentsetting',
            name='settings',
            # default=None with preserve_default=False: the default only
            # exists to populate existing rows during this migration
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='scim.SCIMPlatformSettings'),
            preserve_default=False,
        ),
    ]
| {
"content_hash": "9ef32517810de7f274cb5033b1294705",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 127,
"avg_line_length": 28.136363636363637,
"alnum_prop": 0.617124394184168,
"repo_name": "onepercentclub/bluebottle",
"id": "d705c4cc3ab8eaebe4b564ffed8c43d970f1cc47",
"size": "669",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bluebottle/scim/migrations/0004_auto_20221102_0950.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "41694"
},
{
"name": "HTML",
"bytes": "246695"
},
{
"name": "Handlebars",
"bytes": "63"
},
{
"name": "JavaScript",
"bytes": "139123"
},
{
"name": "PHP",
"bytes": "35"
},
{
"name": "PLpgSQL",
"bytes": "1369882"
},
{
"name": "PostScript",
"bytes": "2927"
},
{
"name": "Python",
"bytes": "4983116"
},
{
"name": "Rich Text Format",
"bytes": "39109"
},
{
"name": "SCSS",
"bytes": "99555"
},
{
"name": "Shell",
"bytes": "3068"
},
{
"name": "Smarty",
"bytes": "3814"
}
],
"symlink_target": ""
} |
import os.path
import riak
import django_riak
import pwd
import os
import os.path
import yaml
# Django settings for riagi project.
# Pick the Riak connection and debug/media configuration based on where we
# are running: dotcloud, a host carrying /etc/riagi/riak_lb.yaml, or a
# local development machine.
if pwd.getpwuid(os.getuid())[0] == "dotcloud":
    RIAK_HOST = "riak01.riagi.com"
    RIAK_PORT = "8098"
    DEBUG = False
elif os.path.isfile('/etc/riagi/riak_lb.yaml'):
    # NOTE(review): the file handle is never closed, and yaml.load()
    # without an explicit Loader can construct arbitrary objects;
    # yaml.safe_load() would be safer — confirm before changing.
    doc = open('/etc/riagi/riak_lb.yaml', 'r').read()
    settings = yaml.load(doc)
    RIAK_HOST = settings['loadbalancer']['hostname']
    RIAK_PORT = settings['loadbalancer']['port']
    DEBUG = False
    # NOTE(review): these four values are unconditionally overwritten
    # further down in this module — verify which assignment is intended.
    MEDIA_URL = settings['media']['url']
    MEDIA_ROOT = settings['media']['root']
    STATIC_URL = settings['static']['url']
    STATIC_ROOT = settings['static']['root']
else:
    # local development defaults
    RIAK_HOST = "127.0.0.1"
    RIAK_PORT = "8087"
    DEBUG = True
    MEDIA_URL = ''
    STATIC_URL = '/static/'
    STATIC_ROOT = ''
    MEDIA_ROOT = ''
TEMPLATE_DEBUG = DEBUG

# Riak connection and bucket names used by the application
RIAK_PROTOCOL = "http"
RIAK_USERS_BUCKET = "riagi-users"
RIAK_IMAGE_BUCKET = "riagi-images"
RIAK_THUMBS_BUCKET = "riagi-thumbs"
RIAK_METADATA_BUCKET = "riagi-image-metadata"
# NOTE(review): 0 presumably forces every upload to be streamed to disk
# instead of held in memory — confirm against the Django documentation
FILE_UPLOAD_MAX_MEMORY_SIZE = 0
RIAK_TRANSPORT_CLASS = riak.RiakHttpTransport

# Riak-backed session storage
RIAK_BUCKET = 'django-riak-sessions'
SESSION_ENGINE = "django_riak"

APPEND_SLASH = False
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
USE_L10N = False
# NOTE(review): these four assignments unconditionally override the
# values read from riak_lb.yaml earlier in this module — verify which
# assignment is intended before relying on yaml-provided media settings.
MEDIA_URL = ''
STATIC_URL = '/static/'
STATIC_ROOT = ''
MEDIA_ROOT = ''
# directories searched for static assets in addition to each app's own
STATICFILES_DIRS = (
    os.path.join(os.path.dirname(__file__), "static"),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# NOTE(review): secret key hard-coded in source; should live in
# environment/local configuration instead of version control.
SECRET_KEY = ')^)dn9@fe=7=7lurgj#$r9)h$1y9h*zc@kzdur46nd6l@qe%)!'
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'riagi.urls'
# fix: the trailing comma is required — without it the parentheses are
# plain grouping and TEMPLATE_DIRS is a string, not a 1-tuple, so Django
# would iterate it character by character.
TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), "templates"),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # project applications
    'riagi.images',
    'riagi.users'
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)
TEMPLATE_CONTEXT_PROCESSORS = ("django.core.context_processors.debug",
    "django.core.context_processors.i18n", "django.core.context_processors.media",
    'django.core.context_processors.request',)
# E-mail the site admins on every ERROR-level request failure.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| {
"content_hash": "bea4e34a27779dafabbfaef3aa59493e",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 78,
"avg_line_length": 26.095238095238095,
"alnum_prop": 0.6614963503649635,
"repo_name": "jsmartin/riagi",
"id": "201a1f9dd49f1f868e093527e8c0cb7910274e33",
"size": "3288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "riagi/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "49641"
},
{
"name": "Python",
"bytes": "22299"
}
],
"symlink_target": ""
} |
from manage_cloudant import authenticate_cloudant_instance, run_ask_runs
if __name__ == '__main__':
    import argparse

    # command line: username is mandatory, databases/admin are optional
    arg_parser = argparse.ArgumentParser(description='Generate New Cloudant API key')
    arg_parser.add_argument('--username', dest='username', required=True,
                            help='Cloudant username')
    arg_parser.add_argument('--databases', dest='databases', nargs='*')
    arg_parser.add_argument('--admin', dest='admin', nargs='?', const=True,
                            default=False)
    options = arg_parser.parse_args()

    instance = authenticate_cloudant_instance(options.username)
    api_key, api_password = instance.generate_api_key().ask_and_run()
    print('New API key generated.')
    print('API Key:', api_key)
    print('API Password:', api_password)

    # queue one access grant per requested database, then run them together
    pending = [instance.get_db(name).grant_api_key_access(api_key,
                                                          admin=options.admin)
               for name in options.databases]
    run_ask_runs(pending)
| {
"content_hash": "83785b309baab04426f6d64e7a7c52e0",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 86,
"avg_line_length": 37.92307692307692,
"alnum_prop": 0.6561866125760649,
"repo_name": "dimagi/commcare-hq",
"id": "159d91c740d49fd98f8f1b53c3f74ac89c16624f",
"size": "986",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/cloudant/generate_api_key.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
from django.shortcuts import render, redirect, get_object_or_404
from models import Contact, Group
from django.contrib import messages
from forms import ContactForm, GroupForm, ImportUploadForm
from django.contrib.auth.decorators import login_required
import csv
@login_required
def index(request):
    """ Contacts list. """
    owned = Contact.objects.filter(owner=request.user)
    return render(request, "contacts/index.html", {"contacts": owned})
@login_required
def create_contact(request):
    """ Create a new contact. """
    if request.method == "POST":
        form = ContactForm(request.POST)
        if form.is_valid():
            # attach the current user as owner before persisting
            contact = form.save(commit=False)
            contact.owner = request.user
            contact.save()
            messages.success(request, "%s added" % contact.name)
            return redirect("view_contact", contact.id)
    else:
        form = ContactForm()
    return render(request, "contacts/create.html", {"form": form})
@login_required
def view_contact(request, pk):
    """ View a contact. """
    # 404 unless the contact exists AND belongs to the requesting user
    return render(request, "contacts/view.html", {
        "contact": get_object_or_404(Contact, pk=pk, owner=request.user),
    })
@login_required
def edit_contact(request, pk):
    """ Edit a contact. """
    contact = get_object_or_404(Contact, pk=pk, owner=request.user)
    if request.method == "POST":
        form = ContactForm(request.POST, instance=contact)
        if form.is_valid():
            contact = form.save()
            messages.success(request, "%s updated" % contact.name)
            return redirect("view_contact", contact.id)
    else:
        form = ContactForm(instance=contact)
    return render(request, "contacts/edit.html", {"contact": contact, "form": form})
@login_required
def delete_contact(request, pk):
    """ Delete a contact. """
    contact = get_object_or_404(Contact, pk=pk, owner=request.user)
    if request.method != "POST":
        # GET: show the confirmation page instead of deleting
        return render(request, "contacts/confirm_delete.html", {"contact": contact})
    contact.delete()
    messages.success(request, "%s deleted" % contact.name)
    return redirect("contacts")
@login_required
def groups(request):
    """ Group list. """
    owned = Group.objects.filter(owner=request.user)
    return render(request, "contacts/groups/index.html", {"groups": owned})
@login_required
def create_group(request):
    """ Create a new group. """
    if request.method == "POST":
        form = GroupForm(request.POST)
        if form.is_valid():
            # attach the current user as owner before persisting
            group = form.save(commit=False)
            group.owner = request.user
            group.save()
            messages.success(request, "%s added" % group.name)
            return redirect("view_group", group.id)
    else:
        form = GroupForm()
    return render(request, "contacts/groups/create.html", {"form": form})
@login_required
def view_group(request, pk):
    """ View a group. """
    # 404 unless the group exists AND belongs to the requesting user
    return render(request, "contacts/groups/view.html", {
        "group": get_object_or_404(Group, pk=pk, owner=request.user),
    })
@login_required
def edit_group(request, pk):
    """ Edit a group. """
    group = get_object_or_404(Group, pk=pk, owner=request.user)
    if request.method == "POST":
        form = GroupForm(request.POST, instance=group)
        if form.is_valid():
            group = form.save()
            messages.success(request, "%s updated" % group.name)
            return redirect("view_group", group.id)
    else:
        form = GroupForm(instance=group)
    return render(request, "contacts/groups/edit.html", {"group": group, "form": form})
@login_required
def delete_group(request, pk):
    """ Delete a group. """
    group = get_object_or_404(Group, pk=pk, owner=request.user)
    if request.method != "POST":
        # GET: show the confirmation page instead of deleting
        return render(request, "contacts/groups/confirm_delete.html", {"group": group})
    group.delete()
    messages.success(request, "%s deleted" % group.name)
    return redirect("groups")
@login_required
def import_contacts(request):
    """ Import contacts from CSV. """
    upload_form = ImportUploadForm()
    errors = []
    if request.method == "POST":
        upload_form = ImportUploadForm(request.POST, request.FILES)
        if upload_form.is_valid():
            reader = csv.DictReader(request.FILES['file'].read().splitlines())
            rows = []
            for line_no, row in enumerate(reader):
                rows.append(row)
                for required in Contact.REQUIRED_FIELDS:
                    if row.get(required, "").strip() == "":
                        # Required field missing - though we might already have this field if the contact already exists
                        errors.append("Error on line " + str(line_no + 1) + ": missing required field " + required)
            if len(errors) == 0:
                # stash parsed rows for the confirmation step
                request.session['imported_contacts'] = rows
                return redirect("import_contacts_confirm")
    return render(request, "contacts/import/upload.html", {"form": upload_form, "errors": errors})
@login_required
def import_contacts_confirm(request):
    """ Confirm import of contacts from CSV.

    GET shows the parsed rows stashed in the session by import_contacts;
    POST creates/updates a Contact per row and clears the session data.
    """
    if request.method == "POST":
        for contact_info in request.session['imported_contacts']:
            # match existing contacts by e-mail so re-imports update in place
            contact, created = Contact.objects.get_or_create(email=contact_info['email'], owner=request.user)
            for attr in Contact.REQUIRED_FIELDS:
                setattr(contact, attr, contact_info[attr])
            # fix: removed the dead local "fields = contact.fields" which
            # was assigned and never used
            # any non-required CSV column is stored as a custom field
            for attr in [c for c in contact_info.keys() if not c in Contact.REQUIRED_FIELDS]:
                contact.fields[attr] = contact_info[attr]
            contact.save()
        messages.success(request, "%s contacts imported" % len(request.session['imported_contacts']))
        del request.session['imported_contacts']
        return redirect("contacts")
    contacts = request.session['imported_contacts']
    # NOTE: relies on the Python 2 builtin reduce() (not imported here)
    keys = set(reduce(lambda x, y: x + y, [c.keys() for c in contacts]))
    # show email / first_name / last_name columns first, the rest after
    keys = sorted(keys, key=lambda x : 1 if x == "email" else 2 if x == "first_name" else 3 if x == "last_name" else 4)
    return render(request, "contacts/import/confirm.html", {"keys": keys, "contacts": contacts})
"content_hash": "27d9a1d2ddd11ccea7117fee083c3e41",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 120,
"avg_line_length": 41.006410256410255,
"alnum_prop": 0.6374863217133031,
"repo_name": "jscott1989/holloway",
"id": "583ab3a110f66a5ccdc2ff885c4862bfb204218c",
"size": "6397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contacts/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "41031"
},
{
"name": "CoffeeScript",
"bytes": "2372"
},
{
"name": "Python",
"bytes": "33372"
},
{
"name": "Shell",
"bytes": "364"
}
],
"symlink_target": ""
} |
from django.forms import ModelForm, forms, DateInput
from appPonto.models import *
from django import forms
class FuncionarioForm(ModelForm):
    """ModelForm for Funcionario (employee); "senha" (password) is an
    optional field rendered with a password input widget."""
    senha = forms.CharField(label='Senha', widget=forms.PasswordInput, required=False)

    class Meta:
        model = Funcionario
        fields = (
            'nome', 'matricula', 'cpf', 'dataNascimento', 'telefone', 'cargo', 'Email', 'sexo', 'salario',
            'foto', 'situacao', 'senha')
class DepartamentoForm(ModelForm):
    """ModelForm for Departamento (department); only the description."""
    class Meta:
        model = Departamento
        fields = ('descricao',)
class CargoForm(ModelForm):
    """ModelForm for Cargo (job title) with its department."""
    class Meta:
        model = Cargo
        fields = ('nome_funcao','departamento')
class HorarioForm(ModelForm):
    """ModelForm for Horario (work schedule)."""
    class Meta:
        model = Horario
        fields = ('cargahoraria', 'dias', 'pessoa')
class DiasSemExpedienteForm(ModelForm):
    """ModelForm for DiasSemExpediente (non-working days)."""
    class Meta:
        model = DiasSemExpediente
        fields = ('descricao','data')
"content_hash": "501ac2883a197f6451fc1e8ddce94375",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 102,
"avg_line_length": 29.516129032258064,
"alnum_prop": 0.6459016393442623,
"repo_name": "Ednilsonpalhares/SCEFA",
"id": "ec83745e7418943106d37b1a60cf34d81007fcb1",
"size": "915",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "appPonto/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "42663"
},
{
"name": "HTML",
"bytes": "158928"
},
{
"name": "JavaScript",
"bytes": "326301"
},
{
"name": "Python",
"bytes": "368956"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import Conv2D, Pool2D, FC
class SimpleImgConvPool(fluid.dygraph.Layer):
    """A Conv2D layer immediately followed by a Pool2D layer.

    All convolution and pooling hyper-parameters are exposed through the
    constructor; defaults give an un-padded, stride-1 convolution and
    max pooling.
    """

    def __init__(self,
                 name_scope,
                 num_filters,
                 filter_size,
                 pool_size,
                 pool_stride,
                 pool_padding=0,
                 pool_type='max',
                 global_pooling=False,
                 conv_stride=1,
                 conv_padding=0,
                 conv_dilation=1,
                 conv_groups=1,
                 act=None,
                 use_cudnn=False,
                 dtype='float32',
                 param_attr=None,
                 bias_attr=None):
        super(SimpleImgConvPool, self).__init__(name_scope)

        # convolution sub-layer
        self._conv2d = Conv2D(
            self.full_name(),
            num_filters=num_filters,
            filter_size=filter_size,
            stride=conv_stride,
            padding=conv_padding,
            dilation=conv_dilation,
            groups=conv_groups,
            param_attr=param_attr,
            bias_attr=bias_attr,
            use_cudnn=use_cudnn,
            dtype=dtype,
            act=act)

        # pooling sub-layer
        self._pool2d = Pool2D(
            self.full_name(),
            pool_size=pool_size,
            pool_type=pool_type,
            pool_stride=pool_stride,
            pool_padding=pool_padding,
            global_pooling=global_pooling,
            use_cudnn=use_cudnn)

    def forward(self, inputs):
        """Apply the convolution, then the pooling, and return the result."""
        x = self._conv2d(inputs)
        x = self._pool2d(x)
        return x
class MNIST(fluid.dygraph.Layer):
    """Classifier built from two SimpleImgConvPool stages and a softmax
    FC layer; forward() returns the mean cross-entropy loss."""

    def __init__(self, name_scope, dtype="float32"):
        super(MNIST, self).__init__(name_scope)

        self._simple_img_conv_pool_1 = SimpleImgConvPool(
            self.full_name(),
            num_filters=20,
            filter_size=5,
            pool_size=2,
            pool_stride=2,
            act="relu",
            dtype=dtype,
            use_cudnn=True)

        self._simple_img_conv_pool_2 = SimpleImgConvPool(
            self.full_name(),
            num_filters=50,
            filter_size=5,
            pool_size=2,
            pool_stride=2,
            act="relu",
            dtype=dtype,
            use_cudnn=True)

        pool_2_shape = 50 * 4 * 4
        SIZE = 10
        # NOTE(review): presumably a normal-initializer scale derived from
        # the layer dimensions — confirm against the reference model
        scale = (2.0 / (pool_2_shape**2 * SIZE))**0.5
        self._fc = FC(self.full_name(),
                      10,
                      param_attr=fluid.param_attr.ParamAttr(
                          initializer=fluid.initializer.NormalInitializer(
                              loc=0.0, scale=scale)),
                      act="softmax",
                      dtype=dtype)

    def forward(self, inputs, label):
        """Return the mean cross-entropy loss of *inputs* against *label*."""
        x = self._simple_img_conv_pool_1(inputs)
        x = self._simple_img_conv_pool_2(x)
        cost = self._fc(x)
        loss = fluid.layers.cross_entropy(cost, label)
        avg_loss = fluid.layers.mean(loss)
        return avg_loss
class TestMnist(unittest.TestCase):
    # FIXME(zcd): disable this random failed test temporally.
    @unittest.skip("should fix this later")
    def test_mnist_fp16(self):
        """Smoke-test a float16 forward pass of the MNIST model on CUDA."""
        # nothing to test when paddle was built without CUDA support
        if not fluid.is_compiled_with_cuda():
            return
        x = np.random.randn(1, 3, 224, 224).astype("float16")
        y = np.random.randn(1, 1).astype("int64")
        with fluid.dygraph.guard(fluid.CUDAPlace(0)):
            model = MNIST("mnist", dtype="float16")
            x = fluid.dygraph.to_variable(x)
            y = fluid.dygraph.to_variable(y)
            loss = model(x, y)
            print(loss.numpy())
if __name__ == "__main__":
    # run the test suite when executed directly
    unittest.main()
| {
"content_hash": "b71d638834d8fdf168748d1eb5435c56",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 74,
"avg_line_length": 30.60330578512397,
"alnum_prop": 0.5031055900621118,
"repo_name": "tensor-tang/Paddle",
"id": "1c72a41411e0bc23973ae4c69bcd75a192cce5ad",
"size": "4314",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_dygraph_mnist_fp16.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32490"
},
{
"name": "C++",
"bytes": "10161819"
},
{
"name": "CMake",
"bytes": "290828"
},
{
"name": "Cuda",
"bytes": "1183095"
},
{
"name": "Dockerfile",
"bytes": "10002"
},
{
"name": "Python",
"bytes": "7082088"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "200906"
}
],
"symlink_target": ""
} |
from dry_rest_permissions.generics import DRYPermissionFiltersBase
from django.db.models import Q
from apps.registration.models import Assignment
from .models import Appearance
from .models import Round
class AppearanceFilterBackend(DRYPermissionFiltersBase):
    def filter_list_queryset(self, request, queryset, view):
        """
        Limits all list requests to only be seen by the owners or creators.
        """
        if request.user.is_staff:
            return queryset
        # visible when the round is complete...
        completed = Q(
            round__status=Round.STATUS.completed,
        )
        # ...or when the requester is an active DRCJ/CA on the convention
        officer = Q(
            round__session__convention__assignments__person__user=request.user,
            round__session__convention__assignments__status__gt=0,
            round__session__convention__assignments__category__lte=Assignment.CATEGORY.ca,
        )
        return queryset.filter(completed | officer).distinct()
class OutcomeFilterBackend(DRYPermissionFiltersBase):
    def filter_list_queryset(self, request, queryset, view):
        """
        Limits all list requests to only be seen by the owners or creators.
        """
        # Staff users bypass row-level filtering entirely.
        if request.user.is_staff:
            return queryset
        # Outcomes of a completed round are public.
        round_completed = Q(
            round__status=Round.STATUS.completed,
        )
        # Active convention officials (up to CA category) see everything.
        assigned_official = Q(
            round__session__convention__assignments__person__user=request.user,
            round__session__convention__assignments__status__gt=0,
            round__session__convention__assignments__category__lte=Assignment.CATEGORY.ca,
        )
        return queryset.filter(round_completed | assigned_official).distinct()
class ScoreFilterBackend(DRYPermissionFiltersBase):
    def filter_list_queryset(self, request, queryset, view):
        """
        Limits all list requests to only be seen by the owners or creators.
        """
        # Staff users bypass row-level filtering entirely.
        if request.user.is_staff:
            return queryset
        # Assigned DRCJs and CAs can always see
        assigned_official = Q(
            song__appearance__round__session__convention__assignments__person__user=request.user,
            song__appearance__round__session__convention__assignments__status__gt=0,
            song__appearance__round__session__convention__assignments__category__lte=Assignment.CATEGORY.ca,
        )
        # Panelists can see their own scores
        own_panelist_score = Q(
            panelist__person__user=request.user,
            panelist__status__gt=0,
        )
        # Panelists can see others' scores if Appearance is complete.
        peer_panelist_score = Q(
            song__appearance__round__panelists__person__user=request.user,
            song__appearance__round__panelists__status__gt=0,
            song__appearance__status__lte=Appearance.STATUS.completed,
        )
        # Group members can see their own scores if complete.
        own_group_score = Q(
            song__appearance__group__members__person__user=request.user,
            song__appearance__group__members__status__gt=0,
            song__appearance__status__lte=Appearance.STATUS.completed,
        )
        visible = (
            assigned_official
            | own_panelist_score
            | peer_panelist_score
            | own_group_score
        )
        return queryset.filter(visible).distinct()
class SongFilterBackend(DRYPermissionFiltersBase):
    def filter_list_queryset(self, request, queryset, view):
        """
        Limits all list requests to only be seen by the owners or creators.
        """
        # Staff users bypass row-level filtering entirely.
        if request.user.is_staff:
            return queryset
        # Songs from a completed round are public.
        round_completed = Q(
            appearance__round__status=Round.STATUS.completed,
        )
        # Active convention officials (up to CA category) see everything.
        assigned_official = Q(
            appearance__round__session__convention__assignments__person__user=request.user,
            appearance__round__session__convention__assignments__status__gt=0,
            appearance__round__session__convention__assignments__category__lte=Assignment.CATEGORY.ca,
        )
        return queryset.filter(round_completed | assigned_official).distinct()
"content_hash": "cc147e396269146eecdc1a146ee8c4d3",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 112,
"avg_line_length": 37.91428571428571,
"alnum_prop": 0.5923134890730972,
"repo_name": "dbinetti/barberscore",
"id": "c2fa9eb095e79fc2e0303665b1964f73e247adf7",
"size": "3981",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "project/apps/adjudication/filterbackends.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "20622"
},
{
"name": "Python",
"bytes": "398095"
}
],
"symlink_target": ""
} |
from twisted.words.protocols import irc
from txircd.modbase import Command
from txircd.utils import irc_lower, epoch
class ModeCommand(Command):
    """Implements the IRC MODE command for both user and channel targets."""

    def onUse(self, user, data):
        """Apply or report modes for the target resolved by processParams."""
        if "targetchan" in data:
            if "modes" in data:
                # Mode change requested: delegate to the channel object.
                data["targetchan"].setMode(user, data["modes"], data["params"])
            else:
                # Bare "MODE #chan": report current modes and creation time.
                channel = data["targetchan"]
                user.sendMessage(irc.RPL_CHANNELMODEIS, channel.name, channel.modeString(user))
                user.sendMessage(irc.RPL_CREATIONTIME, channel.name, str(epoch(channel.created)))
        elif "targetuser" in data:
            if user == data["targetuser"]:
                if "modes" in data:
                    data["targetuser"].setMode(user, data["modes"], data["params"])
                else:
                    # Bare "MODE nick" on oneself: report own user modes.
                    user.sendMessage(irc.RPL_UMODEIS, user.modeString(user))
            else:
                # Users may only inspect or change their own modes.
                user.sendMessage(irc.ERR_USERSDONTMATCH, ":Can't operate on modes for other users")

    def processParams(self, user, params):
        """Validate MODE parameters and resolve the target user or channel.

        Returns the dict consumed by onUse, or {} to abort the command
        (an error numeric has already been sent in that case).
        """
        # NOTE(review): `registered` appears to count outstanding
        # registration steps, so > 0 means "not yet registered" — confirm
        # against the user implementation.
        if user.registered > 0:
            user.sendMessage(irc.ERR_NOTREGISTERED, "MODE", ":You have not registered")
            return {}
        if not params:
            user.sendMessage(irc.ERR_NEEDMOREPARAMS, "MODE", ":Not enough parameters")
            return {}
        if params[0] in self.ircd.users:
            if len(params) > 1 and params[1]:
                # User mode change: params[1] is the mode string.
                return {
                    "user": user,
                    "targetuser": self.ircd.users[params[0]],
                    "modes": params[1],
                    "params": params[2:]
                }
            # User mode query.
            return {
                "user": user,
                "targetuser": self.ircd.users[params[0]]
            }
        if params[0] in self.ircd.channels:
            cdata = self.ircd.channels[params[0]]
            if not user.hasAccess(cdata, self.ircd.servconfig["channel_minimum_level"]["MODE"]):
                # Without the required channel rank, a mode change that
                # carries arguments is always denied...
                if len(params) > 2:
                    user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, cdata.name, ":You must have channel operator access to set channel modes")
                    return {}
                if len(params) > 1:
                    # ...and an argument-less mode string is only allowed
                    # when every mode in it has type 0 — presumably list
                    # queries (e.g. the ban list), which merely read state;
                    # confirm against channel_mode_type's definition.
                    for mode in params[1]:
                        if mode == "+" or mode == "-":
                            continue
                        if mode in self.ircd.channel_mode_type and self.ircd.channel_mode_type[mode] != 0:
                            user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, cdata.name, ":You must have channel operator access to set channel modes")
                            return {}
            if len(params) > 1 and params[1]:
                # Channel mode change (or permitted list query).
                return {
                    "user": user,
                    "targetchan": cdata,
                    "modes": params[1],
                    "params": params[2:]
                }
            # Channel mode query.
            return {
                "user": user,
                "targetchan": cdata
            }
        user.sendMessage(irc.ERR_NOSUCHNICK, params[0], ":No such nick/channel")
        return {}
class Spawner(object):
    """Module entry point: wires the MODE command into the IRCd."""

    def __init__(self, ircd):
        self.ircd = ircd

    def spawn(self):
        """Ensure config defaults exist and expose the MODE command."""
        servconfig = self.ircd.servconfig
        # Default minimum channel rank required to set channel modes.
        servconfig.setdefault("channel_minimum_level", {})
        servconfig["channel_minimum_level"].setdefault("MODE", "o")
        return {
            "commands": {
                "MODE": ModeCommand()
            }
        }
"content_hash": "45f2ce1c778ef74e71ef5b22770caf87",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 145,
"avg_line_length": 43.03658536585366,
"alnum_prop": 0.5103428733352224,
"repo_name": "DesertBus/txircd",
"id": "b92cae7eaefdf79c339d847e4d06bf22f1fa46f4",
"size": "3529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "txircd/modules/cmd_mode.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "454329"
}
],
"symlink_target": ""
} |
"""
$description Global live streaming and video on-demand hosting platform.
$url dailymotion.com
$type live, vod
"""
import logging
import re
from streamlink.exceptions import NoStreamsError
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
from streamlink.stream.http import HTTPStream
log = logging.getLogger(__name__)
# Cookies sent with metadata requests: disable Dailymotion's family filter
# so age-restricted content is included in responses.
COOKIES = {
    "family_filter": "off",
    "ff": "off"
}
# Player metadata endpoint for a single video/livestream ID.
STREAM_INFO_URL = "https://www.dailymotion.com/player/metadata/video/{0}"
# Public REST API endpoint for user (channel) lookups.
USER_INFO_URL = "https://api.dailymotion.com/user/{0}"

# Expected shape of the player metadata response: either an error payload
# or a mapping of quality name -> list of {type, url} entries.
_media_schema = validate.Schema(validate.any(
    {"error": {"title": validate.text}},
    # "stream_chromecast_url": validate.url(),
    # Chromecast URL is already available in qualities subdict
    {"qualities": validate.any({
        validate.text: validate.all([{
            "type": validate.text,
            "url": validate.url()
        }])
    })
}))

# Expected shape of the user-videos listing used to resolve a channel's
# currently-live video ID ("list" is empty when nothing is on air).
_live_id_schema = validate.Schema(
    {
        "total": int,
        "list": validate.any(
            [],
            [{"id": validate.text}]
        )
    }
)
@pluginmatcher(re.compile(r"""
    https?://(?:\w+\.)?dailymotion\.com
    (?:
        (/embed)?/(video|live)/(?P<media_id>[^_?/]+)
        |
        /(?P<channel_name>[\w-]+)
    )
""", re.VERBOSE))
class DailyMotion(Plugin):
    def _get_streams_from_media(self, media_id):
        """Yield (name, stream) pairs for the given media ID.

        Queries the player metadata endpoint and exposes HLS variants
        (from the "auto" quality only, to avoid duplicates) plus any
        progressive MP4 renditions.
        """
        res = self.session.http.get(STREAM_INFO_URL.format(media_id), cookies=COOKIES)
        media = self.session.http.json(res, schema=_media_schema)

        if media.get("error"):
            log.error("Failed to get stream: {0}".format(media["error"]["title"]))
            return

        for quality, streams in media['qualities'].items():
            for stream in streams:
                if stream['type'] == 'application/x-mpegURL':
                    if quality != 'auto':
                        # Avoid duplicate HLS streams with bitrate selector in the URL query
                        continue
                    yield from HLSStream.parse_variant_playlist(self.session, stream['url']).items()
                elif stream['type'] == 'video/mp4':
                    # Drop FPS in quality (e.g. "1080@60" -> "1080p")
                    resolution = re.sub('@[0-9]+', '', quality) + 'p'
                    yield resolution, HTTPStream(self.session, stream['url'])

    def get_live_id(self, username):
        """Get the livestream videoid from a username.

        https://developer.dailymotion.com/tools/apiexplorer#/user/videos/list

        Returns the media ID of the on-air video, or False when the user
        is not live. Raises NoStreamsError for an invalid username.
        """
        params = {
            "flags": "live_onair"
        }
        api_user_videos = USER_INFO_URL.format(username) + "/videos"
        try:
            # Bug fix: the URL is already fully formatted above; the previous
            # `api_user_videos.format(username)` re-ran substitution, which
            # would misbehave if the username contained brace characters.
            res = self.session.http.get(
                api_user_videos,
                params=params
            )
        except Exception:
            log.error("invalid username")
            raise NoStreamsError(self.url)

        data = self.session.http.json(res, schema=_live_id_schema)
        if data["total"] > 0:
            media_id = data["list"][0]["id"]
            return media_id
        return False

    def _get_streams(self):
        """Resolve a media ID from the URL (or live channel) and stream it."""
        media_id = self.match.group("media_id")
        username = self.match.group("channel_name")

        if not media_id and username:
            media_id = self.get_live_id(username)

        if media_id:
            log.debug("Found media ID: {0}".format(media_id))
            return self._get_streams_from_media(media_id)


__plugin__ = DailyMotion
| {
"content_hash": "e96ae7dad6b14db60d42f6fbee710b6e",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 100,
"avg_line_length": 31.72072072072072,
"alnum_prop": 0.5714285714285714,
"repo_name": "gravyboat/streamlink",
"id": "8986131afc8e5e18ab0a45a604bfd41d98a21027",
"size": "3521",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/streamlink/plugins/dailymotion.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1392475"
},
{
"name": "Shell",
"bytes": "6280"
}
],
"symlink_target": ""
} |
from django.apps import apps
from django.urls import reverse
import shopify
class ConfigurationError(Exception):
    """Raised when the Shopify app's API credentials are not configured.

    Derives from ``Exception`` (not ``BaseException``): user-defined
    exceptions should subclass ``Exception`` so that ordinary
    ``except Exception`` handlers can catch them; ``BaseException`` is
    reserved for interpreter-level exits like ``SystemExit``.
    """
    pass
class LoginProtection(object):
    """Middleware that activates a Shopify API session for the request.

    When the Django session carries Shopify credentials, a Shopify API
    session is activated before the view runs and cleared afterwards.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        app_config = apps.get_app_config('shopify_app')
        self.api_key = app_config.SHOPIFY_API_KEY
        self.api_secret = app_config.SHOPIFY_API_SECRET
        if not self.api_key or not self.api_secret:
            raise ConfigurationError("SHOPIFY_API_KEY and SHOPIFY_API_SECRET must be set in ShopifyAppConfig")
        shopify.Session.setup(api_key=self.api_key, secret=self.api_secret)

    def __call__(self, request):
        if hasattr(request, 'session') and 'shopify' in request.session:
            api_version = apps.get_app_config('shopify_app').SHOPIFY_API_VERSION
            shop_state = request.session['shopify']
            shopify_session = shopify.Session(shop_state['shop_url'], api_version)
            shopify_session.token = shop_state['access_token']
            shopify.ShopifyResource.activate_session(shopify_session)
        response = self.get_response(request)
        shopify.ShopifyResource.clear_session()
        return response
| {
"content_hash": "02ae560172b8322baea579687a9b3d9e",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 110,
"avg_line_length": 45.111111111111114,
"alnum_prop": 0.6822660098522167,
"repo_name": "Shopify/shopify_django_app",
"id": "854ecec8b683e87992a983c40c6e63b89b7005b5",
"size": "1218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shopify_app/middleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5128"
},
{
"name": "HTML",
"bytes": "5524"
},
{
"name": "Python",
"bytes": "12669"
}
],
"symlink_target": ""
} |
"""
This algorithm determines if a spike train `spk` can be considered as
stationary process (constant firing rate) or not as stationary process (i.e.
presence of one or more points at which the rate increases or decreases). In
case of non-stationarity, the output is a list of detected Change Points (CPs).
Essentially, a set of two-sided windows of width `h` (`_filter(t, h, spk)`)
slides over the spike train within the time `[h, t_final-h]`. This generates a
`_filter_process(time_step, h, spk)` that assigns to each time `t` the
difference between the number of spikes lying in the right and left windows.
If at any time `t` this difference is large enough, the presence of a rate
Change Point in a neighborhood of `t` is assumed. A threshold `test_quantile`
for the maximum of the filter_process (the maximum difference of spike counts
between the left and right windows) is derived based on asymptotic
considerations. The procedure is repeated for an arbitrary set of windows,
with different sizes `h`.
Examples
--------
The following applies multiple_filter_test to a spike trains.
>>> import quantities as pq
>>> import neo
>>> from elephant.change_point_detection import multiple_filter_test
...
>>> test_array = [1.1,1.2,1.4, 1.6,1.7,1.75,1.8,1.85,1.9,1.95]
>>> st = neo.SpikeTrain(test_array, units='s', t_stop = 2.1)
>>> window_size = [0.5]*pq.s
>>> t_fin = 2.1*pq.s
>>> alpha = 5.0
>>> num_surrogates = 10000
>>> change_points = multiple_filter_test(window_size, st, t_fin, alpha,
... num_surrogates, time_step = 0.5*pq.s)
References
----------
Messer, M., Kirchner, M., Schiemann, J., Roeper, J., Neininger, R., &
Schneider, G. (2014). A multiple filter test for the detection of rate changes
in renewal processes with varying variance. The Annals of Applied Statistics,
8(4),2027-2067.
Original code
-------------
Adapted from the published R implementation:
DOI: 10.1214/14-AOAS782SUPP;.r
"""
from __future__ import division, print_function, unicode_literals
import numpy as np
import quantities as pq
from elephant.utils import deprecated_alias
__all__ = [
"multiple_filter_test",
"empirical_parameters"
]
@deprecated_alias(dt='time_step')
def multiple_filter_test(window_sizes, spiketrain, t_final, alpha,
                         n_surrogates, test_quantile=None, test_param=None,
                         time_step=None):
    """
    Detects change points.

    This function returns the detected change points, that correspond to the
    maxima of the `_filter_processes`. These are the processes generated by
    sliding the windows of step `time_step`; at each step the difference
    between spike on the right and left window is calculated.

    Parameters
    ----------
    window_sizes : list of quantity objects
        list that contains windows sizes
    spiketrain : neo.SpikeTrain, numpy array or list
        spiketrain objects to analyze
    t_final : quantity
        final time of the spike train which is to be analysed
    alpha : float
        alpha-quantile in range [0, 100] for the set of maxima of the limit
        processes
    n_surrogates : integer
        numbers of simulated limit processes
    test_quantile : float
        threshold for the maxima of the filter derivative processes, if any
        of these maxima is larger than this value, it is assumed the
        presence of a cp at the time corresponding to that maximum
    time_step : quantity
        resolution, time step at which the windows are slided
    test_param : np.array of shape (3, num of window),
        first row: list of `h`, second and third rows: empirical means and
        variances of the limit process correspodning to `h`. This will be
        used to normalize the `filter_process` in order to give to the
        every maximum the same impact on the global statistic.

    Returns
    -------
    cps : list of list
        one list for each window size `h`, containing the points detected with
        the corresponding `filter_process`. N.B.: only cps whose h-neighborhood
        does not include previously detected cps (with smaller window h) are
        added to the list.
    """
    # Derive whichever of the threshold / null parameters the caller did not
    # supply, via Monte Carlo simulation of the limit processes.
    if (test_quantile is None) and (test_param is None):
        test_quantile, test_param = empirical_parameters(window_sizes, t_final,
                                                         alpha, n_surrogates,
                                                         time_step)
    elif test_quantile is None:
        test_quantile = empirical_parameters(window_sizes, t_final, alpha,
                                             n_surrogates, time_step)[0]
    elif test_param is None:
        test_param = empirical_parameters(window_sizes, t_final, alpha,
                                          n_surrogates, time_step)[1]
    spk = spiketrain
    # List of lists of detected change points (CPs), to be returned
    cps = []
    # window_sizes is assumed sorted ascending: earlier (smaller) windows get
    # priority when discarding overlapping detections below.
    for i, h in enumerate(window_sizes):
        # automatic setting of time_step
        dt_temp = h / 20 if time_step is None else time_step
        # filter_process for window of size h
        t, differences = _filter_process(dt_temp, h, spk, t_final, test_param)
        time_index = np.arange(len(differences))
        # Point detected with window h
        cps_window = []
        # Repeatedly extract the largest exceedance of the threshold, zeroing
        # out its h-neighborhood each time, until nothing exceeds it.
        while np.max(differences) > test_quantile:
            cp_index = np.argmax(differences)
            # from index to time (the process domain starts at h)
            cp = cp_index * dt_temp + h
            # before repeating the procedure, the h-neighbourgs of detected CP
            # are discarded, because rate changes into it are alrady explained
            mask_fore = time_index > cp_index - int((h / dt_temp).simplified)
            mask_back = time_index < cp_index + int((h / dt_temp).simplified)
            differences[mask_fore & mask_back] = 0
            # check if the neighbourhood of detected cp does not contain cps
            # detected with other windows
            neighbourhood_free = True
            # iterate on lists of cps detected with smaller window
            for j in range(i):
                # iterate on CPs detected with the j-th smallest window
                for c_pre in cps[j]:
                    if c_pre - h < cp < c_pre + h:
                        neighbourhood_free = False
                        break
            # if none of the previously detected CPs falls in the h-
            # neighbourhood
            if neighbourhood_free:
                # add the current CP to the list
                cps_window.append(cp)
        # add the present list to the grand list
        cps.append(cps_window)
    return cps
def _brownian_motion(t_in, t_fin, x_in, time_step):
    """
    Generate a Brownian Motion.

    Parameters
    ----------
    t_in : quantities,
        initial time
    t_fin : quantities,
        final time
    x_in : float,
        initial point of the process: _brownian_motio(0) = x_in
    time_step : quantities,
        resolution, time step at which brownian increments are summed

    Returns
    -------
    Brownian motion on [t_in, t_fin], with resolution time_step and initial
    state x_in

    Raises
    ------
    ValueError
        if `t_in`, `t_fin` or `time_step` is not a time quantity.
    """
    u = 1 * pq.s
    # Bug fix: `rescale` raises ValueError for a quantity with incompatible
    # units but AttributeError for a plain number (no `rescale` attribute);
    # the original caught only ValueError, so non-quantity inputs escaped
    # with a confusing AttributeError. Catch both — the sibling `_filter`
    # already catches AttributeError for the same reason.
    try:
        t_in_sec = t_in.rescale(u).magnitude
    except (AttributeError, ValueError):
        raise ValueError("t_in must be a time quantity")
    try:
        t_fin_sec = t_fin.rescale(u).magnitude
    except (AttributeError, ValueError):
        raise ValueError("t_fin must be a time quantity")
    try:
        dt_sec = time_step.rescale(u).magnitude
    except (AttributeError, ValueError):
        raise ValueError("dt must be a time quantity")
    # Independent Gaussian increments with variance dt, cumulatively summed.
    x = np.random.normal(0, np.sqrt(dt_sec),
                         size=int((t_fin_sec - t_in_sec) / dt_sec))
    s = np.cumsum(x)
    return s + x_in
def _limit_processes(window_sizes, t_final, time_step):
    """
    Generate the limit processes (depending only on t_final and h), one for
    each window size `h` in H. The distribution of maxima of these processes
    is used to derive threshold `test_quantile` and parameters `test_param`.

    Parameters
    ----------
    window_sizes : list of quantities
        set of windows' size
    t_final : quantity object
        end of limit process
    time_step : quantity object
        resolution, time step at which the windows are slided

    Returns
    -------
    limit_processes : list of numpy array
        each entries contains the limit processes for each h,
        evaluated in [h,T-h] with steps time_step
    """
    limit_processes = []
    u = 1 * pq.s
    # Bug fix: `rescale` raises ValueError for incompatible units but
    # AttributeError for inputs that are not quantities at all; the original
    # caught only ValueError. Catch both so either kind of bad input yields
    # the documented ValueError (consistent with `_filter`).
    try:
        window_sizes_sec = window_sizes.rescale(u).magnitude
    except (AttributeError, ValueError):
        raise ValueError("window_sizes must be a list of times")
    try:
        dt_sec = time_step.rescale(u).magnitude
    except (AttributeError, ValueError):
        raise ValueError("time_step must be a time quantity")
    # One shared Brownian path; each window size h reads three time-shifted
    # views of it.
    w = _brownian_motion(0 * u, t_final, 0, time_step)
    for h in window_sizes_sec:
        # BM on [h,T-h], shifted in time t-->t+h
        brownian_right = w[int(2 * h / dt_sec):]
        # BM on [h,T-h], shifted in time t-->t-h
        brownian_left = w[:int(-2 * h / dt_sec)]
        # BM on [h,T-h]
        brownian_center = w[int(h / dt_sec):int(-h / dt_sec)]
        modul = np.abs(brownian_right + brownian_left - 2 * brownian_center)
        limit_process_h = modul / (np.sqrt(2 * h))
        limit_processes.append(limit_process_h)
    return limit_processes
@deprecated_alias(dt='time_step')
def empirical_parameters(window_sizes, t_final, alpha, n_surrogates,
                         time_step=None):
    """
    This function generates the threshold and the null parameters.
    The`_filter_process_h` has been proved to converge (for t_fin,
    h-->infinity) to a continuous functional of a Brownaian motion
    ('limit_process'). Using a MonteCarlo technique, maxima of
    these limit_processes are collected.

    The threshold is defined as the alpha quantile of this set of maxima.
    Namely:
    test_quantile := alpha quantile of {max_(h in window_size)[
                                 max_(t in [h, t_final-h])_limit_process_h(t)]}

    Parameters
    ----------
    window_sizes : list of quantity objects
        set of windows' size
    t_final : quantity object
        final time of the spike
    alpha : float
        alpha-quantile in range [0, 100]
    n_surrogates : integer
        numbers of simulated limit processes
    time_step : quantity object
        resolution, time step at which the windows are slided

    Returns
    -------
    test_quantile : float
        threshold for the maxima of the filter derivative processes, if any
        of these maxima is larger than this value, it is assumed the
        presence of a cp at the time corresponding to that maximum
    test_param : np.array 3 * num of window,
        first row: list of `h`, second and third rows: empirical means and
        variances of the limit process correspodning to `h`. This will be
        used to normalize the `filter_process` in order to give to the every
        maximum the same impact on the global statistic.
    """
    # Input validation (commented-out legacy rescale-based checks removed).
    if not isinstance(window_sizes, pq.Quantity):
        raise ValueError("window_sizes must be a list of time quantities")
    if not isinstance(t_final, pq.Quantity):
        raise ValueError("t_final must be a time quantity")
    if not isinstance(n_surrogates, int):
        raise TypeError("n_surrogates must be an integer")
    if not (isinstance(time_step, pq.Quantity) or (time_step is None)):
        raise ValueError("time_step must be a time quantity")

    if t_final <= 0:
        raise ValueError("t_final needs to be strictly positive")
    if alpha * (100.0 - alpha) < 0:
        raise ValueError("alpha needs to be in (0,100)")
    if np.min(window_sizes) <= 0:
        raise ValueError("window size needs to be strictly positive")
    if np.max(window_sizes) >= t_final / 2:
        raise ValueError("window size too large")
    if time_step is not None:
        for h in window_sizes:
            if int(h.rescale('us')) % int(time_step.rescale('us')) != 0:
                raise ValueError(
                    "Every window size h must be a multiple of time_step")

    # Generate a matrix M*: n X m where n = n_surrogates is the number of
    # simulated limit processes and m is the number of chosen window sizes.
    # Elements are: M*(i,h) = max(t in T)[`limit_process_h`(t)],
    # for each h in H and surrogate i
    maxima_matrix = []
    for _ in range(n_surrogates):
        simu = _limit_processes(window_sizes, t_final, time_step)
        # max over time of the limit process generated with window h
        mh_star = [np.max(x) for x in simu]
        maxima_matrix.append(mh_star)
    maxima_matrix = np.asanyarray(maxima_matrix)

    # these parameters will be used to normalize both the limit_processes (H0)
    # and the filter_processes
    null_mean = maxima_matrix.mean(axis=0)
    null_var = maxima_matrix.var(axis=0)

    # matrix normalization by mean and variance of the limit process, in order
    # to give, for every h, the same impact on the global maximum
    matrix_normalized = (maxima_matrix - null_mean) / np.sqrt(null_var)
    great_maxs = np.max(matrix_normalized, axis=1)
    test_quantile = np.percentile(great_maxs, 100.0 - alpha)
    null_parameters = [window_sizes, null_mean, null_var]
    test_param = np.asanyarray(null_parameters)
    return test_quantile, test_param
def _filter(t_center, window, spiketrain):
    """
    This function calculates the difference of spike counts in the left and
    right side of a window of size h centered in t and normalized by its
    variance. The variance of this count can be expressed as a combination of
    mean and var of the I.S.I. lying inside the window.

    Parameters
    ----------
    t_center : quantity
        time on which the window is centered
    window : quantity
        window's size
    spiketrain : list, numpy array or SpikeTrain
        spike train to analyze

    Returns
    -------
    difference : float,
        difference of spike count normalized by its variance
        (0 when no variance estimate is available on either side)
    """
    u = 1 * pq.s
    # Non-quantity inputs have no `rescale` attribute; surface that as the
    # documented ValueError.
    try:
        t_sec = t_center.rescale(u).magnitude
    except AttributeError:
        raise ValueError("t must be a quantities object")
    try:
        h_sec = window.rescale(u).magnitude
    except AttributeError:
        raise ValueError("h must be a time quantity")
    try:
        spk_sec = spiketrain.rescale(u).magnitude
    except AttributeError:
        raise ValueError(
            "spiketrain must be a list (array) of times or a neo spiketrain")
    # cut spike-train on the right
    train_right = spk_sec[(t_sec < spk_sec) & (spk_sec < t_sec + h_sec)]
    # cut spike-train on the left
    train_left = spk_sec[(t_sec - h_sec < spk_sec) & (spk_sec < t_sec)]
    # spike count in the right side
    count_right = train_right.size
    # spike count in the left side
    count_left = train_left.size
    # form spikes to I.S.I
    isi_right = np.diff(train_right)
    isi_left = np.diff(train_left)
    if isi_right.size == 0:
        # fewer than two spikes on the right: no I.S.I. statistics available
        mu_ri = 0
        sigma_ri = 0
    else:
        # mean of I.S.I inside the window
        mu_ri = np.mean(isi_right)
        # var of I.S.I inside the window
        sigma_ri = np.var(isi_right)
    if isi_left.size == 0:
        # fewer than two spikes on the left: no I.S.I. statistics available
        mu_le = 0
        sigma_le = 0
    else:
        mu_le = np.mean(isi_left)
        sigma_le = np.var(isi_left)
    if (sigma_le > 0) & (sigma_ri > 0):
        # estimated variance of the count difference, expressed through the
        # I.S.I. mean and variance on each side (sigma > 0 implies at least
        # two distinct spikes, hence mu > 0, so the division is safe)
        s_quad = (sigma_ri / mu_ri**3) * h_sec + (sigma_le / mu_le**3) * h_sec
    else:
        s_quad = 0
    if s_quad == 0:
        # no variance estimate: return a neutral (zero) statistic
        difference = 0
    else:
        difference = (count_right - count_left) / np.sqrt(s_quad)
    return difference
def _filter_process(time_step, h, spk, t_final, test_param):
    """
    Given a spike train `spk` and a window size `h`, this function generates
    the `filter derivative process` by evaluating the function `_filter`
    in steps of `time_step`.

    Parameters
    ----------
    h : quantity object
        window's size
    t_final : quantity,
        time on which the window is centered
    spk : list, array or SpikeTrain
        spike train to analyze
    time_step : quantity object, time step at which the windows are slided
        resolution
    test_param : matrix, the means of the first row list of `h`,
        the second row Empirical and the third row variances of
        the limit processes `Lh` are used to normalize the number
        of elements inside the windows

    Returns
    -------
    time_domain : numpy array
        time domain of the `filter derivative process`
    filter_process : array,
        values of the `filter derivative process`
    """
    u = 1 * pq.s
    try:
        h_sec = h.rescale(u).magnitude
    except AttributeError:
        raise ValueError("h must be a time quantity")
    try:
        t_final_sec = t_final.rescale(u).magnitude
    except AttributeError:
        raise ValueError("t_final must be a time quanity")
    try:
        dt_sec = time_step.rescale(u).magnitude
    except AttributeError:
        raise ValueError("time_step must be a time quantity")
    # domain of the process: the window must fit entirely within [0, t_final]
    time_domain = np.arange(h_sec, t_final_sec - h_sec, dt_sec)
    # empirical mean/variance of the limit process for this window size,
    # taken from the parameters used to generate the threshold
    emp_mean_h = test_param[1][test_param[0] == h]
    emp_var_h = test_param[2][test_param[0] == h]
    # evaluate the raw filter statistic at every step of the domain
    raw_trajectory = np.asanyarray(
        [_filter(t * u, h, spk) for t in time_domain])
    # ordered normalization to give each process the same impact on the max
    filter_process = (np.abs(raw_trajectory) - emp_mean_h) / np.sqrt(emp_var_h)
    return time_domain, filter_process
| {
"content_hash": "c4e2d371e6c4c6c842bc0fba0021063e",
"timestamp": "",
"source": "github",
"line_count": 501,
"max_line_length": 79,
"avg_line_length": 37.025948103792416,
"alnum_prop": 0.620754716981132,
"repo_name": "JuliaSprenger/elephant",
"id": "fb24763db9fdf04e05702c22151e5463cf27cd3a",
"size": "18575",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "elephant/change_point_detection.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1327156"
}
],
"symlink_target": ""
} |
import unittest
from katas.kyu_7.remove_the_minimum import remove_smallest
class RemoveSmallestTestCase(unittest.TestCase):
    """Tests for remove_smallest: drop the first occurrence of the minimum."""

    def test_equals(self):
        result = remove_smallest([1, 2, 3, 4, 5])
        self.assertEqual(result, [2, 3, 4, 5])

    def test_equals_2(self):
        # Minimum in the middle of the list.
        result = remove_smallest([5, 3, 2, 1, 4])
        self.assertEqual(result, [5, 3, 2, 4])

    def test_equals_3(self):
        # Only the first occurrence of a repeated minimum is removed.
        result = remove_smallest([1, 2, 3, 1, 1])
        self.assertEqual(result, [2, 3, 1, 1])

    def test_equals_4(self):
        # Empty input stays empty.
        result = remove_smallest([])
        self.assertEqual(result, [])
| {
"content_hash": "7ce3e8aa7161b4698ac2719c0ad91f6c",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 72,
"avg_line_length": 30.176470588235293,
"alnum_prop": 0.6237816764132553,
"repo_name": "the-zebulan/CodeWars",
"id": "bb489edd5e4442b7bddd01e2338aaef9db252afb",
"size": "513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/kyu_7_tests/test_remove_the_minimum.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1203000"
}
],
"symlink_target": ""
} |
class Connector():
    """
    Simple signal-slot connector
    """

    def __init__(self):
        # Maps signal_id -> {connection_id: action}.
        self.connections = {}
        # Monotonically increasing id handed out by connect().
        self._nextid = 0

    def emit(self, signal_id, payload):
        """Invoke every action registered for `signal_id` as action(self, payload)."""
        slots = self.connections.get(signal_id)
        if slots is None:
            return
        for action in slots.values():
            action(self, payload)

    def connect(self, signal_id, action):
        """Register `action` for `signal_id` and return its connection id."""
        slots = self.connections.setdefault(signal_id, {})
        connection_id = self._nextid
        self._nextid += 1
        slots[connection_id] = action
        return connection_id

    def disconnect(self, connection_id):
        """Remove the connection; return True if it existed, else False."""
        for slots in self.connections.values():
            if connection_id in slots:
                del slots[connection_id]
                return True
        return False
| {
"content_hash": "786e84c1df3af5e267b465a6572605be",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 63,
"avg_line_length": 31.666666666666668,
"alnum_prop": 0.5801169590643275,
"repo_name": "uppi/meduzach",
"id": "b3d21651b1dc8721b46a45d0cfeaede08d858efc",
"size": "855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meduzach/connections.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "150987"
}
],
"symlink_target": ""
} |
from django.forms import BooleanField, Form
from django.conf import settings
from django.contrib import messages
from vumi.persist.redis_manager import RedisManager
from vxpolls.manager import PollManager
from go.conversation.view_definition import (
ConversationViewDefinitionBase, ConversationTemplateView)
from go.apps.surveys import forms
def get_poll_config(poll_id):
    """Return (PollManager, config) for `poll_id`, with defaults filled in."""
    # FIXME: Do we really need this?
    redis = RedisManager.from_config(settings.VXPOLLS_REDIS_CONFIG)
    pm = PollManager(redis, settings.VXPOLLS_PREFIX)
    config = pm.get_config(poll_id)
    config['poll_id'] = poll_id
    config.setdefault('repeatable', True)
    config.setdefault('survey_completed_response',
                      'Thanks for completing the survey')
    return pm, config
def _clear_empties(cleaned_data):
"""
FIXME: this is a work around because for some reason Django is seeing
the new (empty) forms in the formsets as stuff that is to be
stored when it really should be discarded.
"""
return [cd for cd in cleaned_data if cd.get('copy')]
class SurveyEditView(ConversationTemplateView):
    """This app is a unique and special snowflake, so it gets special views.
    """
    view_name = 'edit'
    path_suffix = 'edit/'
    template_base = 'surveys'

    def get(self, request, conversation):
        pm, poll_data = get_poll_config('poll-%s' % (conversation.key,))
        poll_form = forms.SurveyPollForm(initial=poll_data)
        questions_formset = forms.make_form_set(
            initial=poll_data.get('questions', []))
        completed_response_formset = forms.make_completed_response_form_set(
            initial=poll_data.get('survey_completed_responses', []))
        return self.render_to_response({
            'conversation': conversation,
            'poll_form': poll_form,
            'questions_formset': questions_formset,
            'completed_response_formset': completed_response_formset,
        })

    def post(self, request, conversation):
        poll_id = 'poll-%s' % (conversation.key,)
        pm, poll_data = get_poll_config(poll_id)
        post_data = request.POST.copy()
        post_data['poll_id'] = poll_id

        questions_formset = forms.make_form_set(data=post_data)
        poll_form = forms.SurveyPollForm(data=post_data)
        completed_response_formset = forms.make_completed_response_form_set(
            data=post_data)
        # Validate in this order (short-circuit matches previous behavior).
        everything_valid = (
            questions_formset.is_valid() and poll_form.is_valid() and
            completed_response_formset.is_valid())
        if everything_valid:
            data = poll_form.cleaned_data.copy()
            data['questions'] = _clear_empties(questions_formset.cleaned_data)
            data['survey_completed_responses'] = _clear_empties(
                completed_response_formset.cleaned_data)
            pm.set(poll_id, data)
            messages.info(request, 'Conversation updated.')
            # "_save_contents" keeps the user on the editor after saving.
            next_view = 'edit' if request.POST.get('_save_contents') else 'show'
            return self.redirect_to(next_view, conversation_key=conversation.key)
        return self.render_to_response({
            'conversation': conversation,
            'poll_form': poll_form,
            'questions_formset': questions_formset,
            'completed_response_formset': completed_response_formset,
        })
class DownloadUserDataForm(Form):
    """Options for the 'download_user_data' conversation action."""

    # Off by default: answers to questions that were removed from the poll
    # are normally excluded from the export.
    include_old_questions = BooleanField(
        label="Include old questions",
        help_text=("Whether to include answers to old questions that were"
                   " once part of the poll but are no longer."),
        initial=False, required=False)
class ConversationViewDefinition(ConversationViewDefinitionBase):
    """Wires the survey app's custom edit view and its action forms."""

    edit_view = SurveyEditView

    action_forms = {
        # TODO: The empty send_survey form is a work-around for not being able
        # to directly trigger POSTs via conversation action buttons
        'send_survey': Form,
        'download_user_data': DownloadUserDataForm,
    }
| {
"content_hash": "d541c7df0a312b404350c46ef41f3786",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 78,
"avg_line_length": 36.88034188034188,
"alnum_prop": 0.6329084588644264,
"repo_name": "praekelt/vumi-go",
"id": "1d9cd88d1637d2b9096300bf75750a1882fea28d",
"size": "4315",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "go/apps/surveys/view_definition.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "154585"
},
{
"name": "HTML",
"bytes": "158025"
},
{
"name": "JavaScript",
"bytes": "446112"
},
{
"name": "Python",
"bytes": "2738963"
},
{
"name": "Shell",
"bytes": "6799"
}
],
"symlink_target": ""
} |
"""Unit tests for the auths module."""
import github3
from . import helper
url_for = helper.create_url_helper("https://api.github.com/authorizations/1")
class TestAuthorization(helper.UnitHelper):
    """Unit tests for github3.auths.Authorization."""

    described_class = github3.auths.Authorization
    get_auth_example_data = helper.create_example_data_helper(
        "authorization_example"
    )
    example_data = get_auth_example_data()

    def test_add_scopes(self):
        """Adding scopes must POST an add_scopes payload."""
        scopes = ["scope-one", "scope-two"]
        self.instance.add_scopes(scopes)
        self.post_called_with(url_for(""), data={"add_scopes": scopes})

    def test_delete(self):
        """Deleting an authorization must issue a DELETE to its URL."""
        self.instance.delete()
        self.session.delete.assert_called_once_with(url_for(""))

    def test_remove_scopes(self):
        """Removing scopes must POST an rm_scopes payload."""
        scopes = ["scope-one", "scope-two", "scope-three"]
        self.instance.remove_scopes(scopes)
        self.post_called_with(url_for(""), data={"rm_scopes": scopes})

    def test_replace_scopes(self):
        """Replacing scopes must POST a scopes payload."""
        scopes = ["scope-one", "scope-two", "scope-three"]
        self.instance.replace_scopes(scopes)
        self.post_called_with(url_for(""), data={"scopes": scopes})
class TestAuthorizationRequiresAuth(helper.UnitRequiresAuthenticationHelper):
    """Verify that Authorization's mutating methods demand authentication."""

    described_class = github3.auths.Authorization
    example_data = TestAuthorization.example_data.copy()

    def after_setup(self):
        """Disable authentication on the Session."""
        self.session.auth = None
        self.session.has_auth.return_value = False

    def test_add_scopes(self):
        """add_scopes must require authentication."""
        self.assert_requires_auth(self.instance.add_scopes)

    def test_delete(self):
        """delete must require authentication."""
        self.assert_requires_auth(self.instance.delete)

    def test_remove_scopes(self):
        """remove_scopes must require authentication."""
        self.assert_requires_auth(self.instance.remove_scopes)

    def test_replace_scopes(self):
        """replace_scopes must require authentication."""
        self.assert_requires_auth(self.instance.replace_scopes)
| {
"content_hash": "12599f24a5ca447b2bf6b5f65516feb0",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 78,
"avg_line_length": 33.35443037974684,
"alnum_prop": 0.6432637571157496,
"repo_name": "sigmavirus24/github3.py",
"id": "38836a8e00b87aa80879163c3751806b52312ea8",
"size": "2635",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/test_auths.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "630121"
},
{
"name": "Makefile",
"bytes": "354"
},
{
"name": "Python",
"bytes": "1155917"
}
],
"symlink_target": ""
} |
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, Http404
import json
# Create your views here.
#Imported Models
from .models import Track
#look for the track, based on the title passed by url
def classic_view(request, title):
    """Look up a Track by title and return its artist as a plain response.

    Raises Http404 when no track matches the given title.
    """
    # Manual lookup: translate the ORM's DoesNotExist into an HTTP 404.
    try:
        match = Track.objects.get(title=title)
    except Track.DoesNotExist:
        raise Http404
    return HttpResponse(match.artist)
#Second Method to return a response and data to a view
#params : request, template name, data (as a dictionary)
#return render(request, 'track.html', {'track': track})
def track_view(request, title):
    """Serialize one track (title/order/album/artist) as a JSON response.

    Returns 404 via get_object_or_404 when the title does not exist.
    """
    track = get_object_or_404(Track, title=title)
    # Build the payload inline and encode it in one step.
    payload = json.dumps({
        'title': track.title,
        'order': track.order,
        'album': track.album.title,
        'artist': {
            'name': track.artist.first_name,
            'bio': track.artist.bio,
        },
    })
    return HttpResponse(payload, content_type='application/json')
| {
"content_hash": "42f9583e0132fc05efc4952ba0354605",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 67,
"avg_line_length": 24.29787234042553,
"alnum_prop": 0.6558669001751314,
"repo_name": "ronsuez/sfotipy-mejorando.la",
"id": "af65f316a8cf7f49e6a48a881918bb4502bcdd91",
"size": "1142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tracks/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15165"
}
],
"symlink_target": ""
} |
# Load the build configuration (DIST, ARCHITECTURE, MIRROR, ...) by exec'ing
# the config file into this dict.
config={}
with open('etc/pkg/config', 'r') as config_file:
    exec(config_file.read(), config)
print('pkg for FreeBSD %s %s' % (config['DIST'], config['ARCHITECTURE'][1]))
import os
import subprocess
import sys
import re
import io
import shutil
import copy
try:
import urllib2
except ImportError:
from urllib import request as urllib2
import bz2, lzma
try:
import cPickle as pickle
except ImportError:
import pickle
import tarfile
try:
from hashlib import md5
except ImportError:
from md5 import md5
# Usage banner printed by -h and on command errors.
desc="""
pkg {base | -h}
"""

# On-disk locations for the package metadata caches and downloaded archives.
available_package_list_file = 'var/cache/pkg/package_available.pkl'
installed_package_list_file = 'var/cache/pkg/package_installed.pkl'
link_package_list_file = 'var/cache/pkg/package_links.pkl'
package_folder = 'var/cache/pkg/archives'
def usage():
    """Print the command-line usage banner."""
    sys.stdout.write(desc + '\n')
def download(url):
    """Download *url* and return its content as bytes, printing progress.

    Returns None if a chunk read fails mid-transfer (callers must check).
    Progress is reported as a percentage when the server sends
    Content-length, otherwise as an absolute byte/kB/MB count.
    """
    def chunk_report(bytes_so_far, chunk_size, total_size):
        # Render one progress line in place ('\r' rewinds the cursor).
        if total_size:
            percent = float(bytes_so_far) / total_size
            percent = round(percent*100, 2)
            sys.stdout.write('\r[%0.2f%%] %s...'%(percent, url))
            sys.stdout.flush()
        else:
            # Unknown total size: show the running count in a readable unit.
            data_so_far = float(bytes_so_far)
            unit = 'B'
            if data_so_far > 1024*5:
                data_so_far = data_so_far / 1024
                unit = 'kB'
            if data_so_far > 1024*5:
                data_so_far = data_so_far / 1024
                unit = 'MB'
            sys.stdout.write('\r[%0.2f%s] %s...'%(data_so_far, unit, url))
            sys.stdout.flush()
    chunk_size = 8192
    data = bytes()
    response = urllib2.urlopen(url)
    try:
        # NOTE(review): dict-style access on response.info() works on both
        # the Python 2 and Python 3 urllib objects this module imports.
        total_size = response.info()['Content-length'].strip()
        total_size = int(total_size)
    except Exception as e:
        # Missing/invalid header: fall back to sizeless progress display.
        print(e)
        total_size = 0
    bytes_so_far = 0
    chunk_report(bytes_so_far, chunk_size, total_size)
    while(1):
        try:
            chunk = response.read(chunk_size)
            bytes_so_far += len(chunk)
            if not chunk:
                break
            data += chunk
            chunk_report(bytes_so_far, chunk_size, total_size)
        except Exception as e:
            # Read error mid-stream: report and signal failure with None.
            print(e)
            return None
    print('')
    return data
def base():
    """Fetch and unpack the FreeBSD distribution sets into the sysroot.

    kernel.txz and base.txz are mandatory; lib32.txz is only fetched when
    the architecture tuple requests it (config['ARCHITECTURE'][2]).

    Raises on download failure for a mandatory set; optional sets are
    skipped with a message instead.
    """
    for pkg, required in [
        ('kernel.txz', True),
        ('base.txz', True),
        ('lib32.txz', False),
    ]:
        base_url = config['MIRROR'] + config['ARCHITECTURE'][0] + '/' + config['ARCHITECTURE'][1] + '/' + config['DIST'] + '/' + pkg
        if not (required or config['ARCHITECTURE'][2]):
            continue
        try:
            data = download(base_url)
        except Exception as e:
            # BUG FIX: this branch used to test the undefined name
            # `optional`, so any download error raised NameError instead of
            # being handled.  Mandatory sets abort; optional ones are skipped.
            if required:
                raise e
            print('skipping optional set %s (%s)' % (pkg, e))
            continue
        if data is None:
            # download() returns None when a chunk read fails mid-stream;
            # the original code passed that straight to tarfile and crashed.
            if required:
                raise RuntimeError('download failed: %s' % base_url)
            print('skipping optional set %s (incomplete download)' % pkg)
            continue
        tar = tarfile.open(fileobj=io.BytesIO(data))
        for tarinfo in tar:
            # Work on a private copy so forcing owner-only permissions does
            # not mutate the archive's own metadata.
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 0o700
            try:
                tar.extract(tarinfo, '.', set_attrs=False)
            except ValueError as e:
                print(e)
            except OSError as e:
                # Path already exists (e.g. leftover from a previous partial
                # extract): remove it and retry once.
                print(tarinfo.name)
                os.unlink(tarinfo.name)
                tar.extract(tarinfo, '.', set_attrs=False)
def fix_links(dir):
    """Recursively replace symlinks under *dir* with copies of their targets.

    Intended for Windows, where the POSIX symlinks in the extracted sysroot
    are unusable.  Absolute link targets are re-rooted under the current
    working directory ('.' + target), matching the sysroot layout.  Broken
    and recursive links are removed.
    """
    for l in os.listdir(dir):
        p = os.path.join(dir, l)
        if os.path.islink(p):
            # Follow the link chain to its final, non-link target.
            target = p
            seen = set([target])
            while os.path.islink(target):
                real = os.readlink(target)
                parent = os.path.split(target)[0]
                if real[0] == '/':
                    # Absolute target: re-root it below the sysroot (cwd).
                    target = '.' + real
                else:
                    target = os.path.join(parent, real)
                if target in seen:
                    # BUG FIX: a symlink cycle used to only print this
                    # message and keep looping forever; bail out instead.
                    # The cyclic target fails os.path.exists() below, so the
                    # link is then removed as broken.
                    print('recursive link: %s => %s' % (p, target))
                    break
                seen.add(target)
            if os.path.exists(target):
                print('%s => %s' % (p, target))
                os.unlink(p)
                if os.path.isdir(target):
                    # Fix the target tree first so we copy real files only.
                    fix_links(target)
                    shutil.copytree(target, p)
                else:
                    shutil.copy(target, p)
            else:
                print('broken link: %s => %s' % (p, target))
                os.unlink(p)
        elif os.path.isdir(p):
            fix_links(p)
# Windows cannot use the POSIX symlinks in the extracted tree: replace every
# link under the current directory with a real copy of its target.
if sys.platform == 'win32':
    fix_links('.')
if __name__ == '__main__':
    # CLI entry point: first argv token is the command; any further tokens
    # would be package names (no current command accepts them).
    command = sys.argv[1]
    packages = sys.argv[2:]
    try:
        if command == '-h':
            usage()
        elif command == 'base':
            if packages:
                raise Exception(desc)
            base()
        else:
            raise Exception('unknown command: %s\n\n%s' % (command, desc))
    except Exception as e:
        # Report any failure and exit non-zero; success exits 0.
        print(e.__class__, e)
        exit(1)
    else:
        exit(0)
| {
"content_hash": "deb24e8438f1b1bb9bbefab65097b53c",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 132,
"avg_line_length": 31.078787878787878,
"alnum_prop": 0.4668486739469579,
"repo_name": "nmercier/linux-cross-gcc",
"id": "b04e562aa011caae22c759259c2210aa82dca9a8",
"size": "5128",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pc-freebsd-ppc64/sysroot/pkg.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1047092"
},
{
"name": "C++",
"bytes": "151335"
},
{
"name": "Makefile",
"bytes": "82796"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "29123266"
},
{
"name": "Shell",
"bytes": "14668"
}
],
"symlink_target": ""
} |
from .graph import *
from .check_order import *
| {
"content_hash": "0e2e42eab2abef78ced89bd9fb30d574",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 26,
"avg_line_length": 24,
"alnum_prop": 0.7291666666666666,
"repo_name": "henzk/ape",
"id": "55457548db794777cd75b436d48ac24a3e3afc23",
"size": "48",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ape/feaquencer/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75028"
},
{
"name": "Shell",
"bytes": "268"
}
],
"symlink_target": ""
} |
"""
Created on Mon Sep 1 00:33:43 2014
@author: andreas
"""
import Queue
import signal
import threading
import time
import PySQLPool
class filler(threading.Thread):
    """Daemon thread that periodically refills the shared queue with ids of
    stale ``mc_caching`` rows (not crawled in the last minute).

    NOTE(review): Python 2 only -- the class mixes print statements with
    print() calls and the module imports ``Queue``/``PySQLPool``.
    """

    def run(self):
        # Poll loop: refill the queue, then sleep ``_intervall`` seconds.
        # Every fifth pass the current queue size is printed for debugging.
        print("{} started!".format(self.getName())+"\n") # "Thread-x started!"
        self._loop = True
        loops=0
        while self._loop:
            self.fillQueue()
            if loops == 4:
                print "after fillQueue()",self._queue.qsize()
                loops=0
            else:
                loops += 1
            time.sleep(self._intervall)
        print("{} finished!".format(self.getName())) # "Thread-x finished!"
        return

    def __init__(self, queue, db, intervall=0, name='filler'):
        # queue: Queue.Queue shared with the consumer(s)
        # db: PySQLPool connection handle
        # intervall: seconds to sleep between refills (sic -- keep the
        #            spelling, callers pass it by keyword)
        super(filler, self).__init__()
        self.setName(name)
        self._loop = False
        self._intervall = intervall
        self._queue = queue
        self._db = db
        self.daemon = True
        self.isrunning = True
#        self.start()                    # ...Start the thread

    def fillQueue(self):
        # Select ids of active, non-deleted rows whose last crawl is older
        # than one minute (oldest first) and enqueue any not already queued.
        # NOTE(review): peeking at self._queue.queue is done without the
        # queue's lock and can race with consumers -- confirm acceptable.
        query = PySQLPool.getNewQuery(self._db)
        query.execute("SELECT id FROM `mc_caching` WHERE last_crawl <= DATE_SUB(NOW(),INTERVAL 1 MINUTE) and `aktiv`='1' and `flag_delete`='0' ORDER BY `last_crawl` ASC")
        for row in query.record:
            if not row['id'] in self._queue.queue:
                self._queue.put(row["id"])
        return

    def stopLoop(self):
        # Ask run() to exit after its current sleep.
        self._loop = False
        self.isrunning = False
#        print format(self.getName()),"loop off"

    def interruptHandler(self,signum, frame):
        # Signal-handler hook (e.g. for SIGINT): stop the poll loop.
        print "do whatever, like call thread.interrupt_main()"
        print signum,frame,"\n\n"
        self.stopLoop()
# Main Program
if __name__ == "__main__":
print "not working"
| {
"content_hash": "9616abb4069336f6563f73ea0ce6d19b",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 170,
"avg_line_length": 28.34375,
"alnum_prop": 0.5429988974641676,
"repo_name": "eieste/mccaching",
"id": "7e603a5dafd7b1897890c722cf06156bc062a893",
"size": "1856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python_daemon/filler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "96485"
},
{
"name": "JavaScript",
"bytes": "289894"
},
{
"name": "PHP",
"bytes": "136375"
},
{
"name": "Python",
"bytes": "37967"
},
{
"name": "Shell",
"bytes": "1371"
}
],
"symlink_target": ""
} |
"""Run AFL repeatedly with externally supplied generated config from STDIN."""
import logging
import tempfile
import os
import sys
import afl
from faucet import config_parser as cp
from faucet.conf import InvalidConfigError
# Number of fuzzing iterations per AFL persistent-mode process.
ROUNDS = 50000
LOGNAME = 'FAUCET_FUZZER_LOG'

# Scratch directory holding the generated config under test.
tmpdir = tempfile.mkdtemp()
conf_file_name = os.path.join(tmpdir, 'faucet.yaml')
def create_config_file(config):
    """Write *config* to the scratch faucet.yaml and return its path."""
    with open(conf_file_name, 'w', encoding='utf-8') as handle:
        handle.write(config)
    return conf_file_name
def main():
    """Runs the py-AFL fuzzer with the faucet config parser"""
    # Silence all logging so parser output does not slow the fuzz loop.
    logging.disable(logging.CRITICAL)
    while afl.loop(ROUNDS):  # pylint: disable=c-extension-no-member
        # Each iteration AFL feeds a mutated config via stdin.  Only
        # InvalidConfigError is expected; any other exception is a crash
        # that AFL will record.
        config = sys.stdin.read()
        file_name = create_config_file(config)
        try:
            cp.dp_parser(file_name, LOGNAME)
        except InvalidConfigError:
            pass
if __name__ == "__main__":
main()
| {
"content_hash": "40956b91a1bdc60659045a20eeabb27e",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 78,
"avg_line_length": 23.902439024390244,
"alnum_prop": 0.6744897959183673,
"repo_name": "anarkiwi/faucet",
"id": "f161354a0520a897b6a70fd02ae88ee6c848d780",
"size": "1004",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/generative/fuzzer/config/fuzz_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2538"
},
{
"name": "Python",
"bytes": "2160701"
},
{
"name": "Shell",
"bytes": "16152"
}
],
"symlink_target": ""
} |
from beyonic.apis.abstract_api import AbstractAPI
class Collection(AbstractAPI):
    """
    Collection api wrapper class
    """

    # REST resource path segment used by AbstractAPI to build request URLs.
    _method_path = 'collections'
| {
"content_hash": "59b27af69302c32969150981284215c1",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 49,
"avg_line_length": 20.625,
"alnum_prop": 0.696969696969697,
"repo_name": "beyonic/beyonic-python",
"id": "cc7065af31b6f586c7c4987c054b15ddc1f08aa1",
"size": "165",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "beyonic/apis/collection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5256"
},
{
"name": "HTML",
"bytes": "9832812"
},
{
"name": "JavaScript",
"bytes": "15796"
},
{
"name": "Python",
"bytes": "52763"
}
],
"symlink_target": ""
} |
import tweepy
import sys
from alchemyapi import AlchemyAPI
import json
# Shared AlchemyAPI client used by get() for sentiment scoring.
alchemyapi = AlchemyAPI()
# NOTE(review): this module-level list is shadowed by the local `tweets`
# inside get() and appears to be unused -- likely dead code.
tweets = []
def get(num,tweettype,query,callback):
    """Search Twitter and return sentiment-scored tweets as JSON.

    num -- maximum number of tweets to request
    tweettype -- Twitter result_type ('recent', 'popular' or 'mixed')
    query -- search query string
    callback -- JSONP callback name, or None for plain JSON

    Retweets ('RT @') are skipped; each remaining tweet is scored with
    AlchemyAPI document sentiment.  Python 2 only (execfile, print
    statements).
    """
    # Credentials are exec'd from a config file into a local dict.
    config = {}
    execfile("config.conf", config)
    auth = tweepy.OAuthHandler(config["consumer_key"], config["consumer_secret"])
    auth.set_access_token(config["access_token_key"], config["access_token_secret"])
    api = tweepy.API(auth)
    tweets = []
    for tweet in api.search(q=query,result_type=tweettype,count=num):
        item = dict()
        try:
            if not ('RT @' in tweet.text):
                sentimentvalue = alchemyapi.sentiment("text", tweet.text)
                item['user'] = tweet.user.screen_name
                item['text'] = tweet.text
                item['created_at'] = str(tweet.created_at)
                item['sentimentvalue'] = sentimentvalue["docSentiment"]["score"]
                item['coordinates'] = tweet.coordinates
                tweets.append(item)
        except:
            # NOTE(review): bare except silently drops any tweet whose
            # sentiment lookup fails (e.g. no docSentiment key); only the
            # exception type is printed.
            print sys.exc_info()[0]
    json_encoded = json.dumps(tweets,ensure_ascii=True)
    if callback != None:
        # JSONP: wrap the payload in the supplied callback name.
        return str(callback) + "(" + json_encoded+ ");"
    else :
        return json_encoded
| {
"content_hash": "058f6be7cafe097ecbafc675d1a03348",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 84,
"avg_line_length": 29.21951219512195,
"alnum_prop": 0.5951585976627712,
"repo_name": "Shy/AngelHackCincinnati",
"id": "97d0ddc25d0b8469e0ef5a9672a80dbefa36170f",
"size": "1198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "get_tweets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "290645"
},
{
"name": "Python",
"bytes": "2895"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter UserPreferences.default_tab to include the 'hot' tab choice."""

    dependencies = [
        ('api', '0149_userpreferences_invalid_email'),
    ]

    operations = [
        migrations.AlterField(
            model_name='userpreferences',
            name='default_tab',
            field=models.CharField(default=b'following', help_text='The activities you see by default on the homepage.', max_length=30, verbose_name='Default tab', choices=[(b'following', 'Following'), (b'all', 'All'), (b'hot', 'Hot')]),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "b162ae7460cf9e7183ca14bdc6ebf422",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 237,
"avg_line_length": 32.68421052631579,
"alnum_prop": 0.6264090177133655,
"repo_name": "SchoolIdolTomodachi/SchoolIdolAPI",
"id": "802e2c7276b80c55553cf3918c08d6c6fb906b41",
"size": "645",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "api/migrations/0150_auto_20160905_1625.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "67801"
},
{
"name": "HTML",
"bytes": "474730"
},
{
"name": "JavaScript",
"bytes": "93928"
},
{
"name": "Python",
"bytes": "748300"
}
],
"symlink_target": ""
} |
"""Support for Hugo-style shortcodes."""
from __future__ import unicode_literals
from .utils import LOGGER
import sys
# Constants: token types produced by _split_shortcodes.
_TEXT = 1  # plain text between shortcode tags
_SHORTCODE_START = 2  # an opening {{% name args %}} tag
_SHORTCODE_END = 3  # a closing {{% /name %}} tag
class ParsingError(Exception):
    """Used for forwarding parsing error messages to apply_shortcodes.

    Raised by all the _parse_*/_split_* helpers below; apply_shortcodes
    either re-raises it or turns it into a logged error and exit.
    """

    pass
def _format_position(data, pos):
"""Return position formatted as line/column.
This is used for prettier error messages.
"""
line = 0
col = 0
llb = '' # last line break
for c in data[:pos]:
if c == '\r' or c == '\n':
if llb and c != llb:
llb = ''
else:
line += 1
col = 0
llb = c
else:
col += 1
llb = ''
return "line {0}, column {1}".format(line + 1, col + 1)
def _skip_whitespace(data, pos, must_be_nontrivial=False):
"""Return first position after whitespace.
If must_be_nontrivial is set to True, raises ParsingError
if no whitespace is found.
"""
if must_be_nontrivial:
if pos == len(data) or not data[pos].isspace():
raise ParsingError("Expecting whitespace at {0}!".format(_format_position(data, pos)))
while pos < len(data):
if not data[pos].isspace():
break
pos += 1
return pos
def _skip_nonwhitespace(data, pos):
"""Return first position not before pos which contains a non-whitespace character."""
while pos < len(data):
if data[pos].isspace():
break
pos += 1
return pos
def _parse_quoted_string(data, start):
"""Parse a quoted string starting at position start in data.
Returns the position after the string followed by the string itself.
"""
value = ''
qc = data[start]
pos = start + 1
while pos < len(data):
char = data[pos]
if char == '\\':
if pos + 1 < len(data):
value += data[pos + 1]
pos += 2
else:
raise ParsingError("Unexpected end of data while escaping ({0})".format(_format_position(data, pos)))
elif (char == "'" or char == '"') and char == qc:
return pos + 1, value
else:
value += char
pos += 1
raise ParsingError("Unexpected end of unquoted string (started at {0})!".format(_format_position(data, start)))
def _parse_unquoted_string(data, start, stop_at_equals):
"""Parse an unquoted string starting at position start in data.
Returns the position after the string followed by the string itself.
In case stop_at_equals is set to True, an equal sign will terminate
the string.
"""
value = ''
pos = start
while pos < len(data):
char = data[pos]
if char == '\\':
if pos + 1 < len(data):
value += data[pos + 1]
pos += 2
else:
raise ParsingError("Unexpected end of data while escaping ({0})".format(_format_position(data, pos)))
elif char.isspace():
break
elif char == '=' and stop_at_equals:
break
elif char == "'" or char == '"':
raise ParsingError("Unexpected quotation mark in unquoted string ({0})".format(_format_position(data, pos)))
else:
value += char
pos += 1
return pos, value
def _parse_string(data, start, stop_at_equals=False, must_have_content=False):
    """Parse a quoted or unquoted string beginning at data[start].

    Returns (position after the string, string value, next_is_equals),
    where next_is_equals reports whether the character right after the
    string is '=' (it can only be True when stop_at_equals is set).

    If must_have_content is true, empty unquoted strings are rejected
    (quoted strings always count as having content, even when empty).
    """
    if start == len(data):
        raise ParsingError("Expecting string, but found end of input!")
    quoted = data[start] in ('"', "'")
    if quoted:
        end, value = _parse_quoted_string(data, start)
    else:
        end, value = _parse_unquoted_string(data, start, stop_at_equals)
    if must_have_content and not (quoted or value):
        raise ParsingError("String starting at {0} must be non-empty!".format(_format_position(data, start)))
    # NOTE: requires at least one character after the '=' (end + 1 < len),
    # mirroring the original bounds check exactly.
    next_is_equals = bool(stop_at_equals and end + 1 < len(data) and data[end] == '=')
    return end, value, next_is_equals
def _parse_shortcode_args(data, start, shortcode_name, start_pos):
    """When pointed to after a shortcode's name in a shortcode tag, parses the shortcode's arguments until '%}}'.

    Returns the position after '%}}', followed by a tuple (args, kw) where
    args is a list of positional argument strings and kw a dict of keyword
    arguments parsed from name=value pairs.

    name and start_pos are only used for formatting error messages.
    """
    args = []
    kwargs = {}
    pos = start
    while True:
        # Skip whitespaces.  Each argument must be separated from the
        # previous token by at least one whitespace character.
        try:
            pos = _skip_whitespace(data, pos, must_be_nontrivial=True)
        except ParsingError:
            if not args and not kwargs:
                raise ParsingError("Shortcode '{0}' starting at {1} is not terminated correctly with '%}}}}'!".format(shortcode_name, _format_position(data, start_pos)))
            else:
                raise ParsingError("Syntax error in shortcode '{0}' at {1}: expecting whitespace!".format(shortcode_name, _format_position(data, pos)))
        if pos == len(data):
            # Ran off the end without seeing '%}}': error raised below.
            break
        # Check for end of shortcode
        if pos + 3 <= len(data) and data[pos:pos + 3] == '%}}':
            return pos + 3, (args, kwargs)
        # Read name
        pos, name, next_is_equals = _parse_string(data, pos, stop_at_equals=True, must_have_content=True)
        if next_is_equals:
            # Read value
            pos, value, _ = _parse_string(data, pos + 1, stop_at_equals=False, must_have_content=False)
            # Store keyword argument
            kwargs[name] = value
        else:
            # Store positional argument
            args.append(name)
    raise ParsingError("Shortcode '{0}' starting at {1} is not terminated correctly with '%}}}}'!".format(shortcode_name, _format_position(data, start_pos)))
def _split_shortcodes(data):
    """Given input data, splits it into a sequence of texts, shortcode starts and shortcode ends.

    Returns a list of tuples of the following forms:

        1. (_TEXT, text)
        2. (_SHORTCODE_START, text, start, name, args)
        3. (_SHORTCODE_END, text, start, name)

    Here, text is the raw text represented by the token; start is the starting position in data
    of the token; name is the name of the shortcode; and args is a tuple (args, kw) as returned
    by _parse_shortcode_args.
    """
    pos = 0
    result = []
    while pos < len(data):
        # Search for shortcode start
        start = data.find('{{%', pos)
        if start < 0:
            # No more shortcodes: the remainder is plain text.
            result.append((_TEXT, data[pos:]))
            break
        result.append((_TEXT, data[pos:start]))
        # Extract name
        name_start = _skip_whitespace(data, start + 3)
        name_end = _skip_nonwhitespace(data, name_start)
        name = data[name_start:name_end]
        if not name:
            raise ParsingError("Syntax error: '{{{{%' must be followed by shortcode name ({0})!".format(_format_position(data, start)))
        # Finish shortcode
        if name[0] == '/':
            # This is a closing shortcode
            name = name[1:]
            end_start = _skip_whitespace(data, name_end)  # start of '%}}'
            pos = end_start + 3
            # Must be followed by '%}}'
            if pos > len(data) or data[end_start:pos] != '%}}':
                raise ParsingError("Syntax error: '{{{{% /{0}' must be followed by ' %}}}}' ({1})!".format(name, _format_position(data, end_start)))
            result.append((_SHORTCODE_END, data[start:pos], start, name))
        elif name == '%}}':
            # '{{%' immediately followed by '%}}': empty shortcode name.
            raise ParsingError("Syntax error: '{{{{%' must be followed by shortcode name ({0})!".format(_format_position(data, start)))
        else:
            # This is an opening shortcode
            pos, args = _parse_shortcode_args(data, name_end, shortcode_name=name, start_pos=start)
            result.append((_SHORTCODE_START, data[start:pos], start, name, args))
    return result
def apply_shortcodes(data, registry, site=None, filename=None, raise_exceptions=False, lang=None):
    """Apply Hugo-style shortcodes on data.

    {{% name parameters %}} will end up calling the registered "name" function with the given parameters.
    {{% name parameters %}} something {{% /name %}} will call name with the parameters and
    one extra "data" parameter containing " something ".

    If raise_exceptions is set to True, instead of printing error messages and terminating, errors are
    passed on as exceptions to the caller.

    The site parameter is passed with the same name to the shortcodes so they can access Nikola state.

    >>> print(apply_shortcodes('==> {{% foo bar=baz %}} <==', {'foo': lambda *a, **k: k['bar']}))
    ==> baz <==
    >>> print(apply_shortcodes('==> {{% foo bar=baz %}}some data{{% /foo %}} <==', {'foo': lambda *a, **k: k['bar']+k['data']}))
    ==> bazsome data <==
    """
    empty_string = data[:0]  # same string type as data; to make Python 2 happy
    try:
        # Split input data into text, shortcodes and shortcode endings
        sc_data = _split_shortcodes(data)
        # Now process data
        result = []
        pos = 0
        while pos < len(sc_data):
            current = sc_data[pos]
            if current[0] == _TEXT:
                result.append(current[1])
                pos += 1
            elif current[0] == _SHORTCODE_END:
                # A closing tag with no matching opener is always an error.
                raise ParsingError("Found shortcode ending '{{{{% /{0} %}}}}' which isn't closing a started shortcode ({1})!".format(current[3], _format_position(data, current[2])))
            elif current[0] == _SHORTCODE_START:
                name = current[3]
                # Check if we can find corresponding ending
                found = None
                for p in range(pos + 1, len(sc_data)):
                    if sc_data[p][0] == _SHORTCODE_END and sc_data[p][3] == name:
                        found = p
                        break
                if found:
                    # Found ending. Extract data argument: everything (raw
                    # token text) between the opening and the closing tag.
                    data_arg = []
                    for p in range(pos + 1, found):
                        data_arg.append(sc_data[p][1])
                    data_arg = empty_string.join(data_arg)
                    pos = found + 1
                else:
                    # Single shortcode (no closing tag): empty data argument.
                    pos += 1
                    data_arg = ''
                args, kw = current[4]
                # Reserved keyword arguments supplied to every shortcode.
                kw['site'] = site
                kw['data'] = data_arg
                kw['lang'] = lang
                if name in registry:
                    f = registry[name]
                    if getattr(f, 'nikola_shortcode_pass_filename', None):
                        kw['filename'] = filename
                    res = f(*args, **kw)
                else:
                    # Unknown shortcode: log an error and substitute nothing.
                    LOGGER.error('Unknown shortcode {0} (started at {1})', name, _format_position(data, current[2]))
                    res = ''
                result.append(res)
        return empty_string.join(result)
    except ParsingError as e:
        if raise_exceptions:
            # Throw up
            raise e
        if filename:
            LOGGER.error("Shortcode error in file {0}: {1}".format(filename, e))
        else:
            LOGGER.error("Shortcode error: {0}".format(e))
        sys.exit(1)
| {
"content_hash": "d5da88589c9ced219b2c2096b39b39f8",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 181,
"avg_line_length": 38.19672131147541,
"alnum_prop": 0.5591416309012875,
"repo_name": "x1101/nikola",
"id": "75dd5fb905d8bc83119cdc90383917eafe437ad7",
"size": "12792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nikola/shortcodes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18268"
},
{
"name": "JavaScript",
"bytes": "24667"
},
{
"name": "Python",
"bytes": "1088257"
},
{
"name": "Shell",
"bytes": "11088"
},
{
"name": "XSLT",
"bytes": "3619"
}
],
"symlink_target": ""
} |
from tartist.nn import Env, opr as O
import numpy as np
import unittest
import functools
class TestNNOprTensor(unittest.TestCase):
    """Checks basic and advanced (fancy) indexing on symbolic tensors."""

    @functools.wraps(np.allclose)
    def assertTensorClose(self, *args, **kwargs):
        # Thin assertion wrapper around np.allclose.
        return self.assertTrue(np.allclose(*args, **kwargs))

    def testAdvancedIndexing(self):
        # Symbolic 5x5 placeholder fed with the values 0..24.
        a = O.placeholder('a', shape=(5, 5))
        a_val = np.arange(25).reshape((5, 5)).astype('float32')
        feed_dict = {a.name: a_val}
        # Basic slicing must match numpy slicing semantics.
        self.assertTensorClose(a[0:3].eval(feed_dict=feed_dict), a_val[0:3])
        self.assertTensorClose(a[0:3, 0:3].eval(feed_dict=feed_dict), a_val[0:3, 0:3])
        # Sub-assignment is expected to be unimplemented.
        with self.assertRaises(NotImplementedError):
            self.assertTensorClose(a.set_sub[0:3](1).eval(feed_dict=feed_dict), np.array([1, 1, 1, 3, 4]))
        if True:
            # Advanced (list-based) indexing via the .ai helper.
            self.assertTensorClose(a.ai[[0, 3]].eval(feed_dict=feed_dict), a_val[[0, 3]])
            self.assertTensorClose(a.ai[[0, 3], [0, 3]].eval(feed_dict=feed_dict), a_val[[0, 3], [0, 3]])
            # Advanced-index assignment is likewise unimplemented.
            with self.assertRaises(NotImplementedError):
                self.assertTensorClose(a.set_ai[[0, 3]](1).eval(feed_dict=feed_dict), np.array([1, 1, 1, 3, 4]))
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "3aae637e9291a6f03fe1176fe5e38637",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 108,
"avg_line_length": 40.333333333333336,
"alnum_prop": 0.6198347107438017,
"repo_name": "vacancy/TensorArtist",
"id": "ba88cb41006bc0f52862499b96a28d001994dcdb",
"size": "1380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_nn_opr_tensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "497134"
},
{
"name": "Shell",
"bytes": "630"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the diplomacy app.

    Creates the core game tables (Game, Turn, Government), order
    bookkeeping (Order, OrderPost, CanonicalOrder) and board state
    (Unit, Ownership), then wires up the cross-model foreign keys.
    """

    dependencies = [
        # Depend on whichever user model the project has configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='CanonicalOrder',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('actor', models.CharField(max_length=64, blank=True)),
                ('action', models.CharField(blank=True, max_length=1, null=True, choices=[(b'H', b'Hold'), (b'M', b'Move'), (b'S', b'Support'), (b'C', b'Convoy'), (b'B', b'Build'), (b'D', b'Disband')])),
                ('assist', models.CharField(max_length=64, blank=True)),
                ('target', models.CharField(max_length=64, blank=True)),
                ('via_convoy', models.BooleanField()),
                ('user_issued', models.BooleanField()),
                ('result', models.CharField(max_length=1, choices=[(b'S', b'Succeeded'), (b'F', b'Failed'), (b'B', b'Bounced'), (b'D', b'Destroyed')])),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='DiplomacyPrefs',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('warnings', models.BooleanField(default=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'diplomacyprefs',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Game',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('slug', models.SlugField(unique=True)),
                ('description', models.TextField(blank=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('state', models.CharField(default=b'S', max_length=1, choices=[(b'S', b'Setup'), (b'A', b'Active'), (b'P', b'Paused'), (b'F', b'Finished')])),
                ('open_joins', models.BooleanField(default=True)),
                # Bug fix: on_delete=SET_NULL requires null=True (Django
                # rejects the field with check fields.E320 otherwise).
                # This also matches the SET_NULL user FK on Government below.
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, related_name='diplomacy_games', to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Government',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('power', models.CharField(max_length=32, blank=True)),
                ('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Game')),
                ('user', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('actor', models.CharField(max_length=64, blank=True)),
                ('action', models.CharField(blank=True, max_length=1, null=True, choices=[(b'H', b'Hold'), (b'M', b'Move'), (b'S', b'Support'), (b'C', b'Convoy'), (b'B', b'Build'), (b'D', b'Disband')])),
                ('assist', models.CharField(max_length=64, blank=True)),
                ('target', models.CharField(max_length=64, blank=True)),
                ('via_convoy', models.BooleanField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='OrderPost',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('government', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='diplomacy.Government')),
            ],
            options={
                'ordering': ('timestamp',),
                'get_latest_by': 'timestamp',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Ownership',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('territory', models.CharField(max_length=32, blank=True)),
                ('government', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Government')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Turn',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('number', models.IntegerField()),
                ('year', models.IntegerField()),
                ('season', models.CharField(max_length=2, choices=[(b'S', b'Spring'), (b'SR', b'Spring Retreat'), (b'F', b'Fall'), (b'FR', b'Fall Retreat'), (b'FA', b'Fall Adjustment')])),
                ('generated', models.DateTimeField(auto_now_add=True)),
                ('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Game')),
            ],
            options={
                'ordering': ('-generated',),
                'get_latest_by': 'generated',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Unit',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('u_type', models.CharField(max_length=1, choices=[(b'A', b'Army'), (b'F', b'Fleet')])),
                ('subregion', models.CharField(max_length=64, blank=True)),
                ('previous', models.CharField(max_length=64, blank=True)),
                ('dislodged', models.BooleanField(default=False)),
                ('displaced_from', models.CharField(max_length=32, blank=True)),
                ('standoff_from', models.CharField(max_length=32, blank=True)),
                ('government', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Government')),
                ('turn', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Turn')),
            ],
            options={
                'ordering': ('-turn', 'government', 'subregion'),
            },
            bases=(models.Model,),
        ),
        # FKs added after both endpoint models exist.
        migrations.AddField(
            model_name='ownership',
            name='turn',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Turn'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='ownership',
            unique_together=set([('turn', 'territory')]),
        ),
        migrations.AddField(
            model_name='orderpost',
            name='turn',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='diplomacy.Turn'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='order',
            name='post',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to='diplomacy.OrderPost'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='canonicalorder',
            name='government',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Government'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='canonicalorder',
            name='turn',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='diplomacy.Turn'),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "41281761b16deb688a3f0a3cd1cf1ad0",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 203,
"avg_line_length": 48.561797752808985,
"alnum_prop": 0.542572882924572,
"repo_name": "jbradberry/django-diplomacy",
"id": "ed08118e53d9a3073288fb6568df31116c54bb5a",
"size": "8683",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "diplomacy/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1401"
},
{
"name": "HTML",
"bytes": "12807"
},
{
"name": "JavaScript",
"bytes": "3968"
},
{
"name": "Python",
"bytes": "381543"
}
],
"symlink_target": ""
} |
# Read attended count (a) and required count (s) from one stdin line,
# then report whether the student may advance.
attended, required = [int(token) for token in input().split()]
if attended >= required:
    print('Congratulations!')
else:
    print('Enjoy another semester...')
| {
"content_hash": "9fa98a5f5c056b31a3f534430127c07f",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 68,
"avg_line_length": 51,
"alnum_prop": 0.6470588235294118,
"repo_name": "knuu/competitive-programming",
"id": "6a7c8646fa28ba6fb7f6161a309901af4ae47b37",
"size": "102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atcoder/corp/codefes2014relay_b.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "156029"
},
{
"name": "C++",
"bytes": "609501"
},
{
"name": "Haskell",
"bytes": "208"
},
{
"name": "Java",
"bytes": "9111"
},
{
"name": "Nim",
"bytes": "208992"
},
{
"name": "OCaml",
"bytes": "221"
},
{
"name": "Python",
"bytes": "410086"
}
],
"symlink_target": ""
} |
# Public API of the rtxp.stellar package (Python 2 implicit relative imports).
from remote import Remote
from transaction import Transaction as transaction
from singletons import *
# `signer` is provided by the wildcard import from `singletons` above;
# re-export its key functions at package level for convenience.
generate_keypair = signer.generate_keypair
sign = signer.sign
| {
"content_hash": "4569c998143798a2567165f696a75614",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 50,
"avg_line_length": 27.5,
"alnum_prop": 0.8303030303030303,
"repo_name": "johansten/rtxp-py",
"id": "d71ddfbcc2c59bf6c41033c212670936193fefe4",
"size": "166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rtxp/stellar/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "55900"
}
],
"symlink_target": ""
} |
"""Tests for utilities for testing distributions and/or bijectors."""
import warnings
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import lognormal
from tensorflow_probability.python.distributions import normal
from tensorflow_probability.python.distributions import poisson
from tensorflow_probability.python.internal import test_util
from tensorflow_probability.python.internal import vectorization_util
@test_util.test_all_tf_execution_regimes
class VectorizationTest(test_util.TestCase):
  """Tests for `vectorization_util.iid_sample` / `make_rank_polymorphic`."""

  def test_iid_sample_stateful(self):
    # Random fn using stateful samplers.
    def fn(key1, key2, seed=None):
      return [
          normal.Normal(0., 1.).sample([3, 2], seed=seed), {
              key1: poisson.Poisson([1., 2., 3., 4.]).sample(seed=seed + 1),
              key2: lognormal.LogNormal(0., 1.).sample(seed=seed + 2)
          }
      ]

    sample = self.evaluate(
        fn('a', key2='b', seed=test_util.test_seed(sampler_type='stateful')))
    sample_shape = [6, 1]
    iid_fn = vectorization_util.iid_sample(fn, sample_shape=sample_shape)
    iid_sample = self.evaluate(iid_fn('a', key2='b', seed=42))

    # Check that we did not get repeated samples.
    first_sampled_vector = iid_sample[0].flatten()
    self.assertAllGreater(
        (first_sampled_vector[1:] - first_sampled_vector[0])**2, 1e-6)

    expected_iid_shapes = tf.nest.map_structure(
        lambda x: np.concatenate([sample_shape, x.shape], axis=0), sample)
    iid_shapes = tf.nest.map_structure(lambda x: x.shape, iid_sample)
    self.assertAllEqualNested(expected_iid_shapes, iid_shapes)

  def test_iid_sample_stateless(self):
    sample_shape = [6]
    iid_fn = vectorization_util.iid_sample(
        tf.random.stateless_normal, sample_shape=sample_shape)
    # Fix: call `simplefilter` *inside* `catch_warnings` so the 'always'
    # filter is restored on exit instead of leaking into other tests.
    with warnings.catch_warnings(record=True) as triggered:
      warnings.simplefilter('always')
      samples = iid_fn([], seed=test_util.test_seed(sampler_type='stateless'))
      self.assertTrue(
          any('may be quite slow' in str(warning.message)
              for warning in triggered))

    # Check that we did not get repeated samples.
    samples_ = self.evaluate(samples)
    self.assertAllGreater((samples_[1:] - samples_[0])**2, 1e-6)

  def test_docstring_example(self):
    add = lambda a, b: a + b
    add_vector_to_scalar = vectorization_util.make_rank_polymorphic(
        add, core_ndims=(1, 0))
    self.assertAllEqual(
        [[4., 5.], [5., 6.], [6., 7.]],
        self.evaluate(add_vector_to_scalar(
            tf.constant([1., 2.]), tf.constant([3., 4., 5.]))))

  def test_can_take_structured_input_and_output(self):
    # Dummy function that takes a (tuple, dict) pair
    # and returns a (dict, scalar) pair.
    def fn(x, y):
      a, b, c = x
      d, e = y['d'], y['e']
      return {'r': a * b + c}, d + e

    vectorized_fn = vectorization_util.make_rank_polymorphic(
        fn, core_ndims=0)
    x = np.array([[2.], [3.]]), np.array(2.), np.array([5., 6., 7.])
    y = {'d': np.array([[1.]]), 'e': np.array([2., 3., 4.])}
    vectorized_result = self.evaluate(vectorized_fn(x, y))
    result = tf.nest.map_structure(lambda a, b: a * np.ones(b.shape),
                                   fn(x, y), vectorized_result)
    self.assertAllCloseNested(result, vectorized_result)

  @parameterized.named_parameters(
      ('static_shapes', True),
      ('dynamic_shapes', False))
  def tests_aligns_broadcast_dims_using_core_ndims(self, is_static):
    np.random.seed(test_util.test_seed() % 2**32)

    def matvec(a, b):
      # Throws an error if either arg has extra dimensions.
      return tf.linalg.matvec(tf.reshape(a, tf.shape(a)[-2:]),
                              tf.reshape(b, tf.shape(b)[-1:]))

    vectorized_matvec = vectorization_util.make_rank_polymorphic(
        matvec, core_ndims=(
            self.maybe_static(2, is_static=is_static),
            self.maybe_static(1, is_static=is_static)))
    for (a_shape, b_shape) in (([3, 2], [2]),
                               ([4, 3, 2], [2]),
                               ([4, 3, 2], [5, 1, 2])):
      a = self.maybe_static(np.random.randn(*a_shape), is_static=is_static)
      b = self.maybe_static(np.random.randn(*b_shape), is_static=is_static)
      c = tf.linalg.matvec(a, b)
      c_vectorized = vectorized_matvec(a, b)
      if is_static:
        self.assertAllEqual(c.shape, c_vectorized.shape)
      self.assertAllEqual(*self.evaluate((c, c_vectorized)))

  def test_can_call_with_variable_number_of_args(self):
    def scalar_sum(*args):
      return sum([tf.reshape(x, []) for x in args])

    vectorized_sum = vectorization_util.make_rank_polymorphic(
        scalar_sum, core_ndims=0)
    xs = [1.,
          np.array([3., 2.]).astype(np.float32),
          np.array([[1., 2.], [-4., 3.]]).astype(np.float32)]
    self.assertAllEqual(self.evaluate(vectorized_sum(*xs)), sum(xs))

  def test_passes_insufficient_rank_input_through_to_function(self):
    vectorized_vector_sum = vectorization_util.make_rank_polymorphic(
        lambda a, b: a + b, core_ndims=(1, 1))
    c = vectorized_vector_sum(tf.convert_to_tensor(3.),
                              tf.convert_to_tensor([1., 2., 3.]))
    self.assertAllClose(c, [4., 5., 6.])

    vectorized_matvec = vectorization_util.make_rank_polymorphic(
        tf.linalg.matvec, core_ndims=(2, 1))
    # `assertRaisesRegexp` was removed in Python 3.12; use the
    # non-deprecated spelling.
    with self.assertRaisesRegex(
        ValueError, 'Shape must be rank 2 but is rank 1'):
      vectorized_matvec(tf.zeros([5]), tf.zeros([2, 1, 5]))

  def test_can_escape_vectorization_with_none_ndims(self):
    # Suppose the original fn supports `None` as an input.
    fn = lambda x, y: (tf.reduce_sum(x, axis=0), y[0] if y is not None else y)

    polymorphic_fn = vectorization_util.make_rank_polymorphic(
        fn, core_ndims=[1, None])
    rx, ry = polymorphic_fn([[1., 2., 4.], [3., 5., 7.]], None)
    self.assertAllEqual(rx.shape, [2])
    self.assertIsNone(ry)

    single_arg_polymorphic_fn = vectorization_util.make_rank_polymorphic(
        lambda y: fn(tf.convert_to_tensor([1., 2., 3.]), y), core_ndims=None)
    rx, ry = self.evaluate(single_arg_polymorphic_fn(
        tf.convert_to_tensor([[1., 3.], [2., 4.]])))
    self.assertAllEqual(ry, [1., 3.])

  def test_unit_batch_dims_are_flattened(self):
    # Define `fn` to expect a vector input.
    fn = lambda x: tf.einsum('n->', x)
    # Verify that it won't accept a batch dimension.
    with self.assertRaisesRegex(Exception, 'rank'):
      fn(tf.zeros([1, 5]))

    polymorphic_fn = vectorization_util.make_rank_polymorphic(fn,
                                                              core_ndims=[1])
    for batch_shape in ([], [1], [1, 1]):
      self.assertEqual(batch_shape,
                       polymorphic_fn(tf.zeros(batch_shape + [5])).shape)

  def test_unit_batch_dims_are_not_vectorized(self):
    if not tf.executing_eagerly():
      self.skipTest('Test relies on eager execution.')

    # Define `fn` to expect a vector input.
    def must_run_eagerly(x):
      if not tf.executing_eagerly():
        raise ValueError('Code is running inside tf.function. This may '
                         'indicate that auto-vectorization is being '
                         'triggered unnecessarily.')
      return x

    polymorphic_fn = vectorization_util.make_rank_polymorphic(
        must_run_eagerly, core_ndims=[0])
    for batch_shape in ([], [1], [1, 1]):
      polymorphic_fn(tf.zeros(batch_shape))

  def test_docstring_example_passing_fn_arg(self):
    def apply_binop(fn, a, b):
      return fn(a, b)

    apply_binop_to_vector_and_scalar = vectorization_util.make_rank_polymorphic(
        apply_binop, core_ndims=(None, 1, 0))
    r = self.evaluate(apply_binop_to_vector_and_scalar(
        lambda a, b: a * b, tf.constant([1., 2.]), tf.constant([3., 4., 5.])))
    self.assertAllEqual(r, np.array(
        [[3., 6.], [4., 8.], [5., 10.]], dtype=np.float32))

  def test_rectifies_distribution_batch_shapes(self):
    def fn(scale):
      d = normal.Normal(loc=0, scale=[scale])
      x = d.sample()
      return d, x, d.log_prob(x)

    polymorphic_fn = vectorization_util.make_rank_polymorphic(
        fn, core_ndims=(0))
    batch_scale = tf.constant([[4., 2., 5.], [1., 2., 1.]], dtype=tf.float32)
    d, x, lp = polymorphic_fn(batch_scale)
    self.assertAllEqual(d.batch_shape.as_list(), x.shape.as_list())
    lp2 = d.log_prob(x)
    self.assertAllClose(*self.evaluate((lp, lp2)))
# Run via the TFP test harness when executed directly.
if __name__ == '__main__':
  test_util.main()
| {
"content_hash": "53f8155605c3d1ac0019a72f477dc45a",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 80,
"avg_line_length": 39.40552995391705,
"alnum_prop": 0.6154835691731961,
"repo_name": "tensorflow/probability",
"id": "034700a05d07b1b1aa09aaad5507e5c4f24135f3",
"size": "9229",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_probability/python/internal/vectorization_util_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "55552121"
},
{
"name": "Python",
"bytes": "17339674"
},
{
"name": "Shell",
"bytes": "24852"
},
{
"name": "Starlark",
"bytes": "663851"
}
],
"symlink_target": ""
} |
import random
from locust import HttpUser, TaskSet, task, between
from pyquery import PyQuery
class BrowseDocumentation(TaskSet):
    """TaskSet simulating a user browsing the Locust documentation.

    Task weights: reload the index (~10), open a random table-of-contents
    page (~50), follow an internal link on the current page (~30).
    """

    def on_start(self):
        # assume all users arrive at the index page
        self.index_page()
        self.urls_on_current_page = self.toc_urls

    @task(10)
    def index_page(self):
        # Fetch the index and remember the table-of-contents links.
        r = self.client.get("/")
        pq = PyQuery(r.content)
        link_elements = pq(".toctree-wrapper a.internal")
        self.toc_urls = [l.attrib["href"] for l in link_elements]

    @task(50)
    def load_page(self, url=None):
        # Bug fix: the `url` parameter was accepted but unconditionally
        # overwritten. Honor an explicitly supplied URL and only fall back
        # to a random TOC entry when none is given.
        if url is None:
            url = random.choice(self.toc_urls)
        r = self.client.get(url)
        pq = PyQuery(r.content)
        link_elements = pq("a.internal")
        self.urls_on_current_page = [l.attrib["href"] for l in link_elements]

    @task(30)
    def load_sub_page(self):
        # Follow a random internal link discovered on the last loaded page.
        url = random.choice(self.urls_on_current_page)
        r = self.client.get(url)
class AwesomeUser(HttpUser):
    """Simulated visitor of the Locust documentation site."""
    tasks = [BrowseDocumentation]
    host = "https://docs.locust.io/en/latest/"

    # we assume someone who is browsing the Locust docs,
    # generally has a quite long waiting time (between
    # 20 and 600 seconds), since there's a bunch of text
    # on each page
    wait_time = between(20, 600)
| {
"content_hash": "6df47b6409769c5d5a9b3b4e6d8a149e",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 77,
"avg_line_length": 30.658536585365855,
"alnum_prop": 0.6332537788385044,
"repo_name": "locustio/locust",
"id": "7d2e3471c13d2efd0710042e4d090d684cb16a77",
"size": "1376",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/browse_docs_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "733"
},
{
"name": "HTML",
"bytes": "33145"
},
{
"name": "JavaScript",
"bytes": "17309"
},
{
"name": "Makefile",
"bytes": "436"
},
{
"name": "Python",
"bytes": "914443"
},
{
"name": "Sass",
"bytes": "10379"
},
{
"name": "Shell",
"bytes": "3452"
}
],
"symlink_target": ""
} |
import base64
import fileinput
import os
import pickle
import platform
import re
import sys
import fnmatch
from collections import defaultdict
from datetime import datetime
from gppylib import gplog, gpversion
from gppylib.commands.base import Command, ExecutionError, WorkerPool, REMOTE
from gppylib.commands.gp import GpStop, GpStart, GpVersion
from gppylib.commands.unix import findCmdInPath
from gppylib.db import dbconn
from gppylib.gparray import GpArray
from gppylib.operations import Operation
from gppylib.operations.utils import RemoteOperation, ParallelOperation
from gppylib.userinput import ask_yesno
# Shared module-level logger for the persistent-table rebuild utilities.
logger = gplog.get_default_logger()

DEFAULT_BATCH_SIZE = 16                      # default worker-pool parallelism
SYSTEM_FSOID = 3052                          # oid of the system filespace
DEFAULT_DATABASE = 'postgres'                # database used for catalog queries
DEFAULT_BACKUP_DIR_PREFIX = 'pt_rebuild_bk_' # prefix for backup directories
PGPORT=os.environ.get('PGPORT', '5432')
# Timestamp captured once at import; names this run's backup/restore log.
TIMESTAMP = datetime.now().strftime("%Y%m%d%H%M%S")
BACKUP_RESTORE_LOG = os.path.join('/tmp', 'pt_bkup_restore_' + TIMESTAMP + '.log')
# Transaction-log directories that may need copying during rebuild.
TRANSACTION_LOG_DIRS = ['pg_clog', 'pg_xlog', 'pg_distributedlog', 'pg_distributedxidmap', 'pg_changetracking']
NON_EMPTY_TRANSACTION_LOG_DIRS = ['pg_clog', 'pg_xlog', 'pg_distributedlog']
GLOBAL_PERSISTENT_FILES = defaultdict(defaultdict) # {segment: {dbid1: [file1, file2], dbid2: [file3, file4]}}
PER_DATABASE_PERSISTENT_FILES = defaultdict(defaultdict) # {dbid1:{dboid1:[list],dboid2:[list]},
                                                         # dbid2:{dboid1:[list],dboid2:[list]}},
                                                         # 'list' containing the list of
                                                         # relfilenode ids of the perdb pt files (5094, 5095)
class ValidateContentID:
    '''
    Parses the content_id or the contentid_file passed in by the user and
    validates the content ids.

    content_id: comma-separated string of content ids (or None)
    contentid_file: path to a file with one content id per line (or None)
    gparray: GpArray describing the cluster's segment configuration
    '''
    def __init__(self, content_id, contentid_file, gparray):
        self.content_id = content_id
        self.contentid_file = contentid_file
        self.gparray = gparray

    def _parse_content_id(self):
        '''
        Parse the content id passed in by the user and
        return it as a list of integers
        '''
        try:
            # Python 2: map() returns a list here.
            self.content_id = self.content_id.strip().split(',')
            self.content_id = map(int, self.content_id)
        except Exception as e:
            logger.error('Please correct the input data and try again')
            raise Exception('Some content ids are not integers: %s.' % self.content_id)
        return self.content_id

    def _validate_contentid_file(self):
        '''
        Validate the file for invalid integers
        Skips blank lines
        '''
        if not os.path.isfile(self.contentid_file):
            raise Exception('Unable to find contentid file "%s"' % self.contentid_file)
        self.content_id = []
        with open(self.contentid_file) as fp:
            for line in fp:
                line = line.strip()
                if line:
                    try:
                        line = int(line)
                    except Exception as e:
                        logger.error('File should contain integer content ids one per line. Please correct the input data and try again')
                        raise Exception('Found non integer content id "%s" in contentid file "%s"' % (line, self.contentid_file))
                    self.content_id.append(line)
        # Empty (or all-blank) files are rejected.
        if not self.content_id:
            raise Exception('Please make sure there is atleast one integer content ID in the file')
        return self.content_id

    def _validate_content_id(self):
        """
        Validates that the content ids are valid contents by checking
        gp_segment_configuration and that atleast the primary is up.
        Returns error if content id is not present in gp_segment_configuration
        or both primary and mirror are down
        """
        """Collect a list of all content ids that are in the gp_segment_configuration"""
        valid_content_ids = set([seg.getSegmentContentId() for seg in self.gparray.getDbList()])
        invalid_content_ids = set()
        for c in self.content_id:
            if c not in valid_content_ids:
                invalid_content_ids.add(c)
        if len(invalid_content_ids) > 0:
            raise Exception('The following content ids are not present in gp_segment_configuration: %s' % ', '.join(map(str, invalid_content_ids)))
        """
        Collect a list of all segments where either primary or mirror is up. Whichever segment is up
        will be acting as primary. If we don't find even one segment that is up for a particular
        content, then it mean both primary and mirror are down. Hence down_content_ids will only
        be populated if both the primary and mirror are down and not if only one of them is down.
        """
        up_content_ids = set()
        resync_content_ids = set()
        resync_content_ids_to_rebuild = set()
        resync_content_ids_notto_rebuild = set()
        for seg in self.gparray.getDbList():
            status = seg.getSegmentStatus()
            mode = seg.getSegmentMode()
            c = seg.getSegmentContentId()
            if status == 'u':
                up_content_ids.add(c)
            # mode 'r' + status 'u' means the segment is in resync.
            if mode == 'r' and status == 'u':
                resync_content_ids.add(c)
        for c in self.content_id:
            if c in resync_content_ids:
                resync_content_ids_to_rebuild.add(c)
        # Resync contents NOT selected for rebuild only produce a warning.
        resync_content_ids_notto_rebuild = resync_content_ids.difference(resync_content_ids_to_rebuild)
        if len(resync_content_ids_to_rebuild) > 0:
            raise Exception('Can not rebuild persistent tables for content ids that are in resync mode: %s' % ', '.join(map(str, resync_content_ids_to_rebuild)))
        if len(resync_content_ids_notto_rebuild) > 0:
            warning_msgs= ['****************************************************', 'There are some other content ids which are in resync.',
                           'Start rebuild process may change their current status.', '****************************************************']
            for warning_msg in warning_msgs:
                logger.warning(warning_msg)
            # NOTE(review): `input` shadows the builtin here; harmless in
            # this scope but worth renaming in a behavior-changing pass.
            input = ask_yesno(None, 'Do you still wish to continue ?', 'N')
            if not input:
                raise Exception('Aborting rebuild due to user request')
        return self.content_id

    def validate(self):
        """
        We make sure that the user does not pass in both content_id and
        contentid_file in gppersistentrebuild and that alteast one of
        them is definitely passed in. All the content-ids are populated
        in self.content_id and that is validated against gp_segment_configuration
        """
        if self.content_id:
            self._parse_content_id()
        elif self.contentid_file:
            self._validate_contentid_file()
        return self._validate_content_id()
class DbIdInfo:
    """
    Value object bundling everything known about one segment dbid:
    its content id, role, port, host, filespace directories, the
    filespace->tablespace and tablespace->database-oid maps, and
    whether the segment is currently down.
    """
    def __init__(self, content, role, dbid, port, hostname, filespace_dirs, fs_to_ts_map, ts_to_dboid_map, is_down):
        self.content = content
        self.role = role
        self.dbid = dbid
        self.port = port
        self.hostname = hostname
        self.filespace_dirs = filespace_dirs
        self.fs_to_ts_map = fs_to_ts_map
        self.ts_to_dboid_map = ts_to_dboid_map
        self.is_down = is_down

    def __eq__(self, other):
        # Two infos are equal iff every attribute matches.
        return vars(self) == vars(other)

    def __str__(self):
        # Colon-separated dump of all fields, in declaration order.
        parts = (self.content, self.role, self.dbid, self.port,
                 self.hostname, self.filespace_dirs, self.fs_to_ts_map,
                 self.ts_to_dboid_map, self.is_down)
        return ':'.join('%s' % part for part in parts)
class GetDbIdInfo:
    """
    Queries the catalog for filespace/tablespace/database relationships
    and assembles one DbIdInfo per selected segment.
    """
    def __init__(self, gparray, content_id):
        self.gparray = gparray
        self.content_id = content_id

    def _get_filespace_to_tablespace_map(self, segdb):
        '''
        Return {filespace_oid: [tablespace_oid, ...]} for the given segdb.
        '''
        fs_oids = segdb.getSegmentFilespaces().keys()
        FILESPACE_TO_TABLESPACE_MAP_QUERY = """SELECT spcfsoid, string_agg(oid, ' ')
                                               FROM pg_tablespace
                                               WHERE spcfsoid IN (%s) GROUP BY spcfsoid""" % ', '.join(map(str, fs_oids))
        fs_to_ts_map = {}
        with dbconn.connect(dbconn.DbURL(dbname=DEFAULT_DATABASE)) as conn:
            # string_agg packs the tablespace oids into one space-separated
            # column; split it back into a list of ints.
            for row in dbconn.execSQL(conn, FILESPACE_TO_TABLESPACE_MAP_QUERY):
                fs_to_ts_map[row[0]] = [int(oid) for oid in row[1].split()]
        return fs_to_ts_map

    def _get_tablespace_to_dboid_map(self, ts_oids):
        '''
        Return {tablespace_oid: [database_oid, ...]} for the given
        tablespace oids.
        '''
        TABLESPACE_TO_DBOID_MAP_QUERY = """SELECT dattablespace, string_agg(oid, ' ')
                                           FROM pg_database
                                           WHERE dattablespace IN (%s) GROUP BY dattablespace""" % ', '.join(map(str, ts_oids))
        ts_to_dboid_map = {}
        with dbconn.connect(dbconn.DbURL(dbname=DEFAULT_DATABASE)) as conn:
            for row in dbconn.execSQL(conn, TABLESPACE_TO_DBOID_MAP_QUERY):
                ts_to_dboid_map[row[0]] = [int(oid) for oid in row[1].split()]
        return ts_to_dboid_map

    def get_info(self):
        '''
        Gather information for every segdb whose content id was selected
        for a persistent-table rebuild; returns a list of DbIdInfo.
        '''
        dbid_info = []
        for seg in self.gparray.getDbList():
            if seg.getSegmentContentId() not in self.content_id:
                continue
            is_down = seg.isSegmentDown()
            role = seg.getSegmentRole()
            # We don't want to run the rebuild on segments that are down;
            # this can cause issues, especially when the segment in
            # question has missing data/files.
            if is_down and role == 'm':
                continue
            fs_to_ts_map = self._get_filespace_to_tablespace_map(seg)
            tablespace_oids = []
            for ts_list in fs_to_ts_map.values():
                tablespace_oids += ts_list
            ts_to_dboid_map = self._get_tablespace_to_dboid_map(tablespace_oids)
            dbid_info.append(DbIdInfo(content=seg.getSegmentContentId(),
                                      role=role,
                                      dbid=seg.getSegmentDbId(),
                                      port=seg.getSegmentPort(),
                                      hostname=seg.getSegmentHostName(),
                                      filespace_dirs=seg.getSegmentFilespaces(),
                                      fs_to_ts_map=fs_to_ts_map,
                                      ts_to_dboid_map=ts_to_dboid_map,
                                      is_down=is_down))
        return dbid_info
class ValidateMD5Sum:
    """Checks the md5 sum for a list of files.

    pool: WorkerPool used to run the md5 commands in parallel
    batch_size: parallelism hint (kept for callers; not read directly here)
    """
    def __init__(self, pool, batch_size=DEFAULT_BATCH_SIZE):
        self.batch_size = batch_size
        self.md5_prog = None         # set by init(): 'md5' (Darwin) or 'md5sum' (Linux)
        self.md5_results_pat = None  # set by init(): compiled output-parsing regex
        self.pool = pool

    def _get_md5_prog(self):
        """Get the appropriate md5 program for the platform"""
        md5_prog = ''
        operating_sys = platform.system()
        if operating_sys == 'Darwin':
            md5_prog = 'md5'
        elif operating_sys == 'Linux':
            md5_prog = 'md5sum'
        else:
            raise Exception('Cannot determine the md5 program since %s platform is not supported' % operating_sys)
        return md5_prog

    def _get_md5_results_pat(self):
        """
        We want to parse the results of the md5progs in order to extract the filename
        and its correspoding md5sum.
        On OSX, the md5 program will return output in the following format
            MD5 (<filename>) = <md5_hash>
        On Linux, the md5 program will return output in the following format
            <md5_hash> <filename>
        Hence this returns an re.pattern object so that we can extract the required
        information
        """
        operating_sys = platform.system()
        if operating_sys == 'Darwin':
            md5_pat = re.compile('MD5 \((.*)\) = (.*)')
        elif operating_sys == 'Linux':
            # NOTE(review): greedy first group relies on the later .strip()
            # calls in _process_md5_results to drop the extra separator
            # space; filenames containing spaces would parse incorrectly.
            md5_pat = re.compile('(.*) (.*)')
        else:
            raise Exception('Cannot determine the pattern for results of md5 program since %s platform is not supported' % operating_sys)
        return md5_pat

    def init(self):
        """
        Initialize the class with the md5 program and the pattern
        based on the platform
        Ideally this should be called once per run of the program
        in order to be efficient. It is the callers reponsibilty
        to ensure that.
        """
        self.md5_prog = self._get_md5_prog()
        self.md5_results_pat = self._get_md5_results_pat()

    def _process_md5_results(self):
        """
        Returns a dictionary with the key as the filename
        and the value as the md5 hash value
        If there was any error, it raises an Exception
        """
        md5s = {}
        for item in self.pool.getCompletedItems():
            result = item.get_results()
            if not result.wasSuccessful():
                raise Exception('Unable to calculate md5sum for: %s' % (result.stderr.strip()))
            md5_results = result.stdout.strip().split('\n')
            for md5_result in md5_results:
                mat = self.md5_results_pat.match(md5_result.strip())
                if mat:
                    # Group order differs per platform (hash-first on Linux,
                    # filename-first on Darwin).
                    if platform.system() == 'Linux':
                        f, md5 = mat.group(2), mat.group(1)
                    else:
                        f, md5 = mat.group(1), mat.group(2)
                    md5s[f.strip()] = md5.strip()
        return md5s

    def validate(self, src_files):
        """Run the md5 program and calculate the md5sum for the src_files"""
        for f in src_files:
            cmd = Command('calculate md5sum for file', cmdStr='%s %s' % (self.md5_prog, f))
            self.pool.addCommand(cmd)
        self.pool.join()
        return self._process_md5_results()
class BackupPersistentTableFiles:
    """
    Backup all the global and the per database persistent table files.

    The same helpers drive both backup and restore -- only the direction of
    the copy changes.  Every copy is verified by comparing md5 checksums of
    the source and destination files.
    """
    def __init__(self, dbid_info, timestamp, perdb_pt_filenames, global_pt_filenames, batch_size=DEFAULT_BATCH_SIZE, backup_dir=None, validate_only=False):
        # dbid_info: list of per-dbid descriptors (filespace/tablespace maps etc.)
        self.dbid_info = dbid_info
        # timestamp used to name the backup directory (<prefix><timestamp>)
        self.timestamp = timestamp
        self.batch_size = batch_size
        # optional alternate root for backups; defaults to the filespace dir
        self.backup_dir = backup_dir
        self.md5_validator = None
        self.pool = None
        # dbid -> list of global persistent table relfilenodes
        self.GLOBAL_PERSISTENT_FILES = global_pt_filenames
        # dbid -> {database oid: [relfilenodes]} for per-database persistent tables
        self.PER_DATABASE_PERSISTENT_FILES = perdb_pt_filenames
        # when True, only verify that the source files exist; do not copy
        self.validate_only = validate_only

    def _cleanup_pool(self):
        """Halt and join the worker pool, if one is active."""
        if self.pool:
            self.pool.haltWork()
            self.pool.joinWorkers()
            self.pool = None

    def _copy_files(self, src_files, dest_files, dbid, actionType):
        """
        This actually does the copy of the files from src directory to backup directory
        In case of backup, the destination folder might not exist. Hence we create it.
        While restoring it, we always restore to datadirectory and it should be present,
        hence we do not bother to create it.

        Raises if any copy fails or if the md5 checksums of source and
        destination files do not match.
        """
        src_md5 = self.md5_validator.validate(src_files)
        with open(BACKUP_RESTORE_LOG, 'a') as fw:
            fw.write('************************************************\n')
            fw.write('DBID = %s\t%s\n' % (dbid, actionType))
            for i in range(len(src_files)):
                dest_dir = os.path.dirname(dest_files[i])
                try:
                    if not os.path.isdir(dest_dir):
                        os.makedirs(dest_dir)
                # fixed: was "except Exception, e" (Python-2-only syntax,
                # inconsistent with the "as e" form used everywhere else)
                except Exception as e:
                    raise Exception("Failed to create destination directory %s\n" % str(e))
                fw.write('%s => %s\n' % (src_files[i], dest_files[i]))
                # NOTE(review): paths are not shell-quoted, so paths containing
                # whitespace would break the cp command -- confirm that data
                # directory paths never contain spaces
                cmd = Command('copy files', cmdStr='cp %s %s' % (src_files[i], dest_files[i]))
                self.pool.addCommand(cmd)
        self.pool.join()
        self.pool.check_results()
        dest_md5 = self.md5_validator.validate(dest_files)
        self.md5_validate(src_files, dest_files, src_md5, dest_md5)

    def md5_validate(self, src_files, dest_files, src_md5, dest_md5):
        """
        This is to verify that src files are matching dest files based on their md5 hash code

        src_md5/dest_md5 map file path -> digest; src_files[i] corresponds to
        dest_files[i].  Raises with the full set of mismatching pairs.
        """
        unmatched_expected_src_md5 = {}
        unmatched_actual_dest_md5 = {}
        for i in range(len(src_files)):
            src_file = src_files[i]
            dest_file = dest_files[i]
            if src_md5[src_file] != dest_md5[dest_file]:
                unmatched_expected_src_md5[src_file] = src_md5[src_file]
                unmatched_actual_dest_md5[dest_file] = dest_md5[dest_file]
        if unmatched_expected_src_md5:
            raise Exception('MD5 sums do not match! Expected md5 = "%s", but actual md5 = "%s"' %
                            (unmatched_expected_src_md5, unmatched_actual_dest_md5))

    def build_PT_src_dest_pairs(self, src_dir, dest_dir, file_list):
        """
        src_dir: source directory to copy pt files from
        dest_dir: destination directory to backup pt files
        file_list: list of pt files to backup
        return: list of source files and destination files, if missing
        any source files, return None, None
        """
        if file_list is None or len(file_list) == 0:
            logger.error('Persistent source file list is empty or none')
            return None, None
        if not os.path.isdir(src_dir) or len(os.listdir(src_dir)) == 0:
            logger.error('Directory %s either does not exist or is empty' % src_dir)
            return None, None
        missed_files = []
        src_files, dest_files = [], []
        for f in file_list:
            file = os.path.join(src_dir, f)
            if not os.path.isfile(file):
                missed_files.append(file)
            else:
                src_files.append(file)
                dest_files.append(os.path.join(dest_dir, f))
                # large heap tables are split into parts as [table, table.1, table.2, ...];
                # pick up every additional segment file of this relfilenode
                for relfilenode in os.listdir(src_dir):
                    if fnmatch.fnmatch(relfilenode, f + ".*"):
                        src_files.append(os.path.join(src_dir, relfilenode))
                        dest_files.append(os.path.join(dest_dir, relfilenode))
        if len(missed_files) > 0:
            logger.error('Missing source files %s' % missed_files)
            return None, None
        return src_files, dest_files

    def build_Xactlog_src_dest_pairs(self, srcDir, destDir):
        """
        srcDir: absolute path to source data directory
        destDir: absolute path to destination data directory
        srcFiles: list of absolute paths to source files
        destFiles: list of absolute paths to destination files
        This function takes whatever under srcDir, and put it under destDir:
        eg: srcDir: pg_clog_bk_20150420234453/
            srcFile: /data/gpdb/master/gpseg-1/pg_clog_bk_20150420234453/0000
            destDir: pg_clog/
            destFile: /data/gpdb/master/gpseg-1/pg_clog/0000
        """
        srcFiles, destFiles = [], []
        for dirName, _, fileList in os.walk(srcDir):
            # preserve the directory layout relative to srcDir
            relDir = os.path.relpath(dirName, srcDir)
            for f in fileList:
                relFile = os.path.join(relDir, f)
                srcFile = os.path.join(dirName, f)
                destFile = os.path.join(destDir, relFile)
                srcFiles.append(srcFile)
                destFiles.append(destFile)
        return srcFiles, destFiles

    def _copy_global_pt_files(self, restore=False):
        """
        Copies the global persistent table files to required directory
        If there is any error, we add it to the list of failures and return
        the list
        """
        if restore:
            op = 'Restore of global persistent files'
        else:
            op = 'Backup of global persistent files'
        logger.info(op)
        failures = []
        for di in self.dbid_info:
            # Find out the system filespace
            fs_dir = di.filespace_dirs[SYSTEM_FSOID].rstrip(os.sep)
            data_dir = os.path.join(fs_dir, 'global')
            if self.backup_dir:
                bk_dir = os.path.join(self.backup_dir,
                                      '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                      str(di.dbid),
                                      os.path.basename(fs_dir),
                                      'global')
            else:
                bk_dir = os.path.join(fs_dir,
                                      '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                      'global')
            file_list = self.GLOBAL_PERSISTENT_FILES[di.dbid]
            # direction of the copy depends on backup vs restore
            if not restore:
                src_files, dest_files = self.build_PT_src_dest_pairs(data_dir, bk_dir, file_list)
            else:
                src_files, dest_files = self.build_PT_src_dest_pairs(bk_dir, data_dir, file_list)
            if src_files is None or len(src_files) == 0:
                raise Exception('Missing global persistent files from source directory.')
            logger.debug('DBID = %s' % di.dbid)
            logger.debug('Source files = %s' % src_files)
            logger.debug('Destination files = %s' % dest_files)
            try:
                if not self.validate_only:
                    self._copy_files(src_files, dest_files, di.dbid, op)
            except Exception as e:
                failures.append((di, str(e)))
                logger.error('%s failed' % op)
                logger.error('DBID = %s' % di.dbid)
                logger.error('%s' % str(e))
        if failures:
            raise Exception('%s failed' % op)

    def _copy_per_db_pt_files(self, restore=False):
        """
        Copies the per database persistent table files to required directory
        If there is any error, we add it to the list of failures and return
        the list
        """
        if restore:
            op = 'Restore of per database persistent files'
        else:
            op = 'Backup of per database persistent files'
        logger.info(op)
        failures = []
        for di in self.dbid_info:
            # visit each filespace
            sys_filespace = di.filespace_dirs[SYSTEM_FSOID].rstrip(os.sep)
            for fsoid, fsdir in di.filespace_dirs.items():
                if fsoid not in di.fs_to_ts_map:
                    continue
                fsdir = fsdir.rstrip(os.sep)
                # visit each tablespace
                for tsoid in di.fs_to_ts_map[fsoid]:
                    if tsoid not in di.ts_to_dboid_map:
                        continue
                    # visit each database
                    for dboid in di.ts_to_dboid_map[tsoid]:
                        # non-system filespaces lay databases out under the
                        # tablespace oid instead of 'base'
                        if fsoid != SYSTEM_FSOID:
                            base_dir = tsoid
                        else:
                            base_dir = 'base'
                        if dboid in self.PER_DATABASE_PERSISTENT_FILES[di.dbid]:
                            file_list = self.PER_DATABASE_PERSISTENT_FILES[di.dbid][dboid]
                        else:
                            # This corresponds to template0. We cannot connect to template0
                            # hence the relfilenodeid will never change.
                            file_list = ['5094', '5095']
                        # finding out persistent table files for the database
                        data_dir = os.path.join(fsdir, str(base_dir), str(dboid))
                        if self.backup_dir:
                            bk_dir = os.path.join(self.backup_dir,
                                                  '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                                  str(di.dbid),
                                                  (os.path.basename(fsdir) if fsoid == SYSTEM_FSOID
                                                   else os.path.relpath(fsdir, os.path.dirname(sys_filespace))),
                                                  str(base_dir),
                                                  str(dboid))
                        else:
                            bk_dir = os.path.join(fsdir,
                                                  '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                                  str(base_dir),
                                                  str(dboid))
                        logger.debug('Work on database location = %s' % data_dir)
                        logger.debug('Search listed persistent tables %s' % file_list)
                        if not restore:
                            src_files, dest_files = self.build_PT_src_dest_pairs(data_dir, bk_dir, file_list)
                        else:
                            src_files, dest_files = self.build_PT_src_dest_pairs(bk_dir, data_dir, file_list)
                        if src_files is None or len(src_files) == 0:
                            raise Exception('Missing per-database persistent files from source directory.')
                        logger.debug('Source files = %s' % src_files)
                        logger.debug('Destination files = %s' % dest_files)
                        try:
                            if not self.validate_only:
                                self._copy_files(src_files, dest_files, di.dbid, op)
                        except Exception as e:
                            failures.append((di, str(e)))
                            logger.error('%s failed' % op)
                            logger.error('DBID = %s' % di.dbid)
                            logger.error('Filespace location = %s' % di.filespace_dirs[SYSTEM_FSOID])
                            logger.error('%s' % str(e))
        if failures:
            raise Exception(' %s failed' % op)

    def _copy_Xactlog_files(self, restore=False):
        """
        All transaction log directories to back up or restore:
        pg_clog: keeps track of the transaction id
        pg_xlog: keeps track of the checkpoint and WAL log
        pg_distributedxidmap: Back up as maintains mapping from distributed to local xid
        pg_distributedlog: Back up as maintains distributed transaction commit status
        Add error to the list of failures and return the list
        """
        if restore:
            op = 'Restore of transaction log files'
        else:
            op = 'Backup of transaction log files'
        logger.info(op)
        failures = 0
        for di in self.dbid_info:
            datadir = di.filespace_dirs[SYSTEM_FSOID].rstrip(os.sep)
            allSrcFiles, allDestFiles = [], []
            for xlog_dir_name in TRANSACTION_LOG_DIRS:
                xlog_dir = os.path.join(datadir, xlog_dir_name)
                if self.backup_dir:
                    xlog_backup_dir = os.path.join(self.backup_dir,
                                                   '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                                   str(di.dbid),
                                                   os.path.basename(datadir),
                                                   xlog_dir_name)
                else:
                    xlog_backup_dir = os.path.join(datadir,
                                                   '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                                   xlog_dir_name)
                if restore:
                    srcFiles, destFiles = self.build_Xactlog_src_dest_pairs(xlog_backup_dir, xlog_dir)
                else:
                    srcFiles, destFiles = self.build_Xactlog_src_dest_pairs(xlog_dir, xlog_backup_dir)
                # some transaction log dirs must never be empty; bail out early
                if xlog_dir_name in NON_EMPTY_TRANSACTION_LOG_DIRS and len(srcFiles) == 0:
                    raise Exception('Source directory %s should not be empty' % (xlog_backup_dir if restore else xlog_dir))
                allSrcFiles.extend(srcFiles)
                allDestFiles.extend(destFiles)
            logger.debug('DBID = %s' % di.dbid)
            logger.debug('Source files = %s' % allSrcFiles)
            logger.debug('Destination files = %s' % allDestFiles)
            try:
                if not self.validate_only:
                    self._copy_files(allSrcFiles, allDestFiles, di.dbid, op)
            except Exception as e:
                failures += 1
                logger.error('%s failed' % op)
                logger.error('DBID = %s' % di.dbid)
                logger.error('%s' % str(e))
        if failures > 0:
            raise Exception(' %s failed' % op)

    def _copy_pg_control_file(self, restore=False, validate=False):
        """
        Backing up or restoring the pg_control file which determines
        checkpoint location in the transaction log file when running
        a database recovery

        NOTE(review): the 'validate' parameter is accepted for backward
        compatibility but is not referenced anywhere in this method.
        """
        if restore:
            op = 'Restore of global pg_control file'
        else:
            op = 'Backup of global pg_control file'
        logger.info(op)
        failures = 0
        for di in self.dbid_info:
            datadir = di.filespace_dirs[SYSTEM_FSOID].rstrip(os.sep)
            pg_control_path = os.path.join(datadir, 'global', 'pg_control')
            if self.backup_dir:
                pg_control_backup_path = os.path.join(self.backup_dir,
                                                      '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                                      str(di.dbid),
                                                      os.path.basename(datadir),
                                                      'global',
                                                      'pg_control')
            else:
                pg_control_backup_path = os.path.join(datadir,
                                                      '%s%s' % (DEFAULT_BACKUP_DIR_PREFIX, self.timestamp),
                                                      'global',
                                                      'pg_control')
            srcFiles, destFiles = [], []
            if restore:
                if not os.path.isfile(pg_control_backup_path):
                    raise Exception('Global pg_control file is missing from backup directory %s' % pg_control_backup_path)
                srcFiles.append(pg_control_backup_path)
                destFiles.append(pg_control_path)
            else:
                if not os.path.isfile(pg_control_path):
                    raise Exception('Global pg_control file is missing from source directory %s' % pg_control_path)
                srcFiles.append(pg_control_path)
                destFiles.append(pg_control_backup_path)
            logger.debug('DBID = %s' % di.dbid)
            logger.debug('Source files = %s' % srcFiles)
            logger.debug('Destination files = %s' % destFiles)
            try:
                if not self.validate_only:
                    self._copy_files(srcFiles, destFiles, di.dbid, op)
            except Exception as e:
                failures += 1
                logger.error('%s failed' % op)
                logger.error('DBID = %s' % di.dbid)
                logger.error('%s' % str(e))
        if failures > 0:
            raise Exception(' %s failed' % op)

    def backup(self):
        """Back up persistent tables, transaction logs and pg_control."""
        try:
            self.pool = WorkerPool(self.batch_size)
            self.md5_validator = ValidateMD5Sum(self.pool, self.batch_size)
            self.md5_validator.init()
            self._copy_global_pt_files()
            self._copy_per_db_pt_files()
            self._copy_Xactlog_files()
            self._copy_pg_control_file()
        finally:
            self._cleanup_pool()

    def restore(self):
        """Restore persistent tables, transaction logs and pg_control from backup."""
        try:
            self.pool = WorkerPool(self.batch_size)
            self.md5_validator = ValidateMD5Sum(self.pool, self.batch_size)
            self.md5_validator.init()
            self._copy_global_pt_files(restore=True)
            self._copy_per_db_pt_files(restore=True)
            self._copy_Xactlog_files(restore=True)
            self._copy_pg_control_file(restore=True)
        finally:
            self._cleanup_pool()
class RebuildTableOperation(Operation):
    """
    Run the sql functions to rebuild the persistent tables.

    Connects to the segment in utility mode and runs, in order:
    CHECKPOINT, gp_persistent_reset_all(), gp_persistent_build_all(),
    and a final CHECKPOINT.  Each statement is committed before the
    next one runs.
    """
    def __init__(self, dbid_info, has_mirrors):
        self.dbid_info = dbid_info
        self.has_mirrors = has_mirrors

    def run(self):
        port = self.dbid_info.port
        # removed: unused local "hostname" (the connection is made via port
        # only, in utility mode on the local host)
        # gp_persistent_build_all() needs to know whether filerep mirrors exist
        filerep_mirror = 'true' if self.has_mirrors else 'false'
        datadir = self.dbid_info.filespace_dirs[SYSTEM_FSOID]
        steps = [
            ('CHECKPOINT', 'Finished checkpoint on %s'),
            ('SELECT gp_persistent_reset_all()', 'Completed gp_persistent_reset_all() on %s'),
            ('SELECT gp_persistent_build_all(%s)' % filerep_mirror, 'Completed gp_persistent_build_all() on %s'),
            ('CHECKPOINT', 'Finished checkpoint on %s'),
        ]
        with dbconn.connect(dbconn.DbURL(dbname=DEFAULT_DATABASE, port=port), utility=True) as conn:
            for sql, msg in steps:
                dbconn.execSQL(conn, sql)
                conn.commit()
                logger.info(msg % datadir)
class ValidatePersistentBackup:
    """
    Validate that the backup for persistent table files
    are present before we actually do the rebuild
    """
    def __init__(self, dbid_info, timestamp, batch_size=DEFAULT_BATCH_SIZE, backup_dir=None):
        self.dbid_info = dbid_info
        # timestamp identifying the backup directory to look for
        self.timestamp = timestamp
        self.batch_size = batch_size
        # optional alternate backup root; defaults to the filespace dirs
        self.backup_dir = backup_dir
        self.pool = None

    def _cleanup_pool(self):
        """Halt and join the worker pool, if one is active."""
        if self.pool:
            self.pool.haltWork()
            self.pool.joinWorkers()
            self.pool = None

    def _process_results(self, di, pool):
        """Inspect completed 'find' commands; raise if any backup is missing
        or a command failed."""
        err = False
        datadir = di.filespace_dirs[SYSTEM_FSOID]
        for item in pool.getCompletedItems():
            results = item.get_results()
            # map the command back to the filespace directory it probed,
            # so the error message points at the right location
            # fixed: was iteritems() (Python-2-only, inconsistent with the
            # .items() calls used elsewhere in this class)
            for fsoid, fsdir in di.filespace_dirs.items():
                if fsdir in item.cmdStr:
                    datadir = fsdir
                    break
            if not results.wasSuccessful():
                err = True
                logger.error('marking failure for content id %s:%s since it was not successful: %s' % (di.content, datadir, results))
            elif not results.stdout.strip():
                # 'find' printing nothing means no backup directory was found
                err = True
                logger.error('marking failure for content id %s:%s since backup was not found: %s' % (di.content, datadir, results))
        if err:
            raise Exception('Failed to validate backups')

    def validate_backups(self):
        """Run a 'find' per non-empty filespace to verify the backup
        directory for self.timestamp exists."""
        try:
            self.pool = WorkerPool(self.batch_size)
            for di in self.dbid_info:
                for fsoid, fsdir in di.filespace_dirs.items():
                    if fsoid not in di.fs_to_ts_map:
                        continue
                    # only check filespaces that actually hold databases
                    is_filespace_empty = True
                    for tsoid in di.fs_to_ts_map[fsoid]:
                        if tsoid in di.ts_to_dboid_map and di.ts_to_dboid_map[tsoid]:
                            is_filespace_empty = False
                            break
                    if not is_filespace_empty:
                        cmd = Command('Check if pt backup exists', cmdStr='find %s -name %s%s' %
                                      (self.backup_dir if self.backup_dir else fsdir, DEFAULT_BACKUP_DIR_PREFIX, self.timestamp))
                        self.pool.addCommand(cmd)
            self.pool.join()
            self._process_results(di, self.pool)
        finally:
            self._cleanup_pool()

    def validate_backup_dir(self):
        """
        Validate that at least root directory of backup_dir exists, if full path of backup_dir
        does not exist, then the permission should be allowed to create the full path.
        If backup_dir is under any of segment or master data directory, error out
        """
        # NOTE(review): assumes backup_dir is an absolute path ('/x/y' splits
        # to ['', 'x', 'y']) -- confirm callers normalize it
        root_backup_dir = os.path.join(os.sep, self.backup_dir.split(os.sep)[1])
        if not os.path.exists(root_backup_dir):
            raise Exception('Root backup directory is not valid, %s' % root_backup_dir)
        logger.debug('Root backup directory is valid, %s' % root_backup_dir)
        logger.debug('Validating read and write permission of backup directory %s' % self.backup_dir)
        paths = self.backup_dir.split(os.sep)
        # walk from the deepest component upwards; the first existing ancestor
        # must be readable and writable so the rest of the path can be created
        for i in xrange(len(paths) - 1, 0, -1):
            parentpath = os.path.join(os.sep, os.sep.join(paths[1:i+1]))
            if os.path.exists(parentpath) and os.access(parentpath, os.R_OK | os.W_OK):
                logger.debug('Permission is allowed for backup directory under %s' % parentpath)
                break
            elif os.path.exists(parentpath) and not os.access(parentpath, os.R_OK | os.W_OK):
                raise Exception('Permission not allowed for Backup directory under path %s' % parentpath)
        logger.debug('Validating the backup directory is not under any segment data directory')
        for di in self.dbid_info:
            for _, fsdir in di.filespace_dirs.items():
                # a relpath not starting with '..' means backup_dir is inside fsdir
                relpath = os.path.relpath(self.backup_dir, fsdir)
                if not relpath.startswith('..'):
                    raise Exception('Backup directory is a sub directory of segment data directory %s' % fsdir)
                else:
                    logger.debug('Backup directory %s is not under %s' % (self.backup_dir, fsdir))
        logger.info('Backup directory validated and good')
class RebuildTable:
    """
    This class performs the following final checks before starting the rebuild process
    1. Check if the backup is present on the segment. In case a segment went down,
       the mirror would take over and the backup might not be present on the mirror.
       Hence we do this check.
    2. Check if there are any contents that are down i.e Both primary and mirror are down.
    In either of the above two cases we chose to fail so that the user can fix the issue
    and rerun the tool.
    """
    def __init__(self, dbid_info, has_mirrors=False, batch_size=DEFAULT_BATCH_SIZE, backup_dir=None):
        self.gparray = None
        self.dbid_info = dbid_info
        # fixed: was hard-coded to False, silently discarding the caller's
        # has_mirrors argument (rebuild() would then always tell segments the
        # cluster has no mirrors)
        self.has_mirrors = has_mirrors
        self.batch_size = batch_size
        self.backup_dir = backup_dir
        self.pool = None

    def _cleanup_pool(self):
        """Halt and join the worker pool, if one is active."""
        if self.pool:
            self.pool.haltWork()
            self.pool.joinWorkers()
            self.pool = None

    def _get_valid_dbids(self, content_ids):
        """Check to see if a content is down. i.e both primary and mirror

        Returns the dbids of healthy primaries for the given contents;
        raises if any acting primary is down or in resync.
        """
        valid_dbids = []
        for seg in self.gparray.getDbList():
            if seg.getSegmentContentId() in content_ids:
                if seg.getSegmentRole() == 'p' and seg.getSegmentStatus() == 'd':
                    raise Exception('Segment %s is down. Cannot continue with persistent table rebuild' % seg.getSegmentDataDirectory())
                elif seg.getSegmentRole() == 'p' and seg.getSegmentMode() == 'r':
                    raise Exception('Segment %s is in resync. Cannot continue with persistent table rebuild' % seg.getSegmentDataDirectory())
                elif seg.getSegmentRole() == 'p' and seg.getSegmentStatus() != 'd':
                    valid_dbids.append(seg.getSegmentDbId())
        return valid_dbids

    def _validate_backups(self):
        """Verify persistent table backups exist on each host before rebuilding."""
        RunBackupRestore(self.dbid_info, TIMESTAMP, self.batch_size, self.backup_dir).validate_backups()

    def rebuild(self):
        """
        If any of the validations fail, we chose to error out.
        Otherwise we try to rebuild the persistent tables and return
        a list of successful dbidinfo and failed dbidinfo
        """
        self.gparray = GpArray.initFromCatalog(dbconn.DbURL(dbname=DEFAULT_DATABASE), utility=True)
        logger.info('Validating backups')
        self._validate_backups()
        logger.info('Validating dbids')
        content_ids = set([di.content for di in self.dbid_info])
        valid_dbids = self._get_valid_dbids(content_ids)
        valid_dbid_info = [di for di in self.dbid_info if di.dbid in valid_dbids]
        successes, failures = [], []
        operation_list = []
        logger.info('Starting persistent table rebuild operation')
        for di in valid_dbid_info:
            # the master (content -1) never has a filerep mirror, so it is
            # always rebuilt with has_mirrors=False
            if di.content != -1:
                operation_list.append(RemoteOperation(RebuildTableOperation(di, self.has_mirrors), di.hostname))
            else:
                operation_list.append(RemoteOperation(RebuildTableOperation(di, False), di.hostname))
        try:
            ParallelOperation(operation_list, self.batch_size).run()
        except Exception:
            # deliberate: per-operation failures are surfaced individually
            # via get_ret() below
            pass
        for op in operation_list:
            di = op.operation.dbid_info
            try:
                op.get_ret()
            except Exception as e:
                logger.debug('Table rebuild failed for content %s:%s ' % (di.content, str(e)))
                failures.append((di, str(e)))
            else:
                successes.append(di)
        return successes, failures
class RunBackupRestore:
    """
    This is a wrapper class to invoke $GPHOME/sbin/gppersistent_backup.py

    The dbid/persistent-file state is pickled and base64-encoded so it can be
    passed on the remote command line to each host.
    """
    def __init__(self, dbid_info, timestamp, batch_size=DEFAULT_BATCH_SIZE, backup_dir=None, validate_only=False):
        self.dbid_info = dbid_info
        # timestamp identifying the backup set to operate on
        self.timestamp = timestamp
        self.batch_size = batch_size
        # optional alternate backup root passed through via --backup-dir
        self.backup_dir = backup_dir
        self.pool = None
        # extra flag forwarded to the remote script when only validating
        self.validate_source_files_only = '--validate-source-file-only' if validate_only else ''
    def _get_host_to_dbid_info_map(self):
        # group dbid descriptors by host so one remote command covers all
        # segments on that host
        host_to_dbid_info_map = defaultdict(list)
        for di in self.dbid_info:
            host_to_dbid_info_map[di.hostname].append(di)
        return host_to_dbid_info_map
    def _process_results(self, pool, err_msg):
        # raise err_msg if any remote invocation returned a non-zero rc;
        # each failure's stdout is logged for diagnosis first
        err = False
        for completed_item in pool.getCompletedItems():
            res = completed_item.get_results()
            if res.rc != 0:
                err = True
                logger.error('************************************************')
                logger.error('%s' % res.stdout.strip())
                logger.error('************************************************')
        if err:
            raise Exception(err_msg)
    def _run_backup_restore(self, host_to_dbid_info_map, restore=False, validate_backups=False, validate_backup_dir=False):
        # Dispatch gppersistent_backup.py to every host in parallel.
        # Exactly one of the boolean flags selects the remote mode; the
        # default (all False) is --backup.
        self.pool = WorkerPool(self.batch_size)
        if restore:
            option = '--restore'
            err_msg = 'Restore of persistent table files failed'
        elif validate_backups:
            option = '--validate-backups'
            err_msg = 'Validate of backups of persistent table files failed'
        elif validate_backup_dir:
            option = '--validate-backup-dir'
            err_msg = 'Validate backup directory of persistent table files failed'
        else:
            option = '--backup'
            err_msg = 'Backup of persistent table files failed'
        verbose_logging = '--verbose' if gplog.logging_is_verbose() else ''
        try:
            for host in host_to_dbid_info_map:
                # state is pickled + urlsafe-base64 encoded to survive the
                # remote shell command line
                pickled_backup_dbid_info = base64.urlsafe_b64encode(pickle.dumps(host_to_dbid_info_map[host]))
                pickled_per_database_persistent_files = base64.urlsafe_b64encode(pickle.dumps(PER_DATABASE_PERSISTENT_FILES[host]))
                pickled_global_persistent_files = base64.urlsafe_b64encode(pickle.dumps(GLOBAL_PERSISTENT_FILES[host]))
                if self.backup_dir:
                    cmdStr = """$GPHOME/sbin/gppersistent_backup.py --timestamp %s %s %s --batch-size %s --backup-dir %s --perdbpt %s --globalpt %s %s %s""" %\
                             (self.timestamp, option, pickled_backup_dbid_info, self.batch_size, self.backup_dir, pickled_per_database_persistent_files,
                              pickled_global_persistent_files, self.validate_source_files_only, verbose_logging)
                else:
                    cmdStr = """$GPHOME/sbin/gppersistent_backup.py --timestamp %s %s %s --batch-size %s --perdbpt %s --globalpt %s %s %s""" %\
                             (self.timestamp, option, pickled_backup_dbid_info, self.batch_size, pickled_per_database_persistent_files,
                              pickled_global_persistent_files, self.validate_source_files_only, verbose_logging)
                cmd = Command('backup pt files on a host', cmdStr=cmdStr, ctxt=REMOTE, remoteHost=host)
                self.pool.addCommand(cmd)
            self.pool.join()
            self._process_results(self.pool, err_msg)
        finally:
            # always tear the pool down, even when dispatch or validation fails
            self.pool.haltWork()
            self.pool.joinWorkers()
            self.pool = None
    def backup(self):
        """Run --backup on every host."""
        host_to_dbid_info_map = self._get_host_to_dbid_info_map()
        self._run_backup_restore(host_to_dbid_info_map)
    def restore(self):
        """Run --restore on every host."""
        host_to_dbid_info_map = self._get_host_to_dbid_info_map()
        self._run_backup_restore(host_to_dbid_info_map, restore=True)
    def validate_backups(self):
        """Run --validate-backups on every host."""
        host_to_dbid_info_map = self._get_host_to_dbid_info_map()
        self._run_backup_restore(host_to_dbid_info_map, validate_backups=True)
    def validate_backup_dir(self):
        """Run --validate-backup-dir on every host."""
        host_to_dbid_info_map = self._get_host_to_dbid_info_map()
        self._run_backup_restore(host_to_dbid_info_map, validate_backup_dir=True)
class RebuildPersistentTables(Operation):
def __init__(self, content_id, contentid_file, backup, restore, batch_size, backup_dir):
    """Record the command-line options and reset all derived state."""
    # options passed in from the command line
    self.content_id = content_id
    self.contentid_file = contentid_file
    self.backup = backup
    self.restore = restore
    self.batch_size = batch_size
    self.backup_dir = backup_dir
    # derived cluster state, filled in later during run()
    self.content_info = None
    self.gparray = None
    self.pool = None
    self.has_mirrors = False
    self.has_standby = False
    # fixed locations for pickled restore state and the gpperfmon guc dump
    self.restore_state_file_location = '/tmp'
    self.gpperfmon_file = '/tmp/gpperfmon_guc'
def _check_database_version(self):
    """
    Checks if the database version is greater than or equal to 4.1.0.0
    since the gp_persistent_reset_all and gp_persistent_build_all is
    not supported on earlier versions
    """
    gphome = os.environ.get('GPHOME')
    if gphome is None:
        raise Exception('GPHOME not set in the environment')
    db_version = gpversion.GpVersion(GpVersion.local('get version', gphome))
    if db_version < gpversion.GpVersion('4.1.0.0'):
        raise Exception('This tool is not supported on Greenplum version lower than 4.1.0.0')
def _stop_database(self):
    """
    Set the validateAfter to be False in case if there are any segments' postmaster
    process killed, cause gpstop will return non zero status code
    """
    GpStop('Stop the greenplum database', fast=True).run(validateAfter=False)
def _start_database(self, admin_mode=False):
    """
    If admin_mode is set to True, it starts the database in
    admin mode. Since gpstart does not have an option to start
    in admin mode, we first start the entire database, then
    we stop the master only and restart it in utility mode
    so that it does not allow any connections
    """
    # fixed: typo 'databse' in the command label
    cmd = GpStart('Start the greenplum database')
    cmd.run(validateAfter=True)
    if admin_mode:
        cmd = GpStop('Stop the greenplum database', masterOnly=True)
        cmd.run(validateAfter=True)
        cmd = GpStart('Start the greenplum master in admin mode', masterOnly=True)
        cmd.run(validateAfter=True)
def _check_platform(self):
    """
    Solaris platform will be deprecated soon, hence we
    choose to support it only on Linux and OSX.
    """
    if platform.system() not in ('Linux', 'Darwin'):
        raise Exception('This tool is only supported on Linux and OSX platforms')
def _validate_has_mirrors_and_standby(self):
    """
    Validate whether the system is configured with or without
    mirrors. This is required by gp_persistent_build_all function.
    If even a single segment does not have mirror, we consider that
    the entire system is configured without mirror. This does not
    apply to standby master
    """
    for seg in self.gparray.getDbList():
        # stop scanning once both facts are established
        if self.has_mirrors and self.has_standby:
            break
        content = seg.getSegmentContentId()
        if content != -1 and seg.isSegmentMirror():
            self.has_mirrors = True
        elif content == -1 and seg.isSegmentStandby():
            self.has_standby = True
def _check_md5_prog(self):
    """Verify the platform's md5 utility ('md5' on OSX, 'md5sum' elsewhere) is on PATH."""
    prog = 'md5sum'
    if platform.system() == 'Darwin':
        prog = 'md5'
    if not findCmdInPath(prog):
        raise Exception('Unable to find %s program. Please make sure it is in PATH' % prog)
def _get_persistent_table_filenames(self):
    """
    Populate the module-level GLOBAL_PERSISTENT_FILES and
    PER_DATABASE_PERSISTENT_FILES maps (host -> dbid -> relfilenodes) by
    querying pg_class on every primary in utility mode.  Mirrors (and the
    standby) reuse their primary's relfilenode lists since their files are
    kept identical by filerep.
    """
    GET_ALL_DATABASES = """select oid, datname from pg_database"""
    # oids 5094/5095: gp_relation_node and its index (per-database)
    PER_DATABASE_PT_FILES_QUERY = """SELECT relfilenode FROM pg_class WHERE oid IN (5094, 5095)"""
    # oids 5090-5093: the four global persistent tables
    GLOBAL_PT_FILES_QUERY = """SELECT relfilenode FROM pg_class WHERE oid IN (5090, 5091, 5092, 5093)"""
    content_to_primary_dbid_host_map = dict()
    # (hostname, port) -> list of (dbid, database oid, database name)
    databases = defaultdict(list)
    for dbidinfo in self.dbid_info:
        # only primaries are queried; mirrors are handled below by copying
        if dbidinfo.role == 'm':
            continue
        hostname = dbidinfo.hostname
        port = dbidinfo.port
        dbid = dbidinfo.dbid
        content = dbidinfo.content
        globalfiles = []
        content_to_primary_dbid_host_map[content] = dbid, hostname
        with dbconn.connect(dbconn.DbURL(dbname=DEFAULT_DATABASE, hostname=hostname, port=port), utility=True) as conn:
            res = dbconn.execSQL(conn, GLOBAL_PT_FILES_QUERY)
            for r in res:
                globalfiles.append(str(r[0]))
            res = dbconn.execSQL(conn, GET_ALL_DATABASES)
            for r in res:
                databases[hostname,port].append((dbid, r[0], r[1]))
        # all four global persistent tables must have pg_class entries
        if len(globalfiles) != 4:
            logger.error("Found: %s, expected: [gp_persistent_relation_node, gp_persistent_database_node,\
 gp_persistent_tablespace_node, gp_persistent_filespace_node]" % globalfiles)
            raise Exception("Missing relfilenode entry of global pesistent tables in pg_class, dbid %s" % dbid)
        GLOBAL_PERSISTENT_FILES[hostname][dbid] = globalfiles
    """
    We have to connect to each database in all segments to get the
    relfilenode ids for per db persistent files.
    """
    for hostname, port in databases:
        dblist = databases[(hostname, port)]
        # database oid -> [relfilenodes of gp_relation_node + its index]
        ptfiles_dboid = defaultdict(list)
        for dbid, dboid, database in dblist:
            if database == 'template0': #Connections to template0 are not allowed so we skip
                continue
            with dbconn.connect(dbconn.DbURL(dbname=database, hostname=hostname, port=port), utility=True) as conn:
                res = dbconn.execSQL(conn, PER_DATABASE_PT_FILES_QUERY)
                for r in res:
                    ptfiles_dboid[int(dboid)].append(str(r[0]))
            if int(dboid) not in ptfiles_dboid or len(ptfiles_dboid[int(dboid)]) != 2:
                # NOTE(review): the conditional expression binds to the whole
                # %-formatted string, so when the key is missing this logs just
                # "None" rather than the "Found: ..." prefix -- confirm intent
                logger.error("Found: %s, Expected: [gp_relation_node, gp_relation_node_index]" % \
                             ptfiles_dboid[int(dboid)] if int(dboid) in ptfiles_dboid else "None")
                raise Exception("Missing relfilenode entry of per database persistent tables in pg_class, dbid %s" % dbid)
            PER_DATABASE_PERSISTENT_FILES[hostname][dbid] = ptfiles_dboid
    """
    We also need to backup for mirrors and standby if they are configured
    """
    if self.has_mirrors or self.has_standby:
        for dbidinfo in self.dbid_info:
            if dbidinfo.role == 'm' and not dbidinfo.is_down: # Checking if the mirror is down
                content = dbidinfo.content
                mirror_dbid = dbidinfo.dbid
                mirror_hostname = dbidinfo.hostname
                # a mirror's persistent files mirror its primary's exactly
                primary_dbid, primary_hostname = content_to_primary_dbid_host_map[content]
                GLOBAL_PERSISTENT_FILES[mirror_hostname][mirror_dbid] = GLOBAL_PERSISTENT_FILES[primary_hostname][primary_dbid]
                PER_DATABASE_PERSISTENT_FILES[mirror_hostname][mirror_dbid] = PER_DATABASE_PERSISTENT_FILES[primary_hostname][primary_dbid]
    logger.debug('GLOBAL_PERSISTENT_FILES = %s' % GLOBAL_PERSISTENT_FILES)
    logger.debug('PER_DATABASE_PERSISTENT_FILES = %s' % PER_DATABASE_PERSISTENT_FILES)
def print_warning(self):
    """
    Prints out a warning to the user indicating that this tool should
    only be run by Pivotal support. It also asks for confirmation
    before proceeding.

    Raises an Exception if the user declines.
    """
    warning_msgs= ['****************************************************', 'This tool should only be run by Pivotal support.',
                   'Please contact Pivotal support for more information.', '****************************************************']
    for warning_msg in warning_msgs:
        logger.warning(warning_msg)
    # renamed from 'input', which shadowed the builtin of the same name
    proceed = ask_yesno(None, 'Do you still wish to continue ?', 'N')
    if not proceed:
        raise Exception('Aborting rebuild due to user request')
def dump_restore_info(self):
    """
    dump all object information into files, retrieve the information into object later on

    Each object is pickled to <restore_state_file_location>/<name>_<TIMESTAMP>.
    """
    state = (('dbid_info', self.dbid_info),
             ('global_pt_file', GLOBAL_PERSISTENT_FILES),
             ('per_db_pt_file', PER_DATABASE_PERSISTENT_FILES))
    for name, obj in state:
        target = os.path.join(self.restore_state_file_location, name + '_' + TIMESTAMP)
        with open(target, 'wb') as fw:
            pickle.dump(obj, fw)
def load_restore_info(self, timestamp=None):
    """
    load the object information from dump file

    Reads the three pickled state files written by dump_restore_info()
    for the given timestamp and rebinds self.dbid_info plus the
    module-level persistent-file maps.

    Raises an Exception when no timestamp is supplied (previously this
    failed with an opaque TypeError during path construction).
    """
    # declare the rebound module globals up front rather than mid-function
    global GLOBAL_PERSISTENT_FILES
    global PER_DATABASE_PERSISTENT_FILES
    if timestamp is None:
        raise Exception('A timestamp is required to load rebuild restore state')
    pt_restore_files = [os.path.join(self.restore_state_file_location, name + '_' + timestamp)
                        for name in ['dbid_info', 'global_pt_file', 'per_db_pt_file']]
    # NOTE: pickle.load is only safe because these files are written locally
    # by dump_restore_info(); never point this at untrusted input
    with open(pt_restore_files[0], 'rb') as fr:
        self.dbid_info = pickle.load(fr)
    with open(pt_restore_files[1], 'rb') as fr:
        GLOBAL_PERSISTENT_FILES = pickle.load(fr)
    with open(pt_restore_files[2], 'rb') as fr:
        PER_DATABASE_PERSISTENT_FILES = pickle.load(fr)
    def run(self):
        """
        Top-level driver for the persistent table (PT) rebuild tool.

        Flow: warn the user twice (so the tool is not run accidentally),
        validate the host environment, then either
          (a) --restore mode: reload saved PT file info, restore PT files
              and transaction logs from backup, restart the database; or
          (b) rebuild mode: gather PT file info, back everything up, rebuild
              the persistent tables in admin (utility) mode, restoring from
              the backup if the rebuild fails.
        """
        self.print_warning()
        self.print_warning()
        logger.info('Checking for platform')
        self._check_platform()
        logger.info('Checking for md5sum program')
        self._check_md5_prog()
        """
        If the restore fails, we do not attempt to restart the database since a restore is only done
        when the PT rebuild has not succeeded. It might be dangerous to start the database when the
        PT rebuild has failed in the middle and we cannot restore the original files safely.
        """
        if self.restore:
            logger.info('Loading global and per database persistent table files information from restore file, %s' % self.restore)
            self.load_restore_info(self.restore)
            """
            Before stopping the database, pre check all required persistent relfilenode and transaction logs exist from backup
            """
            logger.info('Verifying backup directory for required persistent relfilenodes and transaction logs to restore')
            try:
                # validate_only=True: checks the backup is complete without copying.
                RunBackupRestore(self.dbid_info, self.restore, self.batch_size, self.backup_dir, validate_only=True).restore()
            except Exception as e:
                raise
            """
            We want to stop the database so that we can restore the persistent table files.
            """
            logger.info('Stopping Greenplum database')
            self._stop_database()
            logger.info('Restoring persistent table files, and all transaction logs from backup %s' % self.restore)
            try:
                RunBackupRestore(self.dbid_info, self.restore, self.batch_size, self.backup_dir).restore()
            except Exception as e:
                raise
            else:
                # Only restart when the restore fully succeeded (see note above).
                logger.info('Starting Greenplum database')
                self._start_database()
            return
        # --- rebuild (or backup-only) mode from here on ---
        self.gparray = GpArray.initFromCatalog(dbconn.DbURL())
        logger.info('Checking for database version')
        self._check_database_version()
        logger.info('Validating if the system is configured with mirrors')
        self._validate_has_mirrors_and_standby()
        if self.has_mirrors:
            logger.info('System has been configured with mirrors')
        else:
            logger.info('System has been configured without mirrors')
        if self.has_standby:
            logger.info('System has been configured with standby')
        else:
            logger.info('System has been configured without standby')
        logger.info('Validating content ids')
        valid_contentids = ValidateContentID(self.content_id, self.contentid_file, self.gparray).validate()
        logger.info('Getting dbid information')
        self.dbid_info = GetDbIdInfo(self.gparray, valid_contentids).get_info()
        if self.backup_dir:
            logger.info('Validating backup directory')
            RunBackupRestore(self.dbid_info, TIMESTAMP, self.batch_size, self.backup_dir).validate_backup_dir()
        """
        We have to get the information about pt filenames from the master before we
        do any backup since the database will be down when we do a backup and this
        information is required in order to do the backup.
        """
        logger.info('Getting information about persistent table filenames')
        self._get_persistent_table_filenames()
        logger.info('Verifying data directory for required persistent relfilenodes and transaction logs to backup')
        try:
            RunBackupRestore(self.dbid_info, TIMESTAMP, self.batch_size, self.backup_dir, validate_only=True).backup()
        except Exception as e:
            raise
        """
        If we want to start persistent table rebuild instead of only making backup, first need to save
        the gpperfmon guc value into a file, then disable gpperfmon before shutdown cluster.
        """
        if not self.backup:
            self.dump_gpperfmon_guc()
            self.disable_gpperfmon()
        """
        We want to stop all transactions by pushing a checkpoint and stop the database so that we
        can backup the persistent table files.
        """
        logger.info('Pushing checkpoint')
        try:
            with dbconn.connect(dbconn.DbURL(dbname=DEFAULT_DATABASE, port=PGPORT)) as conn:
                dbconn.execSQL(conn, 'CHECKPOINT')
                conn.commit()
        except Exception as e:
            raise Exception('Failed to push a checkpoint, please contact support people')
        logger.info('Stopping Greenplum database')
        self._stop_database()
        """
        If a backup fails, we still attempt to restart the database since the original files are
        still present in their original location and we have not yet attempted to rebuild PT.
        """
        logger.info('Backing up persistent file, and all transaction log files')
        logger.info('Backup timestamp = %s' % TIMESTAMP)
        try:
            RunBackupRestore(self.dbid_info, TIMESTAMP, self.batch_size, self.backup_dir).backup()
            """
            After we have created the backup copies with timestamp, we save the restore information
            """
            logger.info('Dumpping restore information of global and per database persistent table files')
            self.dump_restore_info()
        except Exception as e:
            # Backup failed: undo the gpperfmon change (if any) and restart.
            if not self.backup:
                logger.info('Setting back gpperfmon guc')
                self.restore_gpperfmon_guc()
            logger.info('Starting Greenplum database')
            self._start_database()
            raise
        else:
            # Backup-only mode stops here: restart the database and return.
            if self.backup:
                logger.info('Starting Greenplum database')
                self._start_database()
                return
        finally:
            logger.info("To check list of files backed up, see pt_bkup_restore log under /tmp of segment host")
        """
        All the PT rebuild should be performed in utility mode in order to prevent
        user activity during the PT rebuild
        """
        logger.info('Starting database in admin mode')
        self._start_database(admin_mode=True)
        """
        Since the PT rebuild was done in admin mode, we need to restore the database
        back to the normal mode once the PT rebuild is complete.
        """
        logger.info('Starting rebuild of persistent tables')
        try:
            _, failures = RebuildTable(self.dbid_info, self.has_mirrors, self.batch_size, self.backup_dir).rebuild()
        finally:
            logger.info('Stopping Greenplum database that was started in admin mode')
            self._stop_database()
        if failures:
            """
            If the PT rebuild failed for any reason, we need to restore the original PT files and transaction
            log files.
            If the restore of the original PT files fails, we want to error out
            """
            logger.info('Restoring persistent table files, and all transaction log files')
            RunBackupRestore([f for f, e in failures], TIMESTAMP, self.batch_size, self.backup_dir).restore()
        """
        If we reach this point, either PT rebuild has completed successfully
        or we have succesfully replaced the original PT files and all transaction
        logs. Hence it is safe to restart the database
        """
        logger.info('Setting back gpperfmon guc')
        self.restore_gpperfmon_guc()
        logger.info('Starting Greenplum database')
        self._start_database()
        if failures:
            raise Exception('Persistent table rebuild was not completed succesfully and was restored back')
        else:
            logger.info('Completed rebuild of persistent tables')
            logger.info('To verify, run: $GPHOME/bin/lib/gpcheckcat -R persistent -A')
def dump_gpperfmon_guc(self):
"""
We want to save the gpperfmon guc value into a file, in case the rebuild process failed in the middle, so that
the restore process can still pick up its original value and reset back.
"""
GET_GPPERFMON_VALUE = """show gp_enable_gpperfmon;"""
with dbconn.connect(dbconn.DbURL(dbname=DEFAULT_DATABASE)) as conn:
res = dbconn.execSQL(conn, GET_GPPERFMON_VALUE)
for r in res:
gpperfmon_guc = r[0]
logger.debug('Got gp_enable_gpperfmon guc value: %s' % gpperfmon_guc)
logger.info('Dumping gp_enable_gpperfmon guc information into file: %s' % self.gpperfmon_file)
with open(self.gpperfmon_file, 'w') as fw:
fw.write('gp_enable_gpperfmon=%s'% gpperfmon_guc)
def disable_gpperfmon(self):
logger.info('Disabling gpperfmon')
cmd = Command(name = 'Run gpconfig to set gpperfmon guc value off', cmdStr = 'gpconfig -c gp_enable_gpperfmon -v off')
cmd.run(validateAfter = True)
def restore_gpperfmon_guc(self):
"""Read the guc value from dump file and reset it from postgresql.conf on master """
logger.debug('Retriving original gp_enable_gpperfmon guc value')
with open(self.gpperfmon_file, 'r') as fr:
content = fr.readlines();
gpperfmon_guc_info = content[0].strip()
postgres_config_file = os.path.join(os.environ.get('MASTER_DATA_DIRECTORY'), 'postgresql.conf')
logger.debug('Resetting guc %s' % gpperfmon_guc_info)
for line in fileinput.FileInput(postgres_config_file, inplace = 1):
line = re.sub('(\s*)gp_enable_gpperfmon(\s*)=(\w+)', gpperfmon_guc_info, line)
print str(re.sub('\n', '', line))
logger.info('Completed reset of guc gp_enable_gpperfmon')
if __name__ == '__main__':
    # Not meant to be executed directly; this module is driven via gppylib.
    pass
| {
"content_hash": "029935696059939c28de8a7fecd477a6",
"timestamp": "",
"source": "github",
"line_count": 1469,
"max_line_length": 161,
"avg_line_length": 45.047651463580664,
"alnum_prop": 0.5719984888553079,
"repo_name": "xuegang/gpdb",
"id": "ee2fd1a1b38eee4589d7dbd6a2a978032be02652",
"size": "66623",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "gpMgmt/bin/gppylib/operations/persistent_rebuild.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5614"
},
{
"name": "Batchfile",
"bytes": "11028"
},
{
"name": "C",
"bytes": "35361773"
},
{
"name": "C++",
"bytes": "8133472"
},
{
"name": "CMake",
"bytes": "47394"
},
{
"name": "CSS",
"bytes": "7068"
},
{
"name": "Csound Score",
"bytes": "179"
},
{
"name": "Cucumber",
"bytes": "927827"
},
{
"name": "DTrace",
"bytes": "1160"
},
{
"name": "FORTRAN",
"bytes": "14777"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Groff",
"bytes": "703079"
},
{
"name": "HTML",
"bytes": "218703"
},
{
"name": "Java",
"bytes": "1011277"
},
{
"name": "Lex",
"bytes": "210708"
},
{
"name": "M4",
"bytes": "106028"
},
{
"name": "Makefile",
"bytes": "497542"
},
{
"name": "Objective-C",
"bytes": "24186"
},
{
"name": "PLSQL",
"bytes": "190951"
},
{
"name": "PLpgSQL",
"bytes": "53337057"
},
{
"name": "Perl",
"bytes": "4082990"
},
{
"name": "Perl6",
"bytes": "14219"
},
{
"name": "Python",
"bytes": "9782036"
},
{
"name": "Ruby",
"bytes": "3301"
},
{
"name": "SQLPL",
"bytes": "1892720"
},
{
"name": "Shell",
"bytes": "504084"
},
{
"name": "XS",
"bytes": "8309"
},
{
"name": "XSLT",
"bytes": "5779"
},
{
"name": "Yacc",
"bytes": "485235"
}
],
"symlink_target": ""
} |
from sensordatainterface.base_views import *
from django.views.generic import DetailView
from sensordatainterface.forms import *
# Detail View Generic.
class GenericDetailView(DetailView):
    """Login-protected generic DetailView; anonymous users go to LOGIN_URL."""
    @method_decorator(login_required(login_url=LOGIN_URL))
    def dispatch(self, *args, **kwargs):
        return super(GenericDetailView, self).dispatch(*args, **kwargs)
class SiteVisitDetailView(DetailView):
    """
    Detail page for a single site visit (a FeatureAction), with links to
    the previous/next site visit at the same sampling feature.
    """
    model = FeatureAction
    slug_field = 'actionid'
    context_object_name = 'SiteVisit'
    template_name = 'site-visits/details.html'

    def _adjacent_visit(self, site_visits, current, newer):
        """Return the neighbouring site-visit Action at the same sampling
        feature (next when newer=True, previous otherwise), or None."""
        if newer:
            qs = site_visits.filter(
                actionid__gt=current.actionid.actionid,
                featureaction__samplingfeatureid=current.samplingfeatureid
            ).order_by('actionid')
        else:
            qs = site_visits.filter(
                actionid__lt=current.actionid.actionid,
                featureaction__samplingfeatureid=current.samplingfeatureid
            ).order_by('-actionid')
        # first() issues a LIMIT 1 query instead of evaluating the whole
        # queryset the way the previous len(qs) > 0 check did.
        return qs.first()

    def get_context_data(self, **kwargs):
        context = super(SiteVisitDetailView, self).get_context_data(**kwargs)
        site_visits = Action.objects.filter(actiontypecv='Site Visit',
                                            featureaction__isnull=False)
        current = context['SiteVisit']
        previous_visit = self._adjacent_visit(site_visits, current, newer=False)
        next_visit = self._adjacent_visit(site_visits, current, newer=True)
        # Templates expect either the neighbouring action id or False.
        context['previous_site_visit'] = (previous_visit.actionid
                                          if previous_visit else False)
        context['next_site_visit'] = (next_visit.actionid
                                      if next_visit else False)
        return context

    @method_decorator(login_required(login_url=LOGIN_URL))
    def dispatch(self, *args, **kwargs):
        return super(SiteVisitDetailView, self).dispatch(*args, **kwargs)
# Deployment Details needs it's own view since it depends on samplingfeatureid and equipmentid
class DeploymentDetail(DetailView):
    """
    Detail page for an equipment/instrument deployment Action.

    Previous/next navigation between deployments was explored but never
    finished; the approach being considered is described at
    http://stackoverflow.com/questions/4034053/how-do-you-limit-get-next-by-foo-inside-a-django-view-code-included
    """
    queryset = Action.objects.filter(Q(actiontypecv='Instrument deployment') |
                                     Q(actiontypecv='Equipment deployment'))
    slug_field = 'actionid'
    context_object_name = 'Deployment'
    template_name = 'site-visits/deployment/details.html'

    def get_context_data(self, **kwargs):
        # No extra context beyond what DetailView already provides.
        return super(DeploymentDetail, self).get_context_data(**kwargs)

    @method_decorator(login_required(login_url=LOGIN_URL))
    def dispatch(self, *args, **kwargs):
        return super(DeploymentDetail, self).dispatch(*args, **kwargs)
# Deployment Measured Variable detail view
class DeploymentMeasVariableDetailView(DetailView):
    """
    Detail page for a measured variable (InstrumentOutputVariable) within a
    deployment; URL kwargs supply the feature-action and equipment-used ids.
    """
    context_object_name = 'MeasuredVariable'
    model = InstrumentOutputVariable
    template_name = 'sites/measured-variable-details.html'
    queryset = InstrumentOutputVariable.objects

    def get_context_data(self, **kwargs):
        context = super(DeploymentMeasVariableDetailView, self).get_context_data(**kwargs)
        feature_action = FeatureAction.objects.get(pk=self.kwargs['featureaction'])
        deployment = EquipmentUsed.objects.get(pk=self.kwargs['equipmentused'])
        equipment = deployment.equipmentid
        context['site_id'] = feature_action.samplingfeatureid
        context['deployment'] = deployment
        context['equipment'] = equipment
        context['model'] = equipment.equipmentmodelid
        context['datalogger_file_column'] = DataloggerFileColumn.objects.filter(
            instrumentoutputvariableid=context['MeasuredVariable'])
        return context
"content_hash": "a44caf032aea72aebd7c3ff38e87da5c",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 120,
"avg_line_length": 42.61467889908257,
"alnum_prop": 0.6753498385360602,
"repo_name": "UCHIC/ODM2Sensor",
"id": "f0f0eca396d1a1fda6b97e63a63cdc0d1d022c18",
"size": "4645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sensordatainterface/views/detail_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "9372"
},
{
"name": "HTML",
"bytes": "150935"
},
{
"name": "JavaScript",
"bytes": "46763"
},
{
"name": "Python",
"bytes": "485676"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: relax lote.socio so the FK is optional
    # (blank and null both allowed).
    dependencies = [
        ('sistema', '0019_auto_20161027_1147'),
    ]
    operations = [
        migrations.AlterField(
            model_name='lote',
            name='socio',
            field=models.ForeignKey(blank=True, to='sistema.Socio', null=True),
        ),
    ]
| {
"content_hash": "e37f432d1f76b95804152326af38806c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 79,
"avg_line_length": 22.444444444444443,
"alnum_prop": 0.5965346534653465,
"repo_name": "gabrielf10/webAmpunc",
"id": "cab6355441164cdad6ac43e4a66bce6d49848a4a",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sistema/migrations/0020_auto_20161030_1413.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "7387"
},
{
"name": "CSS",
"bytes": "133519"
},
{
"name": "HTML",
"bytes": "34001"
},
{
"name": "JavaScript",
"bytes": "578311"
},
{
"name": "PAWN",
"bytes": "2386"
},
{
"name": "Python",
"bytes": "74253"
}
],
"symlink_target": ""
} |
import tests.model_control.test_ozone_custom_models_enabled as testmod

# Build one pyaf model combination on the ozone dataset:
# Difference transform, Lag1Trend trend, BestCycle cycle, MLP AR model.
testmod.build_model( ['Difference'] , ['Lag1Trend'] , ['BestCycle'] , ['MLP'] );
"content_hash": "5b1635643bb4b1d75f9f0c07eb65b183",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 80,
"avg_line_length": 38.25,
"alnum_prop": 0.7058823529411765,
"repo_name": "antoinecarme/pyaf",
"id": "3c466da56d4f1810997d3eacf22331937b7a9901",
"size": "153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_Difference/model_control_one_enabled_Difference_Lag1Trend_BestCycle_MLP.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
import datetime
import logging
from datetime import timedelta
from django import forms
from django.core.validators import ValidationError
from django.db.models import Q
from django.forms import widgets
from django.utils.translation import get_language
import happyforms
import jinja2
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
import constants.editors as rvw
from addons.models import Addon, Persona
from amo.urlresolvers import reverse
from amo.utils import raise_required
from applications.models import AppVersion
from editors.helpers import (file_review_status, ReviewAddon, ReviewFiles,
ReviewHelper)
from editors.models import CannedResponse, ReviewerScore, ThemeLock
from editors.tasks import approve_rereview, reject_rereview, send_mail
from files.models import File
log = logging.getLogger('z.reviewers.forms')

# Choices for the event-log "filter by type/action" dropdown.
ACTION_FILTERS = (('', ''), ('approved', _lazy(u'Approved reviews')),
                  ('deleted', _lazy(u'Deleted reviews')))

# Maps each filter key to the amo.LOG action used when querying the log.
ACTION_DICT = dict(approved=amo.LOG.APPROVE_REVIEW,
                   deleted=amo.LOG.DELETE_REVIEW)
class EventLogForm(happyforms.Form):
    """Date-range plus action filter for the reviewer event log."""
    start = forms.DateField(required=False,
                            label=_lazy(u'View entries between'))
    end = forms.DateField(required=False,
                          label=_lazy(u'and'))
    filter = forms.ChoiceField(required=False, choices=ACTION_FILTERS,
                               label=_lazy(u'Filter by type/action'))

    def clean(self):
        data = self.cleaned_data
        # Bump the end date by one day so the range includes it.
        if data.get('end'):
            data['end'] += timedelta(days=1)
        # Translate the filter key into the corresponding log action.
        if data.get('filter'):
            data['filter'] = ACTION_DICT[data['filter']]
        return data
class BetaSignedLogForm(happyforms.Form):
    """Filter form for the beta-signing log: passed/failed auto-validation."""
    VALIDATION_CHOICES = (
        ('', ''),
        (amo.LOG.BETA_SIGNED_VALIDATION_PASSED.id,
         _lazy(u'Passed automatic validation')),
        (amo.LOG.BETA_SIGNED_VALIDATION_FAILED.id,
         _lazy(u'Failed automatic validation')))
    filter = forms.ChoiceField(required=False, choices=VALIDATION_CHOICES,
                               label=_lazy(u'Filter by automatic validation'))
class ReviewLogForm(happyforms.Form):
    """Date-range plus free-text search form for the review log."""
    start = forms.DateField(required=False,
                            label=_lazy(u'View entries between'))
    end = forms.DateField(required=False, label=_lazy(u'and'))
    search = forms.CharField(required=False, label=_lazy(u'containing'))

    def __init__(self, *args, **kw):
        super(ReviewLogForm, self).__init__(*args, **kw)
        # L10n: start, as in "start date"
        self.fields['start'].widget.attrs = {'placeholder': _('start'),
                                             'size': 10}
        # L10n: end, as in "end date"
        self.fields['end'].widget.attrs = {'size': 10, 'placeholder': _('end')}
        # L10n: Description of what can be searched for
        self.fields['search'].widget.attrs = {
            'placeholder': _('add-on, editor or comment'),
            'size': 30}

    def clean(self):
        data = self.cleaned_data
        # Bump the end date by one day so the range includes it.
        if data.get('end'):
            data['end'] += timedelta(days=1)
        return data
class QueueSearchForm(happyforms.Form):
    """
    Search/filter form for the editor review queues.

    filter_qs() applies the cleaned filters to the raw-SQL-backed queue
    queryset, extending its base query with the joins each filter needs.
    """
    text_query = forms.CharField(
        required=False,
        label=_lazy(u'Search by add-on name / author email'))
    searching = forms.BooleanField(widget=forms.HiddenInput, required=False,
                                   initial=True)
    admin_review = forms.ChoiceField(required=False,
                                     choices=[('', ''),
                                              ('1', _lazy(u'yes')),
                                              ('0', _lazy(u'no'))],
                                     label=_lazy(u'Admin Flag'))
    application_id = forms.ChoiceField(
        required=False,
        label=_lazy(u'Application'),
        choices=([('', '')] +
                 [(a.id, a.pretty) for a in amo.APPS_ALL.values()]))
    max_version = forms.ChoiceField(
        required=False,
        label=_lazy(u'Max. Version'),
        choices=[('', _lazy(u'Select an application first'))])
    waiting_time_days = forms.ChoiceField(
        required=False,
        label=_lazy(u'Days Since Submission'),
        choices=([('', '')] +
                 [(i, i) for i in range(1, 10)] + [('10+', '10+')]))
    addon_type_ids = forms.MultipleChoiceField(
        required=False,
        label=_lazy(u'Add-on Types'),
        choices=((id, tp) for id, tp in amo.ADDON_TYPES.items()))
    platform_ids = forms.MultipleChoiceField(
        required=False,
        label=_lazy(u'Platforms'),
        choices=[(p.id, p.name)
                 for p in amo.PLATFORMS.values()
                 if p not in (amo.PLATFORM_ANY, amo.PLATFORM_ALL)])

    def __init__(self, *args, **kw):
        super(QueueSearchForm, self).__init__(*args, **kw)
        w = self.fields['application_id'].widget
        # Get the URL after the urlconf has loaded.
        w.attrs['data-url'] = reverse('editors.application_versions_json')

    def version_choices_for_app_id(self, app_id):
        """Return ('', '') plus (version, version) choices for an app."""
        versions = AppVersion.objects.filter(application=app_id)
        return [('', '')] + [(v.version, v.version) for v in versions]

    def clean_addon_type_ids(self):
        if self.cleaned_data['addon_type_ids']:
            # Remove "Any Addon Extension" from the list so that no filter
            # is applied in that case.
            ids = set(self.cleaned_data['addon_type_ids'])
            # Bug fix: the previous set(str(amo.ADDON_ANY)) built a set of
            # the *characters* of the id (only correct for one-digit ids);
            # we want a one-element set holding the id string itself.
            self.cleaned_data['addon_type_ids'] = ids - set([str(amo.ADDON_ANY)])
        return self.cleaned_data['addon_type_ids']

    def clean_application_id(self):
        # Populate max_version's choices for the selected application so
        # its own validation can succeed.
        if self.cleaned_data['application_id']:
            choices = self.version_choices_for_app_id(
                self.cleaned_data['application_id'])
            self.fields['max_version'].choices = choices
        return self.cleaned_data['application_id']

    def clean_max_version(self):
        # max_version only makes sense relative to a chosen application.
        if self.cleaned_data['max_version']:
            if not self.cleaned_data['application_id']:
                raise forms.ValidationError("No application selected")
        return self.cleaned_data['max_version']

    def filter_qs(self, qs):
        """Apply the cleaned search filters to the queue queryset."""
        data = self.cleaned_data
        if data['admin_review']:
            qs = qs.filter(admin_review=data['admin_review'])
        if data['addon_type_ids']:
            qs = qs.filter_raw('addon_type_id IN', data['addon_type_ids'])
        if data['application_id']:
            qs = qs.filter_raw('apps_match.application_id =',
                               data['application_id'])
            # We join twice so it includes all apps, and not just the ones
            # filtered by the search criteria.
            app_join = ('LEFT JOIN applications_versions apps_match ON '
                        '(versions.id = apps_match.version_id)')
            qs.base_query['from'].extend([app_join])
        if data['max_version']:
            joins = ["""JOIN applications_versions vs
                        ON (versions.id = vs.version_id)""",
                     """JOIN appversions max_version
                        ON (max_version.id = vs.max)"""]
            qs.base_query['from'].extend(joins)
            qs = qs.filter_raw('max_version.version =',
                               data['max_version'])
        if data['platform_ids']:
            qs = qs.filter_raw('files.platform_id IN', data['platform_ids'])
            # Adjust _file_platform_ids so that it includes ALL platforms
            # not the ones filtered by the search criteria:
            qs.base_query['from'].extend([
                """LEFT JOIN files all_files
                   ON (all_files.version_id = versions.id)"""])
            group = 'GROUP_CONCAT(DISTINCT all_files.platform_id)'
            qs.base_query['select']['_file_platform_ids'] = group
        if data['text_query']:
            lang = get_language()
            joins = [
                'LEFT JOIN addons_users au on (au.addon_id = addons.id)',
                'LEFT JOIN users u on (u.id = au.user_id)',
                """LEFT JOIN translations AS supportemail_default ON
                        (supportemail_default.id = addons.supportemail AND
                         supportemail_default.locale=addons.defaultlocale)""",
                """LEFT JOIN translations AS supportemail_local ON
                        (supportemail_local.id = addons.supportemail AND
                         supportemail_local.locale=%%(%s)s)""" %
                qs._param(lang),
                """LEFT JOIN translations AS ad_name_local ON
                        (ad_name_local.id = addons.name AND
                         ad_name_local.locale=%%(%s)s)""" %
                qs._param(lang)]
            qs.base_query['from'].extend(joins)
            fuzzy_q = u'%' + data['text_query'] + u'%'
            qs = qs.filter_raw(
                Q('addon_name LIKE', fuzzy_q) |
                # Search translated add-on names / support emails in
                # the editor's locale:
                Q('ad_name_local.localized_string LIKE', fuzzy_q) |
                Q('supportemail_default.localized_string LIKE', fuzzy_q) |
                Q('supportemail_local.localized_string LIKE', fuzzy_q) |
                Q('au.role IN', [amo.AUTHOR_ROLE_OWNER,
                                 amo.AUTHOR_ROLE_DEV],
                  'u.email LIKE', fuzzy_q))
        if data['waiting_time_days']:
            if data['waiting_time_days'] == '10+':
                # Special case
                args = ('waiting_time_days >=',
                        int(data['waiting_time_days'][:-1]))
            else:
                args = ('waiting_time_days <=', data['waiting_time_days'])
            qs = qs.having(*args)
        return qs
class AddonFilesMultipleChoiceField(forms.ModelMultipleChoiceField):
    """File picker whose labels show platform, filename and review status."""
    def label_from_instance(self, addon_file):
        addon = addon_file.version.addon
        # L10n: 0 = platform, 1 = filename, 2 = status message
        label_parts = (addon_file.get_platform_display(),
                       addon_file.filename,
                       file_review_status(addon, addon_file))
        return jinja2.Markup(_(u"<strong>%s</strong> · %s · %s")
                             % label_parts)
class NonValidatingChoiceField(forms.ChoiceField):
    """A ChoiceField that doesn't validate."""
    def validate(self, value):
        # Intentionally accept any value; the choices are advisory only.
        pass
class ReviewAddonForm(happyforms.Form):
    """
    Main review form: pick files, an action, an optional canned response
    and comments for reviewing an add-on version. Requires a 'helper'
    (ReviewHelper) kwarg supplying the files and available actions.
    """
    addon_files = AddonFilesMultipleChoiceField(
        required=False,
        queryset=File.objects.none(), label=_lazy(u'Files:'),
        widget=forms.CheckboxSelectMultiple())
    comments = forms.CharField(required=True, widget=forms.Textarea(),
                               label=_lazy(u'Comments:'))
    canned_response = NonValidatingChoiceField(required=False)
    action = forms.ChoiceField(required=True, widget=forms.RadioSelect())
    operating_systems = forms.CharField(required=False,
                                        label=_lazy(u'Operating systems:'))
    applications = forms.CharField(required=False,
                                   label=_lazy(u'Applications:'))
    notify = forms.BooleanField(required=False,
                                label=_lazy(u'Notify me the next time this '
                                            'add-on is updated. (Subsequent '
                                            'updates will not generate an '
                                            'email)'))
    adminflag = forms.BooleanField(required=False,
                                   label=_lazy(u'Clear Admin Review Flag'))
    clear_info_request = forms.BooleanField(
        required=False, label=_lazy(u'Clear more info requested flag'))

    def is_valid(self):
        # On success, push the cleaned data into the helper as a side
        # effect so the view can act on it directly.
        result = super(ReviewAddonForm, self).is_valid()
        if result:
            self.helper.set_data(self.cleaned_data)
        return result

    def __init__(self, *args, **kw):
        self.helper = kw.pop('helper')
        self.type = kw.pop('type', amo.CANNED_RESPONSE_ADDON)
        super(ReviewAddonForm, self).__init__(*args, **kw)
        self.fields['addon_files'].queryset = self.helper.all_files
        self.addon_files_disabled = (
            self.helper.all_files
            # We can't review disabled, and public are already reviewed.
            .filter(status__in=[amo.STATUS_DISABLED, amo.STATUS_PUBLIC])
            .values_list('pk', flat=True))
        # Group canned responses by review action for the dropdown.
        # We're starting with an empty one, which will be hidden via CSS.
        canned_choices = [['', [('', _('Choose a canned response...'))]]]
        responses = CannedResponse.objects.filter(type=self.type)
        # Loop through the actions (prelim, public, etc).
        for k, action in self.helper.actions.iteritems():
            action_choices = [[c.response, c.name] for c in responses
                              if c.sort_group and k in c.sort_group.split(',')]
            # Add the group of responses to the canned_choices array.
            if action_choices:
                canned_choices.append([action['label'], action_choices])
        # Now, add everything not in a group.
        for r in responses:
            if not r.sort_group:
                canned_choices.append([r.response, r.name])
        self.fields['canned_response'].choices = canned_choices
        self.fields['action'].choices = [
            (k, v['label']) for k, v in self.helper.actions.items()]
class ReviewFileForm(ReviewAddonForm):
    """ReviewAddonForm variant that validates the selected files."""

    def clean_addon_files(self):
        """For 'prelim' actions, require at least one selected file and
        check each one is actually awaiting review; return the matching
        queryset of files."""
        files = self.data.getlist('addon_files')
        if self.data.get('action', '') == 'prelim':
            if not files:
                raise ValidationError(_('You must select some files.'))
            for pk in files:
                # Renamed from 'file', which shadowed the builtin.
                addon_file = self.helper.all_files.get(pk=pk)
                # NOTE(review): the second clause can never be true when the
                # first holds (status both != and == STATUS_UNREVIEWED) —
                # looks like a different status was intended; confirm.
                if (addon_file.status != amo.STATUS_UNREVIEWED and not
                    (self.helper.addon.status == amo.STATUS_LITE and
                     addon_file.status == amo.STATUS_UNREVIEWED)):
                    raise ValidationError(_('File %s is not pending review.')
                                          % addon_file.filename)
        return self.fields['addon_files'].queryset.filter(pk__in=files)
def get_review_form(data, request=None, addon=None, version=None):
    """Build the right review form (whole-addon vs per-file) for this review."""
    helper = ReviewHelper(request=request, addon=addon, version=version)
    # The helper's handler type decides which form class applies.
    form_classes = {ReviewAddon: ReviewAddonForm,
                    ReviewFiles: ReviewFileForm}
    form_class = form_classes[helper.handler.__class__]
    return form_class(data, helper=helper)
class MOTDForm(happyforms.Form):
    """Edit the reviewers' message of the day."""
    motd = forms.CharField(required=True, widget=widgets.Textarea())
class DeletedThemeLogForm(ReviewLogForm):
    """ReviewLogForm variant whose search box targets deleted theme names."""
    def __init__(self, *args, **kwargs):
        super(DeletedThemeLogForm, self).__init__(*args, **kwargs)
        self.fields['search'].widget.attrs = {
            # L10n: Description of what can be searched for.
            'placeholder': _lazy(u'theme name'),
            'size': 30}
class ThemeReviewForm(happyforms.Form):
    """
    Review form for a single theme (Persona). Validation requires the
    reviewer to hold the ThemeLock for the theme; save() applies the
    chosen action, mails/logs it, awards reviewer points and releases
    the lock.
    """
    theme = forms.ModelChoiceField(queryset=Persona.objects.all(),
                                   widget=forms.HiddenInput())
    action = forms.TypedChoiceField(
        choices=rvw.REVIEW_ACTIONS.items(),
        widget=forms.HiddenInput(attrs={'class': 'action'}),
        coerce=int, empty_value=None
    )
    # Duplicate is the same as rejecting but has its own flow.
    reject_reason = forms.TypedChoiceField(
        choices=rvw.THEME_REJECT_REASONS.items() + [('duplicate', '')],
        widget=forms.HiddenInput(attrs={'class': 'reject-reason'}),
        required=False, coerce=int, empty_value=None)
    comment = forms.CharField(
        required=False, widget=forms.HiddenInput(attrs={'class': 'comment'}))

    def clean_theme(self):
        # A missing ThemeLock means this reviewer no longer holds the theme.
        theme = self.cleaned_data['theme']
        try:
            ThemeLock.objects.get(theme=theme)
        except ThemeLock.DoesNotExist:
            raise forms.ValidationError(
                _('Someone else is reviewing this theme.'))
        return theme

    def clean_reject_reason(self):
        # A rejection must carry a reason.
        reject_reason = self.cleaned_data.get('reject_reason', None)
        if (self.cleaned_data.get('action') == rvw.ACTION_REJECT
            and reject_reason is None):
            raise_required()
        return reject_reason

    def clean_comment(self):
        # Comment field needed for duplicate, flag, moreinfo, and other reject
        # reason.
        action = self.cleaned_data.get('action')
        reject_reason = self.cleaned_data.get('reject_reason')
        comment = self.cleaned_data.get('comment')
        if (not comment and (action == rvw.ACTION_FLAG or
                             action == rvw.ACTION_MOREINFO or
                             (action == rvw.ACTION_REJECT and
                              reject_reason == 0))):
            raise_required()
        return comment

    def save(self):
        """Apply the review action; returns the reviewer points awarded."""
        action = self.cleaned_data['action']
        comment = self.cleaned_data.get('comment')
        reject_reason = self.cleaned_data.get('reject_reason')
        theme = self.cleaned_data['theme']
        # A theme is in "rereview" when it sits in the rereview queue and is
        # not in a pending state.
        is_rereview = (
            theme.rereviewqueuetheme_set.exists() and
            theme.addon.status not in (amo.STATUS_PENDING,
                                       amo.STATUS_REVIEW_PENDING))
        theme_lock = ThemeLock.objects.get(theme=self.cleaned_data['theme'])
        mail_and_log = True
        if action == rvw.ACTION_APPROVE:
            if is_rereview:
                approve_rereview(theme)
            theme.addon.update(status=amo.STATUS_PUBLIC)
            theme.approve = datetime.datetime.now()
            theme.save()
        elif action in (rvw.ACTION_REJECT, rvw.ACTION_DUPLICATE):
            if is_rereview:
                reject_rereview(theme)
            else:
                theme.addon.update(status=amo.STATUS_REJECTED)
        elif action == rvw.ACTION_FLAG:
            # Flagging a rereview is a no-op (no mail, no log).
            if is_rereview:
                mail_and_log = False
            else:
                theme.addon.update(status=amo.STATUS_REVIEW_PENDING)
        elif action == rvw.ACTION_MOREINFO:
            if not is_rereview:
                theme.addon.update(status=amo.STATUS_REVIEW_PENDING)
        if mail_and_log:
            send_mail(self.cleaned_data, theme_lock)
            # Log.
            amo.log(amo.LOG.THEME_REVIEW, theme.addon, details={
                'theme': theme.addon.name.localized_string,
                'action': action,
                'reject_reason': reject_reason,
                'comment': comment}, user=theme_lock.reviewer)
            log.info('%sTheme %s (%s) - %s' % (
                '[Rereview] ' if is_rereview else '', theme.addon.name,
                theme.id, action))
        # Only terminal decisions earn reviewer points.
        score = 0
        if action in (rvw.ACTION_REJECT, rvw.ACTION_DUPLICATE,
                      rvw.ACTION_APPROVE):
            score = ReviewerScore.award_points(
                theme_lock.reviewer, theme.addon, theme.addon.status)
        theme_lock.delete()
        return score
class ThemeSearchForm(forms.Form):
    """Free-text search within a theme review queue."""
    q = forms.CharField(
        required=False, label=_lazy(u'Search'),
        widget=forms.TextInput(attrs={'autocomplete': 'off',
                                      'placeholder': _lazy(u'Search')}))
    # Carries which queue is being searched across requests.
    queue_type = forms.CharField(required=False, widget=forms.HiddenInput())
class ReviewAppLogForm(ReviewLogForm):
    """ReviewLogForm variant whose search box targets app review entries."""
    def __init__(self, *args, **kwargs):
        super(ReviewAppLogForm, self).__init__(*args, **kwargs)
        self.fields['search'].widget.attrs = {
            # L10n: Description of what can be searched for.
            'placeholder': _lazy(u'app, reviewer, or comment'),
            'size': 30}
class WhiteboardForm(forms.ModelForm):
    """Edit the reviewer whiteboard notes on an add-on."""
    class Meta:
        model = Addon
        fields = ['whiteboard']
| {
"content_hash": "dde2f14313d0255c187fe3406acdda75",
"timestamp": "",
"source": "github",
"line_count": 482,
"max_line_length": 79,
"avg_line_length": 41.558091286307054,
"alnum_prop": 0.5659727422495132,
"repo_name": "Nolski/olympia",
"id": "596a60248d16ae20ece8e8bcf2cfee74458c08c8",
"size": "20031",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "apps/editors/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "249"
},
{
"name": "C",
"bytes": "4145"
},
{
"name": "CSS",
"bytes": "656398"
},
{
"name": "HTML",
"bytes": "1632540"
},
{
"name": "JavaScript",
"bytes": "1288996"
},
{
"name": "Makefile",
"bytes": "3574"
},
{
"name": "PLSQL",
"bytes": "74"
},
{
"name": "Python",
"bytes": "3928736"
},
{
"name": "Shell",
"bytes": "10335"
},
{
"name": "Smarty",
"bytes": "2229"
}
],
"symlink_target": ""
} |
from oslo.config import cfg
# Options for the sqlalchemy storage driver.  The default URI keeps all
# data in an in-memory SQLite database.
SQLALCHEMY_OPTIONS = (
    cfg.StrOpt('uri', default='sqlite:///:memory:',
               help='An sqlalchemy URL'),
)

# Config-file group under which the options above are registered.
SQLALCHEMY_GROUP = 'drivers:storage:sqlalchemy'
def _config_options():
    """Return the (group, options) pairs this driver registers."""
    pairs = [(SQLALCHEMY_GROUP, SQLALCHEMY_OPTIONS)]
    return pairs
| {
"content_hash": "e0e4a3d5f7bf6c27f9eec46dcd9dc8c9",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 51,
"avg_line_length": 22.833333333333332,
"alnum_prop": 0.6678832116788321,
"repo_name": "rackerlabs/marconi",
"id": "2963afc8aa3af0afec63eec703115dce91021944",
"size": "856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "marconi/queues/storage/sqlalchemy/options.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "663829"
}
],
"symlink_target": ""
} |
"""
Config sharing convention.
Reports service config protected by basic auth for securely running services
locally with realistic config.
"""
from json import dumps, loads
from microcosm.loaders.compose import PartitioningLoader
from microcosm_flask.audit import skip_logging
from microcosm_flask.conventions.base import Convention
from microcosm_flask.conventions.encoding import make_response
from microcosm_flask.namespaces import Namespace
from microcosm_flask.operations import Operation
class Config:
    """Read-only wrapper over the service's configuration state."""

    def __init__(self, graph, include_build_info=True):
        # NOTE(review): include_build_info is accepted but currently unused.
        self.graph = graph
        self.name = graph.metadata.name

    def to_dict(self):
        """Serialize the service's non-secret config as plain JSON data.

        Returns an explanatory message dict instead when config sharing is
        unavailable for the current loader.
        """
        loader = self.graph.loader
        if not isinstance(loader, PartitioningLoader):
            # NOTE(review): "non-partioned" typo is user-visible; left as-is
            # here to preserve behavior — fix separately if desired.
            return dict(msg="Config sharing disabled for non-partioned loader")
        if not hasattr(loader, "secrets"):
            return dict(msg="Config sharing disabled if no secrets are labelled")

        def strip_none(mapping):
            return {
                key: value
                for key, value in mapping.items()
                if value is not None
            }

        # Round-trip through JSON: unserializable values become None, then
        # the object_hook drops every None-valued entry.
        encoded = dumps(loader.config, skipkeys=True, default=lambda obj: None)
        return loads(encoded, object_hook=strip_none)
class ConfigDiscoveryConvention(Convention):
    """Convention that exposes the service config via a retrieve endpoint."""

    def __init__(self, graph):
        super(ConfigDiscoveryConvention, self).__init__(graph)
        self.config_discovery = Config(graph)

    def configure_retrieve(self, ns, definition):
        @self.add_route(ns.singleton_path, Operation.Retrieve, ns)
        @skip_logging
        def current_config_discovery():
            # Audit logging is skipped on purpose for this endpoint.
            body = self.config_discovery.to_dict()
            return make_response(body, status_code=200)
def configure_config(graph):
    """
    Configure the config-discovery endpoint.

    :returns: the current service configuration wrapper
    """
    ns = Namespace(
        subject=Config,
    )
    convention = ConfigDiscoveryConvention(
        graph,
    )
    # Only the Retrieve operation is wired up, with no extra arguments.
    convention.configure(ns, retrieve=tuple())
    return convention.config_discovery
| {
"content_hash": "e87c683655b1ea1ea4c7b6b5f7effd13",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 85,
"avg_line_length": 29.05263157894737,
"alnum_prop": 0.6802536231884058,
"repo_name": "globality-corp/microcosm-flask",
"id": "160a880b322bfcfafca6201882bf9016fa0be297",
"size": "2208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "microcosm_flask/conventions/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3203"
},
{
"name": "Python",
"bytes": "359772"
},
{
"name": "Shell",
"bytes": "1574"
}
],
"symlink_target": ""
} |
"""
Script to pull the latest changes from the SerGIS Server git repository and
put them in the web directory.
The defaults here assume:
- IIS with iisnode
- SerGIS Socket Server service set up through NSSM
But it can be easily modified for a different environment.
Before running this, make sure to set the configuration variables.
Usage:
python2 update_sergis-server.py
Fully update the server web directory by:
1. Pull the latest changes in the git repo.
2. Pull the latest changes in the git submodules.
3. Run `npm install` in the git repo directory.
4. Run `grunt dist` in the git repo directory.
5. Stop the SerGIS Socket Server service.
6. Set IIS permissions on the web directory.
7. Clear out the web directory.
8. Copy from the git repo to the web directory.
9. Copy config files to the web directory.
10. Create the `uploads` directory.
11. Start the SerGIS Socket Server service.
python2 update_sergis-server.py lite
Update the server web directory with the latest changes from the git repo
without reinstalling all dependencies.
1. Pull the latest changes in the git repo.
2. Pull the latest changes in the git submodules.
3. Run `grunt dist` in the git repo directory.
4. Stop the SerGIS Socket Server service.
5. Copy from the git repo to the web directory.
6. Copy config files to the web directory.
7. Start the SerGIS Socket Server service.
"""
import os, os.path, subprocess, shutil, sys, time
import win32com.shell.shell as shell
################################################################################
## Configuration
################################################################################

# Whether we are running the "lite" version (see above)
LITE = False
if len(sys.argv) > 1 and sys.argv[1] == "lite":
    LITE = True

# The directory where the sergis-server git repo is
GIT_REPO = "C:\\sergis_files\\git\\sergis-server"

# The submodules in the sergis-server git repo
GIT_SUBMODULES = [
    os.path.join(GIT_REPO, "sergis-client"),
    os.path.join(GIT_REPO, "sergis-author")
]

# The web directory (where we're "publishing" to)
WEB_DIR = "C:\\sergis-server"

# When we're clearing out the web directory, which files/folders to ignore (i.e. keep)
WEB_DIR_CLEAR_IGNORE = ["iisnode"]

# The config.js file for SerGIS Server (to overwrite the repo default)
CONFIG_JS = "C:\\sergis_files\\sergis-server-config.js"

# Config files to copy to the web directory: [(from, to), ...]
CONFIG_FILES = [
    # The node.js server config file
    (CONFIG_JS, os.path.join(WEB_DIR, "config.js")),
    # The iisnode config file (here, it's relative to the script location)
    (os.path.join(os.path.dirname(os.path.realpath(__file__)), "sergis-server-web.config"), os.path.join(WEB_DIR, "web.config"))
]

# Config files required to run grunt: [(from, to), ...]
GRUNT_CONFIG_FILES = [
    # The node.js server config file
    (CONFIG_JS, os.path.join(GIT_REPO, "config.js"))
]

# Ignored files (to not copy from the repo to the web directory)
GIT_REPO_IGNORE = [".git", "config.js", ".gitignore", ".gitmodules"]
# Additional ignored files if we're running in "lite mode"
GIT_REPO_IGNORE_LITE = ["node_modules"]

# The directory to the nodejs installation
NODE_DIR = "C:\\Program Files\\nodejs"

# The location of the grunt command
#GRUNT_PATH = os.path.expanduser("~\\AppData\\Roaming\\npm\\grunt.cmd")
GRUNT_PATH = "C:\\ProgramData\\npm\\grunt.cmd"

# The location of NSSM (see http://nssm.co/)
NSSM_PATH = "C:\\nssm\\win64\\nssm.exe"
# The NSSM service name
NSSM_SERVICE_NAME = "sergis-server-service"

# The location of the git executable (this tries to find GitHub's git if no other is specified)
# NOTE(review): GIT_PATH is hard-coded non-empty, so the GitHub auto-detect
# fallback below never runs; set GIT_PATH = "" to enable it.
GIT_PATH = "C:\\Program Files (x86)\\Git\\bin\\git.exe"
if not GIT_PATH:
    GITHUB_DIR = os.path.expanduser("~\\AppData\\Local\\GitHub")
    if os.path.exists(GITHUB_DIR):
        for f in os.listdir(GITHUB_DIR):
            if f[:12] == "PortableGit_":
                GIT_PATH = os.path.join(GITHUB_DIR, f, "bin", "git.exe")
                break
################################################################################
## Functions for doing different tasks to update the web directory
################################################################################
def updateGitRepos():
    """Update the git repository and any submodules."""
    # Pull the main repo first, then each submodule working copy in turn,
    # echoing git's output as we go.
    print "Running", GIT_PATH
    print ""
    print subprocess.check_output([GIT_PATH, "pull"], cwd=GIT_REPO)
    print ""
    for submod in GIT_SUBMODULES:
        print subprocess.check_output([GIT_PATH, "pull"], cwd=submod)
        print ""
def runNPM():
    """Run npm install"""
    print "Running npm install"
    # Invoke npm-cli.js through node.exe directly so the script does not
    # depend on npm being on the PATH.
    subprocess.check_call([
        os.path.join(NODE_DIR, "node.exe"),
        os.path.join(NODE_DIR, "node_modules", "npm", "bin", "npm-cli.js"),
        "install"
    ], cwd=GIT_REPO)
    print ""
def runGrunt():
"""Run grunt dist"""
print "Running grunt dist"
# Firstly, copy in any config files
for src, dst in GRUNT_CONFIG_FILES:
if os.path.exists(dst):
os.rename(dst, dst + ".BAK")
shutil.copy(src, dst)
# Run grunt
subprocess.check_call([
GRUNT_PATH,
"dist"
], cwd=GIT_REPO)
# Reset any config files that we copied in
for src, dst in GRUNT_CONFIG_FILES:
if os.path.exists(dst) and os.path.exists(dst + ".BAK"):
os.unlink(dst)
os.rename(dst + ".BAK", dst)
print ""
def setIISPermissions():
    """Set proper permissions (only needs to be done once)"""
    # Grant the IIS_IUSRS group full control, inherited by contained files
    # (OI) and subdirectories (CI), so iisnode can use the web directory.
    print "Setting permissions on", WEB_DIR
    subprocess.check_call(["C:\\windows\\system32\\icacls.exe", WEB_DIR, "/grant", "IIS_IUSRS:(OI)(CI)F"])
    print ""
def clearWebDirectory():
"""Clear out the directory"""
print "Clearing out", WEB_DIR
for item in os.listdir(WEB_DIR):
if not item in WEB_DIR_CLEAR_IGNORE:
file_path = os.path.join(WEB_DIR, item)
if os.path.isfile(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
print ""
def copyToWebDirectory():
    """Copy the latest files to the web directory"""
    for item in os.listdir(GIT_REPO):
        # Skip ignored entries, plus the lite-mode extras (node_modules).
        if not (item in GIT_REPO_IGNORE or (LITE and item in GIT_REPO_IGNORE_LITE)):
            print "Copying", item
            if os.path.isdir(os.path.join(GIT_REPO, item)):
                # Replace the whole directory tree; copytree still honors
                # the ignore list inside subdirectories.
                if os.path.exists(os.path.join(WEB_DIR, item)):
                    shutil.rmtree(os.path.join(WEB_DIR, item))
                shutil.copytree(os.path.join(GIT_REPO, item), os.path.join(WEB_DIR, item), ignore=shutil.ignore_patterns(*GIT_REPO_IGNORE))
            else:
                # Plain file: remove any stale copy, then copy it over.
                if os.path.exists(os.path.join(WEB_DIR, item)):
                    os.remove(os.path.join(WEB_DIR, item))
                shutil.copy(os.path.join(GIT_REPO, item), WEB_DIR)
    print ""
def copyConfigFiles():
"""Copy config files"""
for src, dst in CONFIG_FILES:
print "Copying", src, "to", dst
shutil.copy(src, dst)
print ""
def createUploadsDirectory():
    """Create the uploads directory in the web directory (if missing)."""
    uploads_dir = os.path.join(WEB_DIR, "uploads")
    if not os.path.exists(uploads_dir):
        os.mkdir(uploads_dir)
def stopService():
    """Stop the NSSM service for the SerGIS Server socket server."""
    # Launch elevated ("runas" verb) via cmd /K so NSSM has the rights
    # needed to control the Windows service.
    shell.ShellExecuteEx(lpVerb="runas",
                         lpFile="cmd",
                         lpParameters="/K " + NSSM_PATH + " stop " + NSSM_SERVICE_NAME,
                         nShow=5,
                         fMask=256) # SEE_MASK_NOASYNC
    # Let it recuperate
    time.sleep(5)
def startService():
    """Restart the NSSM service for the SerGIS Server socket server."""
    # Launch elevated ("runas" verb) via cmd /K so NSSM has the rights
    # needed to control the Windows service.
    shell.ShellExecuteEx(lpVerb="runas",
                         lpFile="cmd",
                         lpParameters="/K " + NSSM_PATH + " start " + NSSM_SERVICE_NAME,
                         nShow=5,
                         fMask=256) # SEE_MASK_NOASYNC
    # Let it recuperate
    time.sleep(5)
################################################################################
## Actually run the tasks to update the web directory
################################################################################
def check():
"""Make sure that all required files/directories exist."""
if not os.path.exists(GIT_REPO):
print "Couldn't find GIT_REPO at", GIT_REPO
return False
if not os.path.exists(WEB_DIR):
print "Couldn't find WEB_DIR at", WEB_DIR
return False
if not os.path.exists(CONFIG_JS):
print "Couldn't find CONFIG_JS at", CONFIG_JS
return False
if not os.path.exists(NODE_DIR):
print "Couldn't find NODE_DIR at", NODE_DIR
return False
if not os.path.exists(GRUNT_PATH):
print "Couldn't find GRUNT_PATH at", GRUNT_PATH
return False
if not os.path.exists(NSSM_PATH):
print "Couldn't find NSSM_PATH at", NSSM_PATH
return False
if not os.path.exists(GIT_PATH):
print "Couldn't find GIT_PATH at", GIT_PATH
return False
# All seems good
return True
# Alrighty, let's get started!
if __name__ == "__main__":
    if not GIT_PATH:
        print "Couldn't find git!"
    elif check():
        # Full pipeline; "lite" mode skips npm install, permission setup,
        # web-directory clearing, and uploads-directory creation.
        updateGitRepos()
        if not LITE:
            runNPM()
        runGrunt()
        # Stop the socket-server service while files are being replaced.
        stopService()
        if not LITE:
            setIISPermissions()
            clearWebDirectory()
        copyToWebDirectory()
        copyConfigFiles()
        if not LITE:
            createUploadsDirectory()
        startService()
| {
"content_hash": "52d53544e74891542c80f62cf2737c2d",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 139,
"avg_line_length": 34.99641577060932,
"alnum_prop": 0.5910487505120852,
"repo_name": "sergisproject/server-scripts",
"id": "0310f6ec97cfa29975e2db8d2d2e150fe82136c1",
"size": "9764",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "windows/update_sergis-server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10638"
}
],
"symlink_target": ""
} |
import datetime
import operator
import ceilometer
from ceilometer.alarm.storage import base
from ceilometer.alarm.storage import models
from ceilometer.i18n import _
from ceilometer.openstack.common import log
from ceilometer.storage.hbase import base as hbase_base
from ceilometer.storage.hbase import migration as hbase_migration
from ceilometer.storage.hbase import utils as hbase_utils
from ceilometer import utils
LOG = log.getLogger(__name__)

# Query capabilities this alarm driver supports (merged over the base-class
# defaults): simple filtering only, no complex query expressions.
AVAILABLE_CAPABILITIES = {
    'alarms': {'query': {'simple': True,
                         'complex': False},
               'history': {'query': {'simple': True,
                                     'complex': False}}},
}

# Storage-level capabilities: this backend is flagged production ready.
AVAILABLE_STORAGE_CAPABILITIES = {
    'storage': {'production_ready': True},
}
class Connection(hbase_base.Connection, base.Connection):
    """Put the alarm data into a HBase database

    Collections:

    - alarm:
      - row_key: uuid of alarm
      - Column Families:
        f: contains the raw incoming alarm data
    - alarm_h:
      - row_key: uuid of alarm + ":" + reversed timestamp
      - Column Families:
        f: raw incoming alarm_history data. Timestamp becomes now()
          if not determined
    """

    # Merge this driver's query capabilities over the base-class defaults.
    CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
                                       AVAILABLE_CAPABILITIES)
    STORAGE_CAPABILITIES = utils.update_nested(
        base.Connection.STORAGE_CAPABILITIES,
        AVAILABLE_STORAGE_CAPABILITIES,
    )
    _memory_instance = None

    # HBase table names for alarms and alarm-change history.
    ALARM_TABLE = "alarm"
    ALARM_HISTORY_TABLE = "alarm_h"

    def __init__(self, url):
        """Initialize the HBase connection from the storage URL."""
        super(Connection, self).__init__(url)

    def upgrade(self):
        """Create the alarm tables if needed and run table migrations."""
        tables = [self.ALARM_HISTORY_TABLE, self.ALARM_TABLE]
        column_families = {'f': dict()}
        with self.conn_pool.connection() as conn:
            hbase_utils.create_tables(conn, tables, column_families)
            hbase_migration.migrate_tables(conn, tables)

    def clear(self):
        """Drop both alarm tables, logging but ignoring per-table errors."""
        LOG.debug(_('Dropping HBase schema...'))
        with self.conn_pool.connection() as conn:
            for table in [self.ALARM_TABLE,
                          self.ALARM_HISTORY_TABLE]:
                try:
                    # HBase requires a table to be disabled before deletion.
                    conn.disable_table(table)
                except Exception:
                    LOG.debug(_('Cannot disable table but ignoring error'))
                try:
                    conn.delete_table(table)
                except Exception:
                    LOG.debug(_('Cannot delete table but ignoring error'))

    def update_alarm(self, alarm):
        """Create or update an alarm (also aliased as create_alarm).

        :param alarm: The alarm to store. It is an Alarm object, so we need
          to call as_dict()
        """
        _id = alarm.alarm_id
        alarm_to_store = hbase_utils.serialize_entry(alarm.as_dict())
        with self.conn_pool.connection() as conn:
            alarm_table = conn.table(self.ALARM_TABLE)
            alarm_table.put(_id, alarm_to_store)
            # Read back what was actually stored and return it as a model.
            stored_alarm = hbase_utils.deserialize_entry(
                alarm_table.row(_id))[0]
        return models.Alarm(**stored_alarm)

    create_alarm = update_alarm

    def delete_alarm(self, alarm_id):
        """Delete the alarm row with the given id."""
        with self.conn_pool.connection() as conn:
            alarm_table = conn.table(self.ALARM_TABLE)
            alarm_table.delete(alarm_id)

    def get_alarms(self, name=None, user=None, state=None, meter=None,
                   project=None, enabled=None, alarm_id=None, pagination=None,
                   alarm_type=None, severity=None):
        """Yield matching Alarm models, newest first.

        Pagination and filtering by meter are not implemented and raise
        NotImplementedError when requested.
        """
        if pagination:
            raise ceilometer.NotImplementedError('Pagination not implemented')
        if meter:
            raise ceilometer.NotImplementedError(
                'Filter by meter not implemented')
        q = hbase_utils.make_query(alarm_id=alarm_id, name=name,
                                   enabled=enabled, user_id=user,
                                   project_id=project, state=state,
                                   type=alarm_type, severity=severity)
        with self.conn_pool.connection() as conn:
            alarm_table = conn.table(self.ALARM_TABLE)
            gen = alarm_table.scan(filter=q)
            # Materialize so results can be sorted by timestamp, descending.
            alarms = [hbase_utils.deserialize_entry(data)[0]
                      for ignored, data in gen]
            for alarm in sorted(
                    alarms,
                    key=operator.itemgetter('timestamp'),
                    reverse=True):
                yield models.Alarm(**alarm)

    def get_alarm_changes(self, alarm_id, on_behalf_of,
                          user=None, project=None, alarm_type=None,
                          severity=None, start_timestamp=None,
                          start_timestamp_op=None, end_timestamp=None,
                          end_timestamp_op=None):
        """Yield AlarmChange models for one alarm within a time window."""
        q = hbase_utils.make_query(alarm_id=alarm_id,
                                   on_behalf_of=on_behalf_of, type=alarm_type,
                                   user_id=user, project_id=project,
                                   severity=severity)
        # Translate the timestamp bounds into a row-key range for the scan.
        start_row, end_row = hbase_utils.make_timestamp_query(
            hbase_utils.make_general_rowkey_scan,
            start=start_timestamp, start_op=start_timestamp_op,
            end=end_timestamp, end_op=end_timestamp_op, bounds_only=True,
            some_id=alarm_id)
        with self.conn_pool.connection() as conn:
            alarm_history_table = conn.table(self.ALARM_HISTORY_TABLE)
            gen = alarm_history_table.scan(filter=q, row_start=start_row,
                                           row_stop=end_row)
            for ignored, data in gen:
                stored_entry = hbase_utils.deserialize_entry(data)[0]
                yield models.AlarmChange(**stored_entry)

    def record_alarm_change(self, alarm_change):
        """Record alarm change event."""
        alarm_change_dict = hbase_utils.serialize_entry(alarm_change)
        # Fall back to "now" when the event itself carries no timestamp.
        ts = alarm_change.get('timestamp') or datetime.datetime.now()
        rts = hbase_utils.timestamp(ts)
        with self.conn_pool.connection() as conn:
            alarm_history_table = conn.table(self.ALARM_HISTORY_TABLE)
            alarm_history_table.put(
                hbase_utils.prepare_key(alarm_change.get('alarm_id'), rts),
                alarm_change_dict)
| {
"content_hash": "7cd65c9ebe9745daf5320a64abf4ea18",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 78,
"avg_line_length": 38.04242424242424,
"alnum_prop": 0.5755934363549466,
"repo_name": "yanheven/ceilometer",
"id": "7b0d43929e3a546d59c7acc238024201e353c87e",
"size": "6825",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ceilometer/alarm/storage/impl_hbase.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2969045"
},
{
"name": "Shell",
"bytes": "4227"
}
],
"symlink_target": ""
} |
from pkg1 import subpkg2
from pkg1.subpkg2 import mod2
from pkg1.subpkg2.mod2 import VAR
from subpkg1 import mod3
# malformed imports
from
from import
from pkg1.subpkg2 import
# absolute imports
import pkg1.subpkg2 as foo
from pkg1 import subpkg2 as bar
print(subpkg2, mod3, mod2, foo, bar, VAR)
| {
"content_hash": "4341041ef922f480bbe2520cbd4516df",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 41,
"avg_line_length": 19.933333333333334,
"alnum_prop": 0.7959866220735786,
"repo_name": "diorcety/intellij-community",
"id": "f3926e749629b47a9e2f5b0fe27b32a55de63aa5",
"size": "299",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/testData/refactoring/move/relativeImportsInsideMovedModule/after/src/subpkg1/mod1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "Batchfile",
"bytes": "63659"
},
{
"name": "C",
"bytes": "214180"
},
{
"name": "C#",
"bytes": "1538"
},
{
"name": "C++",
"bytes": "190028"
},
{
"name": "CSS",
"bytes": "108843"
},
{
"name": "CoffeeScript",
"bytes": "1759"
},
{
"name": "Cucumber",
"bytes": "14382"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "FLUX",
"bytes": "57"
},
{
"name": "Groff",
"bytes": "35232"
},
{
"name": "Groovy",
"bytes": "2211774"
},
{
"name": "HTML",
"bytes": "1674627"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "JFlex",
"bytes": "166194"
},
{
"name": "Java",
"bytes": "146664059"
},
{
"name": "JavaScript",
"bytes": "125292"
},
{
"name": "Kotlin",
"bytes": "225274"
},
{
"name": "Makefile",
"bytes": "2352"
},
{
"name": "NSIS",
"bytes": "85938"
},
{
"name": "Objective-C",
"bytes": "28634"
},
{
"name": "Perl6",
"bytes": "26"
},
{
"name": "Protocol Buffer",
"bytes": "6570"
},
{
"name": "Python",
"bytes": "21485830"
},
{
"name": "Ruby",
"bytes": "1213"
},
{
"name": "Scala",
"bytes": "11698"
},
{
"name": "Shell",
"bytes": "63323"
},
{
"name": "Smalltalk",
"bytes": "64"
},
{
"name": "TeX",
"bytes": "60798"
},
{
"name": "TypeScript",
"bytes": "6152"
},
{
"name": "XSLT",
"bytes": "113040"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Colonia table (id + nombre)."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Colonia',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=60)),
            ],
        ),
    ]
| {
"content_hash": "56be0cead4803d1046503761e49158b9",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 114,
"avg_line_length": 22.857142857142858,
"alnum_prop": 0.5708333333333333,
"repo_name": "agustin320/carpoolme",
"id": "b943a9c1369aec351115d64a2a72ec412e70cd07",
"size": "552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "carpool/localidad/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "728"
},
{
"name": "HTML",
"bytes": "29247"
},
{
"name": "JavaScript",
"bytes": "2411"
},
{
"name": "Python",
"bytes": "22613"
}
],
"symlink_target": ""
} |
from ..postgresql.introspection import * # NOQA
| {
"content_hash": "ecd97718052aebd1f28e25376dddbd20",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 48,
"avg_line_length": 49,
"alnum_prop": 0.7551020408163265,
"repo_name": "letouriste001/SmartForest_2.0",
"id": "922f499a6dda8f9bd662932aa0b4ab6b190aba3a",
"size": "49",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python3.4Smartforest/lib/python3.4/site-packages/django/db/backends/postgresql_psycopg2/introspection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "68689"
},
{
"name": "Python",
"bytes": "6055383"
},
{
"name": "Shell",
"bytes": "3294"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
# noinspection PyCompatibility
from builtins import int, str
from datetime import date, datetime
from typing import Any, Dict, Union
from future.utils import viewitems
from zsl.utils.date_helper import format_date_portable, format_datetime_portable
# Hint-dict keys: per-field strptime format strings for date / datetime
# values arriving as strings.
DATE_DATA = 'date_data'
DATETIME_DATA = 'datetime_data'
# Hint-dict key for related-object fields; each entry is a dict holding the
# wrapper class under RELATED_FIELDS_CLASS and optional nested hints under
# RELATED_FIELDS_HINTS.
RELATED_FIELDS = 'related_fields'
RELATED_FIELDS_CLASS = 'cls'
RELATED_FIELDS_HINTS = 'hints'
def extend_object_by_dict(target, dict_data, hints=None):
    """Copy the entries of ``dict_data`` onto ``target`` as attributes.

    Scalar values are assigned directly, except that fields listed in the
    ``DATE_DATA``/``DATETIME_DATA`` hints are parsed with the hinted format
    string and re-stored in the portable date/datetime string format.
    ``date``/``datetime`` values are likewise converted to portable strings.
    Fields named in ``RELATED_FIELDS`` hints are wrapped in the hinted class
    (element-wise for lists/tuples); remaining lists/tuples are
    shallow-copied.  Values of any other type are silently skipped.

    :param target: object whose attributes are set (mutated in place)
    :param dict_data: mapping of attribute name to raw value
    :param hints: optional dict with DATE_DATA, DATETIME_DATA and
                  RELATED_FIELDS sections; missing sections are filled in
    """
    hints = _get_hints(hints)
    for field_name, field_value in viewitems(dict_data):
        if isinstance(field_value, (type(None), str, int, float, bool)):
            if field_name in hints[DATE_DATA] and field_value is not None:
                # Parse with the hinted format, keeping only the date part.
                d = datetime.strptime(field_value,
                                      hints[DATE_DATA][field_name]).date()
                setattr(target, field_name, format_date_portable(d))
            elif field_name in hints[DATETIME_DATA] and \
                    field_value is not None:
                d = datetime.strptime(field_value,
                                      hints[DATETIME_DATA][field_name])
                setattr(target, field_name, format_datetime_portable(d))
            else:
                setattr(target, field_name, field_value)
        elif isinstance(field_value, datetime):
            # datetime must be tested before date: it is a date subclass.
            setattr(target, field_name, format_datetime_portable(field_value))
        elif isinstance(field_value, date):
            setattr(target, field_name, format_date_portable(field_value))
        elif field_name in hints[RELATED_FIELDS]:
            related_cls = hints[RELATED_FIELDS][field_name][
                RELATED_FIELDS_CLASS]
            related_hints = hints[RELATED_FIELDS][field_name].get(
                RELATED_FIELDS_HINTS)
            if isinstance(field_value, (list, tuple)):
                setattr(
                    target,
                    field_name,
                    [
                        related_cls(_to_dict(x), 'id', related_hints)
                        for x in field_value
                    ]
                )
            else:
                setattr(
                    target,
                    field_name,
                    related_cls(_to_dict(field_value), 'id', related_hints)
                )
        elif isinstance(field_value, (list, tuple)):
            # Shallow copy so target does not alias the caller's sequence.
            setattr(target, field_name, list(field_value))
def _to_dict(val):
# type: (Union[Dict[str, Any], object]) -> Dict[str, Any]
return val if isinstance(val, dict) else val.__dict__
def _get_hints(original_hints):
    """Return a hints dict with all three sections guaranteed present.

    A fresh dict is created when ``original_hints`` is None; otherwise the
    caller's dict is completed in place and returned.
    """
    hints = {} if original_hints is None else original_hints
    for section in (DATE_DATA, DATETIME_DATA, RELATED_FIELDS):
        hints.setdefault(section, {})
    return hints
| {
"content_hash": "2dbdb0b873e7df79605eb82051111e7f",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 80,
"avg_line_length": 38.425,
"alnum_prop": 0.571893298633702,
"repo_name": "AtteqCom/zsl",
"id": "fb748712455448fa93a0f939ab9999f6ac987087",
"size": "3074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/zsl/utils/dict_to_object_conversion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1058"
},
{
"name": "Dockerfile",
"bytes": "1212"
},
{
"name": "Python",
"bytes": "373073"
},
{
"name": "Shell",
"bytes": "367"
}
],
"symlink_target": ""
} |
"""Main Controller"""
from tg import expose, flash, require, url, request, redirect
from pylons.i18n import ugettext as _, lazy_ugettext as l_
from catwalk.tg2 import Catwalk
from repoze.what import predicates
from brie.lib.base import BaseController
from brie.lib.aurore_helper import *
from brie import model
import brie.controllers.scheduler as scheduler
#from brie.controllers.secure import SecureController
import brie.controllers.auth as auth_handler
from brie.controllers.auth import AuthRestController
from brie.controllers.rooms import RoomsController
from brie.controllers.members import MembersController
from brie.controllers.show import ShowController
from brie.controllers.search import SearchController
from brie.controllers.edit import EditController
from brie.controllers.administration import AdministrationController
from brie.controllers.stats import StatsController
from brie.controllers.getemails import GetEmailsController
from brie.controllers.error import ErrorController
from brie.controllers.registration import RegistrationController
from brie.controllers.treasury import TreasuryController
from brie.controllers.plugins import PluginsController
__all__ = ['RootController']
class RootController(BaseController):
    """
    The root controller for the Brie application.

    All the other controllers and WSGI applications should be mounted on this
    controller. For example::

        panel = ControlPanelController()
        another_app = AnotherWSGIApplication()

    Keep in mind that WSGI applications shouldn't be mounted directly: They
    must be wrapped around with :class:`tg.controllers.WSGIAppController`.
    """

    # admin = Catwalk(model, DBSession)

    # Sub-controllers mounted on the root.  Order matters: `edit` needs
    # `show`, and `registration` needs both `edit` and `administration`.
    auth = AuthRestController()
    rooms = RoomsController()
    show = ShowController()
    edit = EditController(show)
    members = MembersController(edit)
    administration = AdministrationController()
    stats = StatsController()
    getemails = GetEmailsController()
    error = ErrorController()
    search = SearchController()
    registration = RegistrationController(edit, administration)
    treasury = TreasuryController()
    plugins = PluginsController()

    @expose('brie.templates.index')
    def index(self):
        """Render the landing page with the current user and their residence."""
        user = auth_handler.current.get_user()
        if user is None:
            residence = None
        else:
            residence = Residences.get_name_by_dn(user, user.residence_dn)
        return {"user": user, "residence": residence}
| {
"content_hash": "121fb21700448ee0f66decc230842dcd",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 77,
"avg_line_length": 33.8,
"alnum_prop": 0.7562130177514793,
"repo_name": "Rbeuque74/brie-aurore",
"id": "eeb6b3cce1e3db130426aa0095a5e21a4a7b005a",
"size": "2559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Brie/brie/controllers/root.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "7815"
},
{
"name": "HTML",
"bytes": "91166"
},
{
"name": "JavaScript",
"bytes": "3085"
},
{
"name": "Python",
"bytes": "210870"
}
],
"symlink_target": ""
} |
import copy
import httplib2
from oslo_serialization import jsonutils as json
from oslotest import mockpatch
from tempest.services.compute.json import quota_classes_client
from tempest.tests import base
from tempest.tests import fake_auth_provider
class TestQuotaClassesClient(base.TestCase):
    """Unit tests for the compute QuotaClassesClient.

    The HTTP layer is mocked out, so these only verify request dispatch
    and response deserialization.
    """

    # Canned quota-class payload served by the mocked REST layer.
    FAKE_QUOTA_CLASS_SET = {
        "injected_file_content_bytes": 10240,
        "metadata_items": 128,
        "server_group_members": 10,
        "server_groups": 10,
        "ram": 51200,
        "floating_ips": 10,
        "key_pairs": 100,
        "id": u'\u2740(*\xb4\u25e1`*)\u2740',
        "instances": 10,
        "security_group_rules": 20,
        "security_groups": 10,
        "injected_files": 5,
        "cores": 20,
        "fixed_ips": -1,
        "injected_file_path_bytes": 255,
    }

    def setUp(self):
        super(TestQuotaClassesClient, self).setUp()
        fake_auth = fake_auth_provider.FakeAuthProvider()
        self.client = quota_classes_client.QuotaClassesClient(
            fake_auth, 'compute', 'regionOne')

    def _test_show_quota_class_set(self, bytes_body=False):
        # Serve the fake payload (optionally utf-8 encoded) from a mocked
        # ServiceClient.get and check it round-trips unchanged.
        serialized_body = json.dumps({
            "quota_class_set": self.FAKE_QUOTA_CLASS_SET})
        if bytes_body:
            serialized_body = serialized_body.encode('utf-8')
        mocked_resp = (httplib2.Response({'status': 200}), serialized_body)
        self.useFixture(mockpatch.Patch(
            'tempest.common.service_client.ServiceClient.get',
            return_value=mocked_resp))
        resp = self.client.show_quota_class_set("test")
        self.assertEqual(self.FAKE_QUOTA_CLASS_SET, resp)

    def test_show_quota_class_set_with_str_body(self):
        self._test_show_quota_class_set()

    def test_show_quota_class_set_with_bytes_body(self):
        self._test_show_quota_class_set(bytes_body=True)

    def test_update_quota_class_set(self):
        # The update response carries no "id", so drop it from the
        # expected payload before mocking ServiceClient.put.
        fake_quota_class_set = copy.deepcopy(self.FAKE_QUOTA_CLASS_SET)
        fake_quota_class_set.pop("id")
        serialized_body = json.dumps({"quota_class_set": fake_quota_class_set})
        mocked_resp = (httplib2.Response({'status': 200}), serialized_body)
        self.useFixture(mockpatch.Patch(
            'tempest.common.service_client.ServiceClient.put',
            return_value=mocked_resp))
        resp = self.client.update_quota_class_set("test")
        self.assertEqual(fake_quota_class_set, resp)
| {
"content_hash": "0b1ea7f32b320fce3929766ccc697b69",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 79,
"avg_line_length": 35.92537313432836,
"alnum_prop": 0.6364769422517657,
"repo_name": "tudorvio/tempest",
"id": "ff9b31031d312e4f7771f7e64a1929c03718b9e6",
"size": "3038",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tempest/tests/services/compute/test_quota_classes_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2734396"
},
{
"name": "Shell",
"bytes": "8578"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from ._Qt import QtCore, QtWidgets, QtGui
from pxr import Gf, Tf, Sdf
from . import compatability
# A Note on 'None'
#
# This file aims to provide editors that are compatible with all
# VtValues/SdfValueTypeNames that may appear in a Usd file. That means that
# 'None' may be a value that needs to be handled by a widget's SetValue (as
# 'None' may be returned by an attribute's Get). At the same time, calling Set
# with a value of None will raise an Exception, so value editors must not
# return None in their GetValue. When values are not explicitly defined (say,
# an empty numeric value), we prefer to map the undefined field to the type's
# VtZero value. It may be tempting to try and equate a SetValue with a Block or
# a Clear, but that may be ambiguous.
#
# In short, Widgets MUST be instantiable with None via SetValue and NEVER
# return None via GetValue.
class _ValueEditMetaclass(type(QtWidgets.QWidget)):
    """Metaclass used for all subclasses of _ValueEdit.

    Qt user properties are the magic that allows for the editors to
    leverage a lot of default Qt behavior with respect to item delegates.
    If 'valueType' is not None, then you MUST declare an implementation of
    GetValue and SetValue.
    Ideally, this would be achieved via inheritance or python decorators,
    but the user property is defined via metaclass in PySide so we need to
    approach the problem this way.
    """
    def __new__(meta, name, bases, clsAttributes):
        # A non-None 'valueType' triggers synthesis of the 'value' user
        # property on the class being created.
        valueType = clsAttributes.get('valueType', None)
        if valueType is not None:
            # Resolve the getter: prefer one declared on the class itself,
            # otherwise take it from the first base that provides one.
            if 'GetValue' in clsAttributes:
                getter = clsAttributes['GetValue']
            else:
                for base in bases:
                    if hasattr(base, 'GetValue'):
                        getter = base.GetValue
                        break
                else:
                    # for/else: no base supplied a GetValue implementation.
                    raise NotImplementedError(
                        "GetValue must be defined in class or parent.")
            # Resolve the setter the same way.
            if 'SetValue' in clsAttributes:
                setter = clsAttributes['SetValue']
            else:
                for base in bases:
                    if hasattr(base, 'SetValue'):
                        setter = base.SetValue
                        break
                else:
                    raise NotImplementedError(
                        "SetValue must be defined in class or parent.")
            # user=True makes 'value' THE property Qt item delegates read
            # and write by default.
            clsAttributes['value'] = QtCore.Property(
                valueType, getter, setter, user=True)
            # NOTE: We're supposed to be able to declare a notify signal in the
            # Qt property declaration. I haven't gotten it working so I've been
            # manually defining it in each SetValue method. We should
            # reevaluate this approach to declaring the value user property as
            # it is a little convoluted.
        return super(_ValueEditMetaclass, meta).__new__(
            meta, name, bases, clsAttributes)
class _ValueEdit(QtWidgets.QWidget):
    """Common base for all value editors.

    The metaclass turns the class variable 'valueType' (when not None) into
    a Qt user property named 'value'; concrete editors must therefore
    implement 'GetValue', 'SetValue', and 'IsChanged'.
    """
    __metaclass__ = _ValueEditMetaclass
    valueType = None

    def __init__(self, parent=None):
        super(_ValueEdit, self).__init__(parent=parent)

    def _SetupLayoutSpacing(self, layout):
        # Pack child widgets tightly so the editor sits flush in item views.
        layout.setSpacing(0)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSizeConstraint(QtWidgets.QLayout.SetNoConstraint)

    def GetValue(self):
        """Return the editor's current value. Subclass responsibility."""
        raise NotImplementedError()

    def SetValue(self, value):
        """Display 'value' in the editor. Subclass responsibility."""
        raise NotImplementedError()

    def IsChanged(self):
        """Report whether delegates should treat this editor as edited.

        Several actions can trigger setModelData in the ValueDelegate; a
        custom IsChanged lets editors filter those down to genuine edits.
        (It would be nice to remove this if possible.)
        """
        raise NotImplementedError()
class _LineEdit(_ValueEdit):
    """Shared machinery for editors built from one or more QLineEdits."""

    def __init__(self, parent=None):
        super(_LineEdit, self).__init__(parent=parent)
        self.__changed = False

    def _SetupLineEdit(self, lineEdit):
        """Wire a child QLineEdit so edits mark this editor as changed."""
        lineEdit.returnPressed.connect(self.__MarkChanged)
        lineEdit.textEdited.connect(self.__MarkChangedFromEdit)
        lineEdit.setFrame(False)

    def __MarkChanged(self):
        self.__changed = True

    def __MarkChangedFromEdit(self, _):
        # textEdited delivers the new text; we only care that it happened.
        self.__changed = True

    def IsChanged(self):
        """True once return was pressed or the text was edited.

        See _ValueEdit.IsChanged for more information.
        """
        return self.__changed
class _ComboEdit(_ValueEdit):
    """Shared machinery for editors built around a QComboBox."""

    # Emitted whenever the user activates an item in the combo box.
    editFinished = QtCore.Signal()

    def __init__(self, choices, parent=None):
        super(_ComboEdit, self).__init__(parent=parent)
        self.__changed = False
        self._comboBox = QtWidgets.QComboBox(self)
        self._comboBox.addItems(choices)
        self._comboBox.activated.connect(self.__OnActivated)
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(self._comboBox)
        self._SetupLayoutSpacing(layout)
        self.setFocusProxy(self._comboBox)
        self.setLayout(layout)

    def __OnActivated(self, _):
        self.__changed = True
        self.editFinished.emit()

    def IsChanged(self):
        """True once the user has activated a combo box item.

        See _ValueEdit.IsChanged for more information.
        """
        return self.__changed
class _NumericEdit(_LineEdit):
    """Base class for a single line edit that contains a number.

    Values can be limited via optional 'minValue' and 'maxValue' bounds.
    Objects inheriting from _NumericEdit should set 'valueType' to the
    python numeric type as well as a QValidator 'validatorType' class
    variable.
    """
    valueType = None
    validatorType = None

    def __init__(self, minValue=None, maxValue=None, parent=None):
        super(_NumericEdit, self).__init__(parent=parent)
        self.__lineEdit = QtWidgets.QLineEdit(self)
        self.__validator = self.validatorType(self)
        self.__lineEdit.setValidator(self.__validator)
        # Compare against None explicitly: a bound of 0 is legitimate and
        # was previously dropped because it is falsy (the unsigned integer
        # editors in valueTypeMap pass minValue=0).
        if minValue is not None:
            self.__validator.setBottom(minValue)
        if maxValue is not None:
            self.__validator.setTop(maxValue)
        self.__layout = QtWidgets.QHBoxLayout()
        self.__layout.addWidget(self.__lineEdit)
        self._SetupLayoutSpacing(self.__layout)
        self.setLayout(self.__layout)
        self.setFocusProxy(self.__lineEdit)
        self._SetupLineEdit(self.__lineEdit)
        # get the preferred string type of the current Qt context
        self._stringType = type(self.__lineEdit.text())

    def GetValue(self):
        """Return the typed value; an empty field maps to the type's zero.

        Using valueType(0) (rather than the float literal 0.0) keeps the
        result's type consistent with 'valueType' for integer editors.
        """
        text = self.__lineEdit.text()
        return self.valueType(text) if len(text) > 0 else self.valueType(0)

    def SetValue(self, value):
        """Display 'value'; None clears the field (per the module's
        None-handling policy).

        Raises ValueError if the validator rejects the stringified value.
        """
        if value is None:
            self.__lineEdit.clear()
            return
        stringValue = compatability.ResolveString(str(value), self._stringType)
        if self.__validator.validate(stringValue, 0)[0] != \
                QtGui.QValidator.Acceptable:
            raise ValueError("%s not accepted by validator." % stringValue)
        self.__lineEdit.setText(stringValue)
class _VecEdit(_LineEdit):
    """Base class for a line edit per component of a GfVec*.

    No custom implementation of Get and Set value are required.
    You can effectively treat this as a C++ templated class where the
    template parameters are the three class parameters.
    'valueType' is the GfVec type. 'scalarType' (ie. int, float) is the type
    of the individual elements. 'validatorType' is a subclass of QValidator
    used for 'scalarType' validation.
    """
    valueType = None
    scalarType = None
    validatorType = None

    def __init__(self, parent=None):
        super(_VecEdit, self).__init__(parent=parent)
        self.__layout = QtWidgets.QHBoxLayout()
        self.__editors = []
        self.__validator = self.validatorType()
        for index in xrange(self.valueType.dimension):
            self.__editors.append(QtWidgets.QLineEdit(self))
            self.__editors[-1].setValidator(self.__validator)
            self.__layout.addWidget(self.__editors[-1])
            # Chain the tab order between consecutive component fields.
            if index != 0:
                self.setTabOrder(self.__editors[-2], self.__editors[-1])
            self._SetupLineEdit(self.__editors[-1])
        # Close the tab cycle back to the first component.
        self.setTabOrder(self.__editors[-1], self.__editors[0])
        self._SetupLayoutSpacing(self.__layout)
        self.setLayout(self.__layout)
        self.setFocusProxy(self.__editors[0])
        # get the preferred string type of the current Qt context
        self._stringType = type(self.__editors[0].text())

    def GetValue(self):
        """Return a 'valueType' built from the fields; empty fields map to
        the scalar type's zero (typed, not the float literal 0.0)."""
        text = (self.__editors[i].text()
                for i in xrange(self.valueType.dimension))
        return self.valueType(
            *(self.scalarType(t) if t else self.scalarType(0) for t in text))

    def SetValue(self, value):
        """Display 'value'; None clears every component field.

        Raises ValueError on a dimension mismatch, a None component, or a
        component rejected by the validator.
        """
        if value is None:
            for index in xrange(self.valueType.dimension):
                self.__editors[index].clear()
            return
        if len(value) != self.valueType.dimension:
            # Interpolate eagerly with %: the original passed the format
            # arguments as extra ValueError arguments, so the rendered
            # message still contained the raw '%i' placeholders.
            raise ValueError("Input length %i does not match expected length "
                             "%i" % (len(value), self.valueType.dimension))
        for index in xrange(self.valueType.dimension):
            if value[index] is None:
                raise ValueError("Value at %i is None" % index)
            string = compatability.ResolveString(
                str(value[index]), self._stringType)
            if self.__validator.validate(string, 0)[0] != \
                    QtGui.QValidator.Acceptable:
                raise ValueError(
                    "%s (at index %i) not accepted by validator." %
                    (string, index))
            self.__editors[index].setText(string)
class _MatrixEdit(_LineEdit):
    """Base class for a line edit per component of a GfMatrix*.

    No custom implementation of Get and Set value are required.
    You can effectively treat this as a C++ templated class where the
    template parameters are the three class parameters.
    'valueType' is the GfMatrix type. 'scalarType' (ie. int, float) is the
    type of the individual elements. 'validatorType' is a subclass of
    QValidator used for 'scalarType' validation.
    """
    valueType = None
    scalarType = None
    validatorType = None

    def __init__(self, parent=None):
        super(_MatrixEdit, self).__init__(parent)
        self.__layout = QtWidgets.QGridLayout(self)
        self.__editors = []
        self.__validator = self.validatorType()
        for row in xrange(self.valueType.dimension[0]):
            for column in xrange(self.valueType.dimension[1]):
                self.__editors.append(QtWidgets.QLineEdit(self))
                self.__editors[-1].setValidator(self.__validator)
                self.__layout.addWidget(self.__editors[-1], row, column)
                self._SetupLineEdit(self.__editors[-1])
                # Chain tab order between consecutive editors.  Only the
                # very first editor (row 0, column 0) has no predecessor;
                # the original 'row != 0 and column != 0' skipped the whole
                # first row and first column.
                if row != 0 or column != 0:
                    self.setTabOrder(self.__editors[-2], self.__editors[-1])
        # Close the tab cycle back to the first component.
        self.setTabOrder(self.__editors[-1], self.__editors[0])
        self.setFocusProxy(self.__editors[0])
        self.setLayout(self.__layout)
        self._SetupLayoutSpacing(self.__layout)
        # get the preferred string type of the current Qt context
        self._stringType = type(self.__editors[0].text())

    def __GetIndex(self, row, column):
        # Row-major index into the flat __editors list.
        return row * self.valueType.dimension[1] + column

    def GetValue(self):
        """Return a 'valueType' built from the fields; empty fields map to
        the scalar type's zero (typed, not the float literal 0.0)."""
        text = (e.text() for e in self.__editors)
        return self.valueType(
            *(self.scalarType(t) if t else self.scalarType(0) for t in text))

    def SetValue(self, value):
        """Display 'value'; None clears every component field.

        Raises TypeError if a row is a string, and ValueError on a shape
        mismatch, a None component, or a component rejected by the
        validator.
        """
        numRows = self.valueType.dimension[0]
        numColumns = self.valueType.dimension[1]
        if value is None:
            for e in self.__editors:
                e.clear()
            return
        if len(value) != numRows:
            # Use eager % interpolation; the original passed format args as
            # extra ValueError arguments, leaving '%i' unexpanded.
            raise ValueError(
                "Input row size %i does not match expected length %i"
                % (len(value), numRows))
        for row in xrange(numRows):
            # Guard each row: a string could satisfy the length checks while
            # holding characters instead of numeric components.  (The
            # original tested the outer 'value' here, not the row.)
            if isinstance(value[row], str):
                raise TypeError("Row cannot be string")
            if len(value[row]) != numColumns:
                raise ValueError("Input column size %i does not match "
                                 "expected length %i"
                                 % (len(value[row]), numColumns))
            for column in xrange(numColumns):
                if value[row][column] is None:
                    raise ValueError(
                        "Value at (%i, %i) is None" % (row, column))
                string = compatability.ResolveString(
                    str(value[row][column]), self._stringType)
                if self.__validator.validate(string, 0)[0] != \
                        QtGui.QValidator.Acceptable:
                    raise ValueError(
                        "%s (at %i, %i) not accepted by validator." %
                        (string, row, column))
                self.__editors[self.__GetIndex(row, column)].setText(string)
# --- Concrete editors ----------------------------------------------------
# Thin bindings of the templated base classes; each subclass only fixes the
# class parameters.  Note that half- and single-precision Gf types share the
# double-precision editors (see valueTypeMap below).

class IntEdit(_NumericEdit):
    valueType = int
    validatorType = QtGui.QIntValidator


class FloatEdit(_NumericEdit):
    valueType = float
    validatorType = QtGui.QDoubleValidator


class Vec3dEdit(_VecEdit):
    valueType = Gf.Vec3d
    scalarType = float
    validatorType = QtGui.QDoubleValidator


class Vec3iEdit(_VecEdit):
    valueType = Gf.Vec3i
    scalarType = int
    validatorType = QtGui.QIntValidator


class Vec2dEdit(_VecEdit):
    valueType = Gf.Vec2d
    scalarType = float
    validatorType = QtGui.QDoubleValidator


class Vec2iEdit(_VecEdit):
    valueType = Gf.Vec2i
    scalarType = int
    validatorType = QtGui.QIntValidator


class Vec4dEdit(_VecEdit):
    valueType = Gf.Vec4d
    scalarType = float
    validatorType = QtGui.QDoubleValidator


class Vec4iEdit(_VecEdit):
    valueType = Gf.Vec4i
    scalarType = int
    validatorType = QtGui.QIntValidator


class Matrix4dEdit(_MatrixEdit):
    valueType = Gf.Matrix4d
    scalarType = float
    validatorType = QtGui.QDoubleValidator


class Matrix3dEdit(_MatrixEdit):
    valueType = Gf.Matrix3d
    scalarType = float
    validatorType = QtGui.QDoubleValidator


class Matrix2dEdit(_MatrixEdit):
    valueType = Gf.Matrix2d
    scalarType = float
    validatorType = QtGui.QDoubleValidator
class TextComboEdit(_ComboEdit):
    """Combo box editor for plain string values."""
    valueType = str

    def __init__(self, allowedValues, parent=None):
        super(TextComboEdit, self).__init__(allowedValues, parent=parent)
        # Values that were not in 'allowedValues' but had to be added so the
        # editor could still display them.
        self._reluctantValues = []

    def GetValue(self):
        """Return the currently selected text."""
        return str(self._comboBox.currentText())

    def SetValue(self, value):
        """Select 'value' in the combo box; None/empty maps to ''."""
        if not value:
            value = ''
        index = self._comboBox.findText(value)
        if index < 0:
            # Not one of the allowed choices; add it reluctantly so the
            # current value can still be shown.
            self._comboBox.addItem(value)
            index = self._comboBox.findText(value)
            self._reluctantValues.append(value)
        assert(index >= 0)
        self._comboBox.setCurrentIndex(index)
class BoolEdit(_ComboEdit):
    """Two-item combo box editor mapping 'false'/'true' to a python bool."""
    valueType = bool

    def __init__(self, parent=None):
        super(BoolEdit, self).__init__(['false', 'true'], parent)

    def GetValue(self):
        """Return True when the 'true' item is selected."""
        return self._comboBox.currentText() == "true"

    def SetValue(self, value):
        """Select index 1 ('true') for truthy values, else index 0."""
        self._comboBox.setCurrentIndex(1 if value else 0)
class StringEdit(_LineEdit):
    """Single line editor for plain string values."""
    valueType = str

    def __init__(self, parent=None):
        super(StringEdit, self).__init__(parent)
        self.__field = QtWidgets.QLineEdit(self)
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(self.__field)
        self._SetupLayoutSpacing(layout)
        self.setFocusProxy(self.__field)
        self.setLayout(layout)
        self._SetupLineEdit(self.__field)

    def GetValue(self):
        """Return the field text as a str (never None)."""
        return str(self.__field.text())

    def SetValue(self, value):
        """Display 'value'; None clears the field."""
        if value is None:
            self.__field.clear()
        else:
            self.__field.setText(value)
class AssetEdit(_LineEdit):
    """Single line editor for Sdf.AssetPath values."""
    valueType = Sdf.AssetPath

    def __init__(self, parent=None):
        super(AssetEdit, self).__init__(parent)
        self.__field = QtWidgets.QLineEdit(self)
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(self.__field)
        self._SetupLayoutSpacing(layout)
        self.setFocusProxy(self.__field)
        self.setLayout(layout)
        self._SetupLineEdit(self.__field)

    def GetValue(self):
        """Return an Sdf.AssetPath; an empty field maps to the empty path
        (never None, per the module's None-handling policy)."""
        text = str(self.__field.text())
        if not text:
            return Sdf.AssetPath()
        return Sdf.AssetPath(text)

    def SetValue(self, value):
        """Display the asset's authored path; None clears the field."""
        if value is None:
            self.__field.clear()
        else:
            self.__field.setText(value.path)
class PathValidator(QtGui.QValidator):
    """A PathValidator ensures that the path is a valid SdfPath."""

    def __init__(self, parent=None):
        super(PathValidator, self).__init__(parent)

    def validate(self, string, pos):
        # Empty input and well-formed SdfPath strings are acceptable;
        # anything else is Intermediate (may become valid as typing
        # continues).
        value = compatability.ResolveString(string, str)
        if not value or Sdf.Path.IsValidPathString(value):
            return (QtGui.QValidator.Acceptable, string, pos)
        return (QtGui.QValidator.Intermediate, string, pos)
class PathEdit(_LineEdit):
    """Single line editor for Sdf.Path values, validated by PathValidator."""
    valueType = Sdf.Path

    def __init__(self, parent=None):
        super(PathEdit, self).__init__(parent)
        self.__validator = PathValidator()
        self.__field = QtWidgets.QLineEdit(self)
        # get the preferred string type of the current Qt context
        self._stringType = type(self.__field.text())
        self.__field.setValidator(self.__validator)
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(self.__field)
        self._SetupLayoutSpacing(layout)
        self.setFocusProxy(self.__field)
        self.setLayout(layout)
        self._SetupLineEdit(self.__field)

    def GetValue(self):
        """Return an Sdf.Path; an empty field maps to the empty path
        (never None, per the module's None-handling policy)."""
        text = str(self.__field.text())
        if not text:
            return Sdf.Path()
        return Sdf.Path(text)

    def SetValue(self, value):
        """Display 'value'; None clears the field.

        Raises ValueError if the validator rejects the stringified path.
        """
        if value is None:
            self.__field.clear()
            return
        stringValue = compatability.ResolveString(str(value), self._stringType)
        if self.__validator.validate(stringValue, 0)[0] != \
                QtGui.QValidator.Acceptable:
            raise ValueError("%s not accepted by validator." % stringValue)
        self.__field.setText(stringValue)
class _ColorButton(QtWidgets.QPushButton):
    '''Button whose face previews a color.

    The stored color is in DISPLAY space, not LINEAR space.
    '''

    class _PainterContext(object):
        '''Context manager pairing QPainter.begin with a guaranteed end.'''

        def __init__(self, widget):
            self.widget = widget
            self._painter = None

        def __enter__(self):
            painter = QtGui.QPainter()
            painter.begin(self.widget)
            self._painter = painter
            return painter

        def __exit__(self, *args):
            self._painter.end()

    def __init__(self, parent=None):
        super(_ColorButton, self).__init__(parent)
        # Default preview is white (display space).
        self._color = QtGui.QColor(255, 255, 255)

    @property
    def displayColor(self):
        """Returns color in display space"""
        return self._color

    @displayColor.setter
    def displayColor(self, color):
        """Set color in display space"""
        # Only repaint when the color actually changes.
        if self._color != color:
            self._color = color
            self.update()

    def paintEvent(self, event):
        super(_ColorButton, self).paintEvent(event)
        # Fill an inset rectangle of the button face with the stored color.
        with self._PainterContext(self) as painter:
            painter.setPen(QtGui.QPen(self._color))
            painter.setBrush(QtGui.QBrush(self._color))
            frame = self.geometry()
            inset = QtCore.QRect(
                5, 5, frame.width() - 11, frame.height() - 11)
            painter.drawRect(inset)
class _ColorEdit(_ValueEdit):
    '''Stores a color in LINEAR space.

    Pairs a color swatch button (display space) with a numeric vector
    editor (linear space); conversion goes through
    Gf.ConvertLinearToDisplay / Gf.ConvertDisplayToLinear.
    '''
    valueType = None

    def __init__(self, parent=None):
        super(_ColorEdit, self).__init__(parent)
        self.__layout = QtWidgets.QHBoxLayout()
        self._SetupLayoutSpacing(self.__layout)
        self._colorButton = _ColorButton()
        self._colorButton.setMaximumWidth(30)
        self._colorButton.clicked.connect(self._OnPushed)
        # 'valueTypeMap' is a module-level dict defined after this class;
        # the forward reference is resolved at construction time, so it is
        # safe once the module has finished importing.
        self._valueWidget = valueTypeMap[Tf.Type.Find(self.valueType)]()
        self.__layout.addWidget(self._colorButton)
        self.__layout.addWidget(self._valueWidget)
        self.setLayout(self.__layout)
        # TODO: There should be a way to more directly identify if one of the
        # numeric widgets have changed.
        for child in self._valueWidget.children():
            if isinstance(child, QtWidgets.QLineEdit):
                child.editingFinished.connect(self._SetButtonColor)
        self._changed = False

    def _SetButtonColor(self):
        # Mirror the numeric value onto the swatch, converting linear ->
        # display space.  NOTE(review): components are scaled by 255 but
        # left as floats before being passed to QColor -- confirm the Qt
        # binding in use accepts non-integer channel values.
        assert(self.valueType.dimension in (3, 4))
        value = Gf.ConvertLinearToDisplay(self.value)
        self._colorButton.displayColor = QtGui.QColor(
            *[255 * v for v in value])

    def _OnPushed(self):
        # Open a QColorDialog pre-seeded with the current display color and
        # write the (rounded) linear result back into the 'value' property.
        if self.valueType.dimension in (3, 4):
            options = QtWidgets.QColorDialog.ColorDialogOptions()
            displayColor = QtGui.QColor(*[255 * v for v in
                                          Gf.ConvertLinearToDisplay(self.value)])
            if self.valueType.dimension == 4:
                # 4-component colors expose the alpha channel in the dialog.
                options = QtWidgets.QColorDialog.ShowAlphaChannel
            newColor = QtWidgets.QColorDialog.getColor(
                displayColor, self, unicode(self.valueType), options)
            if newColor.isValid():
                if self.valueType.dimension == 3:
                    value = (newColor.red(), newColor.green(), newColor.blue())
                elif self.valueType.dimension == 4:
                    value = (newColor.red(), newColor.green(),
                             newColor.blue(), newColor.alpha())
                # Back to [0, 1] range, then display -> linear; rounding to
                # two decimals keeps the displayed numbers stable.
                value = self.valueType(*(v/255.0 for v in value))
                value = Gf.ConvertDisplayToLinear(value)
                self.value = self.valueType(*(round(v, 2) for v in value))
                self._changed = True

    def GetValue(self):
        # Delegate to the embedded numeric editor (linear space).
        return self._valueWidget.value

    def SetValue(self, value):
        # Delegate to the numeric editor, then refresh the swatch.
        self._valueWidget.value = value
        self._SetButtonColor()

    def IsChanged(self):
        # Changed if either the numeric fields were edited or a color was
        # picked from the dialog.
        return self._valueWidget.IsChanged() or self._changed
class Color3dEdit(_ColorEdit):
    # Three-component (RGB) color editor.
    valueType = Gf.Vec3d


class Color4dEdit(_ColorEdit):
    # Four-component (RGBA) color editor.
    valueType = Gf.Vec4d


# Maps color-valued Tf.Types to their editor; all vector precisions share
# the double-precision editors.
colorTypeMap = {
    Tf.Type.Find(Gf.Vec3f): Color3dEdit,
    Tf.Type.Find(Gf.Vec3d): Color3dEdit,
    Tf.Type.Find(Gf.Vec3h): Color3dEdit,
    Tf.Type.Find(Gf.Vec4f): Color4dEdit,
    Tf.Type.Find(Gf.Vec4d): Color4dEdit,
    Tf.Type.Find(Gf.Vec4h): Color4dEdit,
}
# Maps a Tf.Type to the editor class (or factory) used for values of that
# type.  Integer editors are bounded to their C type's range via
# functools.partial.
valueTypeMap = {
    Tf.Type.FindByName('string'): StringEdit,
    Tf.Type.FindByName('TfToken'): StringEdit,
    Tf.Type.FindByName('SdfAssetPath'): AssetEdit,
    Tf.Type.FindByName('SdfPath'): PathEdit,
    Tf.Type.FindByName('unsigned char'):
        functools.partial(IntEdit, minValue=0, maxValue=(2 << (8 - 1)) - 1),
    Tf.Type.FindByName('unsigned int'):
        functools.partial(IntEdit, minValue=0, maxValue=(2 << (32 - 1)) - 1),
    Tf.Type.FindByName('unsigned long'):
        functools.partial(IntEdit, minValue=0, maxValue=(2 << (64 - 1)) - 1),
    Tf.Type.FindByName('int'): functools.partial(
        IntEdit, minValue=-(2 << (32 - 1 - 1)), maxValue=(2 << (32 - 1 - 1)) - 1),
    Tf.Type.FindByName('long'): functools.partial(
        IntEdit, minValue=-(2 << (64 - 1 - 1)), maxValue=(2 << (64 - 1 - 1)) - 1),
    Tf.Type.FindByName('half'): FloatEdit,
    Tf.Type.FindByName('float'): FloatEdit,
    Tf.Type.FindByName('double'): FloatEdit,
    Tf.Type.Find(Gf.Vec2i): Vec2iEdit, Tf.Type.Find(Gf.Vec2f): Vec2dEdit,
    Tf.Type.Find(Gf.Vec2d): Vec2dEdit, Tf.Type.Find(Gf.Vec2h): Vec2dEdit,
    Tf.Type.Find(Gf.Vec3i): Vec3iEdit, Tf.Type.Find(Gf.Vec3f): Vec3dEdit,
    # Gf.Vec3h was missing: the original repeated the Gf.Vec3f key here, so
    # half-precision Vec3 values had no editor (vecTypes expects one).
    Tf.Type.Find(Gf.Vec3h): Vec3dEdit, Tf.Type.Find(Gf.Vec3d): Vec3dEdit,
    Tf.Type.Find(Gf.Vec4i): Vec4iEdit, Tf.Type.Find(Gf.Vec4f): Vec4dEdit,
    Tf.Type.Find(Gf.Vec4h): Vec4dEdit, Tf.Type.Find(Gf.Vec4d): Vec4dEdit,
    Tf.Type.Find(Gf.Matrix2f): Matrix2dEdit,
    Tf.Type.Find(Gf.Matrix2d): Matrix2dEdit,
    Tf.Type.Find(Gf.Matrix3f): Matrix3dEdit,
    Tf.Type.Find(Gf.Matrix3d): Matrix3dEdit,
    Tf.Type.Find(Gf.Matrix4f): Matrix4dEdit,
    Tf.Type.Find(Gf.Matrix4d): Matrix4dEdit,
}
# Convenience sets for classifying a Tf.Type by editor family.
floatTypes = {Tf.Type.FindByName('half'),
              Tf.Type.FindByName('float'), Tf.Type.FindByName('double')}

vecTypes = {Tf.Type.Find(Gf.Vec2i), Tf.Type.Find(Gf.Vec2f),
            Tf.Type.Find(Gf.Vec2d), Tf.Type.Find(Gf.Vec2h),
            Tf.Type.Find(Gf.Vec3i), Tf.Type.Find(Gf.Vec3f),
            Tf.Type.Find(Gf.Vec3d), Tf.Type.Find(Gf.Vec3h),
            Tf.Type.Find(Gf.Vec4i), Tf.Type.Find(Gf.Vec4f),
            Tf.Type.Find(Gf.Vec4d), Tf.Type.Find(Gf.Vec4h)}

matrixTypes = {Tf.Type.Find(Gf.Matrix2f), Tf.Type.Find(Gf.Matrix2d),
               Tf.Type.Find(Gf.Matrix3f), Tf.Type.Find(Gf.Matrix3d),
               Tf.Type.Find(Gf.Matrix4f), Tf.Type.Find(Gf.Matrix4d)}
if __name__ == '__main__':
    # Ad-hoc smoke test: stack one editor of each kind in a window.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    layout = QtWidgets.QVBoxLayout()
    demos = ((FloatEdit(), .5),
             (Vec3dEdit(), (1, 2, 3)),
             (StringEdit(), "one"),
             (PathEdit(), Sdf.Path("/World")),
             (Color3dEdit(), (.5, .5, .5)))
    for editor, initialValue in demos:
        editor.value = initialValue
        layout.addWidget(editor)
    mainWidget = QtWidgets.QWidget()
    mainWidget.setLayout(layout)
    mainWidget.show()
    sys.exit(app.exec_())
| {
"content_hash": "9c78dd6175a43fcbe27398b3d82b154a",
"timestamp": "",
"source": "github",
"line_count": 733,
"max_line_length": 90,
"avg_line_length": 35.74761255115962,
"alnum_prop": 0.6230965919932833,
"repo_name": "pxmkuruc/usd-qt",
"id": "f46570124a4528673b2fb3330764dbf2eaeacfee",
"size": "27263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pxr/usdQt/valueWidgets.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1384"
},
{
"name": "C++",
"bytes": "154318"
},
{
"name": "CMake",
"bytes": "67878"
},
{
"name": "Python",
"bytes": "329057"
}
],
"symlink_target": ""
} |
import os.path

# Absolute(ish) directory containing this module.  NOTE(review): presumably
# consumed as a Pelican THEME path -- confirm against the project's config.
theme_path = os.path.dirname(__file__)
| {
"content_hash": "bc899158e95f12a86008290252fe241d",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 38,
"avg_line_length": 14,
"alnum_prop": 0.6785714285714286,
"repo_name": "qmcs/pelicanium",
"id": "d66519c9161a6651fb10cf0426c745a21d74cc64",
"size": "56",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pelicanium.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "58238"
},
{
"name": "JavaScript",
"bytes": "57747"
},
{
"name": "Python",
"bytes": "1001"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Replace Report.verified (boolean) with Report.status, a three-state
    choice field: 'NR' Not Reviewed (default), 'VF' Verified, 'DN' Denied.

    NOTE(review): the old 'verified' values are dropped rather than mapped
    into 'status'; confirm that losing that flag is intended.
    """

    dependencies = [
        ('reports', '0010_auto_20170508_0943'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='report',
            name='verified',
        ),
        migrations.AddField(
            model_name='report',
            name='status',
            field=models.CharField(choices=[('NR', 'Not Reviewed'), ('VF', 'Verified'), ('DN', 'Denied')], default='NR', max_length=2),
        ),
    ]
| {
"content_hash": "a0f8b6fe960373be5e9b2405972955f8",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 135,
"avg_line_length": 25.727272727272727,
"alnum_prop": 0.5565371024734982,
"repo_name": "johnobrien/edmw",
"id": "eeed9fe01f46e37ae0ae91616a668247e03d5f58",
"size": "637",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reports/migrations/0011_auto_20170509_1029.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2947"
},
{
"name": "HTML",
"bytes": "23622"
},
{
"name": "JavaScript",
"bytes": "892"
},
{
"name": "Python",
"bytes": "56991"
},
{
"name": "Shell",
"bytes": "4196"
}
],
"symlink_target": ""
} |
'''
Pseudo-random django secret key generator.
via https://gist.github.com/mattseymour/9205591
'''
import string
import random
# ASCII letters, digits and punctuation, minus the quote/backslash
# characters that could terminate or escape a quoted string when the key is
# pasted into a settings file.
chars = ''.join([string.ascii_letters, string.digits, string.punctuation])
chars = chars.replace('\'', '').replace('"', '').replace('\\', '')

# Hoist the CSPRNG out of the loop: the original built a fresh
# random.SystemRandom instance for every one of the 50 characters.
_rng = random.SystemRandom()
SECRET_KEY = ''.join(_rng.choice(chars) for _ in range(50))

with open('.secret', 'w') as keyfile:
    keyfile.write(SECRET_KEY)
| {
"content_hash": "e12386e2b83ac885086ab41d4674c0eb",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 127,
"avg_line_length": 33.8,
"alnum_prop": 0.7120315581854043,
"repo_name": "PrincessTeruko/TeruPages",
"id": "9ba4f6c5c14846f1622eb56a6c47ef693e39056b",
"size": "531",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keygen.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27"
},
{
"name": "Python",
"bytes": "8457"
},
{
"name": "Shell",
"bytes": "137"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._generate_detailed_cost_report_operation_results_operations import build_get_request
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class GenerateDetailedCostReportOperationResultsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.costmanagement.aio.CostManagementClient`'s
        :attr:`generate_detailed_cost_report_operation_results` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # The client injects (client, config, serializer, deserializer)
        # positionally in this fixed order; the kwargs fallbacks support
        # keyword-style construction.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    async def _get_initial(
        self, operation_id: str, scope: str, **kwargs: Any
    ) -> Optional[_models.GenerateDetailedCostReportOperationResult]:
        # Single probe of the operation-results endpoint; used by begin_get
        # as the LRO's initial request.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", self._config.api_version)
        )  # type: Literal["2022-10-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[Optional[_models.GenerateDetailedCostReportOperationResult]]

        request = build_get_request(
            operation_id=operation_id,
            scope=scope,
            api_version=api_version,
            template_url=self._get_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        # 200 carries the finished result body; 202 is accepted/in-progress
        # and is deliberately left undeserialized (returns None below).
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize("GenerateDetailedCostReportOperationResult", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _get_initial.metadata = {"url": "/{scope}/providers/Microsoft.CostManagement/operationResults/{operationId}"}  # type: ignore

    @distributed_trace_async
    async def begin_get(
        self, operation_id: str, scope: str, **kwargs: Any
    ) -> AsyncLROPoller[_models.GenerateDetailedCostReportOperationResult]:
        """Gets the result of the specified operation. The link with this operationId is provided as a
        response header of the initial request.

        :param operation_id: The target operation Id. Required.
        :type operation_id: str
        :param scope: The ARM Resource ID for subscription, resource group, billing account, or other
         billing scopes. For details, see https://aka.ms/costmgmt/scopes. Required.
        :type scope: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either
         GenerateDetailedCostReportOperationResult or the result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.costmanagement.models.GenerateDetailedCostReportOperationResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", self._config.api_version)
        )  # type: Literal["2022-10-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.GenerateDetailedCostReportOperationResult]
        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda keeps the raw PipelineResponse so the poller can
            # inspect status/headers itself.
            raw_result = await self._get_initial(  # type: ignore
                operation_id=operation_id,
                scope=scope,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize("GenerateDetailedCostReportOperationResult", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))  # type: AsyncPollingMethod
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume a previously saved poller instead of issuing a new
            # initial request.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_get.metadata = {"url": "/{scope}/providers/Microsoft.CostManagement/operationResults/{operationId}"}  # type: ignore
| {
"content_hash": "a17b87e0e895b27d15b594152f3897e1",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 129,
"avg_line_length": 46.456647398843934,
"alnum_prop": 0.6682841856414085,
"repo_name": "Azure/azure-sdk-for-python",
"id": "9337dc341ee7efddf21b6800b18ce8f92b8b98c2",
"size": "8537",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/costmanagement/azure-mgmt-costmanagement/azure/mgmt/costmanagement/aio/operations/_generate_detailed_cost_report_operation_results_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
def insert_category_form(apps, schema_editor):
    """Seed the default 'Forms' resource category when none exist yet.

    Uses the historical ResourceCategory model (via ``apps.get_model``)
    rather than a direct import, so this migration keeps working even if
    the current model definition has diverged.
    """
    ResourceCategory = apps.get_model("web", "ResourceCategory")
    if ResourceCategory.objects.exists():
        # Categories already present (e.g. re-run on an existing DB): no-op.
        return
    ResourceCategory.objects.create(index=1, name='Forms')
class Migration(migrations.Migration):
    """Data migration: insert the default 'Forms' resource category."""

    # Must run after the helpful-links seed so the web app's data
    # migrations apply in a deterministic order.
    dependencies = [
        ('web', '0007_insert_default_helpful_links'),
    ]

    # NOTE(review): no reverse function is given, so this migration is
    # irreversible — confirm that is intentional.
    operations = [
        migrations.RunPython(insert_category_form),
    ]
| {
"content_hash": "65c6e5e009c0085d5ef7dbe407d5340f",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 73,
"avg_line_length": 29.5,
"alnum_prop": 0.7026194144838213,
"repo_name": "edilio/tobeawebproperty",
"id": "1db4bd5d0281cdcb44b419d4f759a2e98a95e5db",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "haweb/apps/web/migrations/0008_insert_default_category_forms_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "89037"
},
{
"name": "HTML",
"bytes": "43969"
},
{
"name": "Python",
"bytes": "113389"
},
{
"name": "Shell",
"bytes": "1315"
}
],
"symlink_target": ""
} |
from __future__ import division
from iotbx.pdb.multimer_reconstruction import multimer
import iotbx.pdb
import mmtbx.f_model
from cctbx import xray
import scitbx.lbfgs
import getpass
import os
import sys
from scitbx.array_family import flex
ncs_1_copy="""\
MTRIX1 1 1.000000 0.000000 0.000000 0.00000 1
MTRIX2 1 0.000000 1.000000 0.000000 0.00000 1
MTRIX3 1 0.000000 0.000000 1.000000 0.00000 1
MTRIX1 2 0.496590 -0.643597 0.582393 0.00000
MTRIX2 2 0.867925 0.376088 -0.324443 0.00000
MTRIX3 2 -0.010221 0.666588 0.745356 0.00000
MTRIX1 3 -0.317946 -0.173437 0.932111 0.00000
MTRIX2 3 0.760735 -0.633422 0.141629 0.00000
MTRIX3 3 0.565855 0.754120 0.333333 0.00000
ATOM 1 N THR A 1 9.670 10.289 11.135 1.00 20.00 N
ATOM 2 CA THR A 1 9.559 8.931 10.615 1.00 20.00 C
ATOM 3 C THR A 1 9.634 7.903 11.739 1.00 20.00 C
ATOM 4 O THR A 1 10.449 8.027 12.653 1.00 20.00 O
ATOM 5 CB THR A 1 10.660 8.630 9.582 1.00 20.00 C
ATOM 6 OG1 THR A 1 10.560 9.552 8.490 1.00 20.00 O
ATOM 7 CG2 THR A 1 10.523 7.209 9.055 1.00 20.00 C
TER
"""
class minimizer(object):
def __init__(self,
fmodel,
ncs_transformations_object=None,
ncs_atom_selection = None,
run_finite_grad_differences_test = False,
max_iterations=100,
sites = False,
u_iso = False):
"""Implementing strict NCS to refinement minimization
Arguments:
fmodel : fmodel of the complete ASU
ncs_transformation_object : information on the NCS to ASU
transformations and chains. A multimer object
ncs_atom_selection : boolean array for selection of atoms in the NCS.
A flex bool array
"""
self.fmodel = fmodel
self.fmodel.xray_structure.scatterers().flags_set_grads(state=False)
self.x_target_functor = self.fmodel.target_functor()
self.sites = sites
self.u_iso = u_iso
self.ncs_to_asu = ncs_transformations_object
self.run_finite_grad_differences_test = run_finite_grad_differences_test
if run_finite_grad_differences_test:
# perform gradient calc test
self.buffer_max_grad = flex.double()
self.buffer_calc_grad = flex.double()
# xray structure of NCS chains for self.x
ncs_fmodel_xrs = self.fmodel.xray_structure.select(ncs_atom_selection)
if(self.sites):
self.x = ncs_fmodel_xrs.sites_cart().as_double()
if(self.u_iso):
assert ncs_fmodel_xrs.scatterers().size() == \
ncs_fmodel_xrs.use_u_iso().count(True)
self.x = ncs_fmodel_xrs.extract_u_iso_or_u_equiv()
# Use all scatterers for gradient calculations
if(self.sites):
xray.set_scatterer_grad_flags(
scatterers = self.fmodel.xray_structure.scatterers(),
site = True)
if(self.u_iso):
sel = flex.bool(
self.fmodel.xray_structure.scatterers().size(), True).iselection()
self.fmodel.xray_structure.scatterers().flags_set_grad_u_iso(
iselection = sel)
self.minimizer = scitbx.lbfgs.run(
target_evaluator=self,
termination_params=scitbx.lbfgs.termination_parameters(
max_iterations=max_iterations),
exception_handling_params=scitbx.lbfgs.exception_handling_parameters(
ignore_line_search_failed_rounding_errors=True,
ignore_line_search_failed_step_at_lower_bound=True,
ignore_line_search_failed_maxfev=True))
self.fmodel.xray_structure.tidy_us()
self.fmodel.xray_structure.apply_symmetry_sites()
self.fmodel.update_xray_structure(
xray_structure = self.fmodel.xray_structure,
update_f_calc = True)
self.tested = 0
if run_finite_grad_differences_test:
if self.buffer_max_grad:
print 'compare max_grad to calc_grad'
for a,f in zip(self.buffer_max_grad, self.buffer_calc_grad):
print '{0:10.5f} {1:10.5f} delta = {2:10.5f}'.format(a,f,abs(a-f))
print '-'*45
diff = flex.abs(self.buffer_max_grad - self.buffer_calc_grad)
s = diff < 1.e-3
if(s.size()>0 and s.count(True)*100./s.size()>50):
self.tested += 1
def compute_functional_and_gradients(self,compute_gradients=True):
"""(bool) -> float, flex.double array
Function which calculates the target function and gradients.
It is called by the lbfgs minimizer
Argument:
compute_gradients : When True gradients are calculated
"""
if(self.sites):
self.update_model_sites()
elif(self.u_iso):
self.update_model_asu_b_factors()
self.fmodel.update_xray_structure(
xray_structure = self.fmodel.xray_structure,
update_f_calc = True)
tgx = self.x_target_functor(compute_gradients=compute_gradients)
if(self.sites):
tx = tgx.target_work()
f = tx
if compute_gradients:
gx = flex.vec3_double(tgx.\
gradients_wrt_atomic_parameters(site=True).packed())
g = self.average_grad(grad=gx,apply_rotation=True).as_double()
if self.run_finite_grad_differences_test:
self.finite_difference_test(g)
if(self.u_iso):
tx = tgx.target_work()
f = tx
if compute_gradients:
gx = tgx.gradients_wrt_atomic_parameters(u_iso=True)
g = self.average_grad(grad=gx,apply_rotation=False).as_double()
if not compute_gradients:
g = None
return f, g
def update_model_sites(self,x=None):
"""
update fmodel using a complete ASU
Argument
x : sites coordinates of a single NCS
"""
if not x:
x = self.x
# rebuild the complete ASU coordinate from the NCS
x_asu = self.rebuild_asu_from_ncs_coordinates(x)
self.fmodel.xray_structure.set_sites_cart(
sites_cart = flex.vec3_double(x_asu))
def update_model_asu_b_factors(self,x_asu=None):
"""
update fmodel using a complete set of B-factors
All B-factors are the same
Argument
x_asu : B-factors of a single NCS
"""
if not x_asu:
x_asu = self.rebuild_asu_b_factors()
self.fmodel.xray_structure.set_u_iso(values = x_asu)
def rebuild_asu_b_factors(self):
"""
"""
n = self.ncs_to_asu.number_of_transforms + 1
x = list(self.x) * n
return flex.double(x)
def rebuild_asu_from_ncs_coordinates(self, x):
""" apply rotation and translation to x
Argument:
x : sites coordinates of single NCS
returns:
new_x : coordinates of the original x and all coordinates resulting
from application of rotation and translation.
type scitbx_array_family_flex_ext.double
"""
rotations = self.ncs_to_asu.rotation_matrices
translations = self.ncs_to_asu.translation_vectors
assert len(rotations)==len(translations)
new_x = list(x)
x = flex.vec3_double(x)
for r,t in zip(rotations,translations):
tmp_x = r.elems*x + t
new_x += list(tmp_x.as_double())
return flex.double(new_x)
def average_grad(self,grad,apply_rotation=False):
"""(vec3_double,bool) -> vec3_double
Argument:
grad : the gradient of the complete ASU
apply_rotation : If true, apply NCS rotation before averaging
Returns:
g_ave : The average the gradients of all NCS copies in the ASU
"""
n = self.ncs_to_asu.number_of_transforms
# gradients of the first NCS copy
ncs_end = len(grad)//(n+1)
assert ncs_end*(n+1)==len(grad)
g_ave = grad[:ncs_end]
for i in range(n):
g = grad[ncs_end*(i+1):ncs_end*(i+2)]
if apply_rotation:
# multiply the transpose of the rotation of each NCS copy
# gradients, by the gradients
rt = self.ncs_to_asu.rotation_matrices[i].transpose().elems
g = rt*g
g_ave += g
# average the NCS copies contributions
g_ave = g_ave.as_double()/(n+1)
if apply_rotation: g_ave = flex.vec3_double(g_ave)
assert type(grad)==type(g_ave)
return g_ave
def finite_difference_test(self,g):
"""
Run basic gradient test. compare numerical estimate gradient to
the largest calculated one. using t'(x)=(t(x+d)-t(x-d))/(2d)
Argument:
g : gradient, flex array
"""
if(self.fmodel.r_work()>1.e-3):
g = g.as_double()
d = 1.e-5
# find the index of the max gradient value
i_g_max = flex.max_index(flex.abs(g))
x_d = self.x
# calc t(x+d)
x_d[i_g_max] = self.x[i_g_max] + d
self.update_model_sites(x = x_d)
self.fmodel.update_xray_structure(update_f_calc=True)
t1,_ = self.compute_functional_and_gradients(compute_gradients=False)
# calc t(x-d)
x_d[i_g_max] = self.x[i_g_max] - d
self.update_model_sites(x = x_d)
del x_d
self.fmodel.update_xray_structure(update_f_calc=True)
t2,_ = self.compute_functional_and_gradients(compute_gradients=False)
# Return fmodel to the correct coordinates values
self.update_model_sites(x = self.x)
self.fmodel.update_xray_structure(update_f_calc=True)
self.buffer_max_grad.append(g[i_g_max])
self.buffer_calc_grad.append((t1-t2)/(d*2))
def save_pdb_file(macro_cycle,fmodel,m_shaken,u_iso,sites):
  """
  Save a pdb file of the current refined structure, for visualization.

  Arguments:
  macro_cycle : (int) refinement macro-cycle number, embedded in the name
  fmodel : fmodel holding the refined xray structure
  m_shaken : multimer whose hierarchy receives the refined structure
  u_iso, sites : (bool) which refinement modes ran; label the file name
  """
  method = ''
  if sites: method += '_sites'
  if u_iso: method += '_u_iso'
  # BUGFIX: the zero-padded cycle number was computed but the raw
  # macro_cycle was formatted into the name, so alphabetical ordering of
  # the files (relied on by create_pymol_movie's glob) broke at cycle 10.
  n = str(macro_cycle)
  if len(n)==1: n = '0'+n
  fn = 'refinement_by{0}_cycle_{1}.pdb'.format(method,n)
  xrs_refined = fmodel.xray_structure
  m_shaken.assembled_multimer.adopt_xray_structure(xrs_refined)
  m_shaken.write(fn)
def create_pymol_movie():
  """Load every refinement snapshot into PyMOL as states of one movie."""
  from glob import glob
  import pymol
  snapshots = glob("refinement*.pdb")
  pymol.finish_launching()
  pymol.cmd.bg_color('white')
  state = 0
  for snapshot in snapshots:
    # each pdb becomes one state of the "mov" object
    pymol.cmd.load(snapshot,"mov",state=state)
    pymol.cmd.mview('store')
    state += 1
  pymol.cmd.mset("1 -%d"%len(snapshots))
def run(
n_macro_cycle=10,
sites=True,
u_iso=False,
run_finite_grad_differences_test = False):
"""
Arguments:
__________
n_macro_cycle : Number of refinement macro cycles
"""
# 1 NCS copy: starting template to generate whole asu; place into P1 box
pdb_inp = iotbx.pdb.input(source_info=None, lines=ncs_1_copy)
mtrix_object = pdb_inp.process_mtrix_records()
ph = pdb_inp.construct_hierarchy()
xrs = pdb_inp.xray_structure_simple()
xrs_one_ncs = xrs.orthorhombic_unit_cell_around_centered_scatterers(
buffer_size=8)
ph.adopt_xray_structure(xrs_one_ncs)
of = open("one_ncs_in_asu.pdb", "w")
print >> of, mtrix_object.format_MTRIX_pdb_string()
print >> of, ph.as_pdb_string(crystal_symmetry=xrs_one_ncs.crystal_symmetry())
of.close()
# 1 NCS copy -> full asu (expand NCS). This is the answer-strucure
m = multimer("one_ncs_in_asu.pdb",'cau',error_handle=True,eps=1e-2)
assert m.number_of_transforms == 2, m.number_of_transforms
xrs_asu = m.assembled_multimer.extract_xray_structure(
crystal_symmetry = xrs_one_ncs.crystal_symmetry())
m.write("full_asu.pdb")
assert xrs_asu.crystal_symmetry().is_similar_symmetry(
xrs_one_ncs.crystal_symmetry())
# Generate Fobs from answer structure
f_obs = abs(xrs_asu.structure_factors(d_min=2, algorithm="direct").f_calc())
r_free_flags = f_obs.generate_r_free_flags()
mtz_dataset = f_obs.as_mtz_dataset(column_root_label="F-obs")
mtz_dataset.add_miller_array(
miller_array=r_free_flags,
column_root_label="R-free-flags")
mtz_object = mtz_dataset.mtz_object()
mtz_object.write(file_name = "data.mtz")
# Shake structure - subject to refinement input
xrs_shaken = xrs_one_ncs.deep_copy_scatterers()
if sites:
xrs_shaken.shake_sites_in_place(mean_distance=0.3)
if u_iso:
xrs_shaken.shake_adp()
ph.adopt_xray_structure(xrs_shaken)
of = open("one_ncs_in_asu_shaken.pdb", "w")
print >> of, mtrix_object.format_MTRIX_pdb_string()
print >> of, ph.as_pdb_string(crystal_symmetry=xrs.crystal_symmetry())
of.close()
### Refinement
params = mmtbx.f_model.sf_and_grads_accuracy_master_params.extract()
params.algorithm = "direct"
# Get the xray_structure of the shaken ASU
m_shaken = multimer(
pdb_input_file_name="one_ncs_in_asu_shaken.pdb",
reconstruction_type='cau',error_handle=True,eps=1e-2)
xrs_shaken_asu = m_shaken.assembled_multimer.as_pdb_input().\
xray_structure_simple(crystal_symmetry=xrs_one_ncs.crystal_symmetry())
# Save the shaken ASU for inspection
m_shaken.write(pdb_output_file_name='asu_shaken.pdb')
# Create a boolean selection string for selecting chains in NCS
selection_str = 'chain A'
ncs_selection = m_shaken.assembled_multimer.\
atom_selection_cache().selection(selection_str)
fmodel = mmtbx.f_model.manager(
f_obs = f_obs,
r_free_flags = r_free_flags,
xray_structure = xrs_shaken_asu,
sf_and_grads_accuracy_params = params,
target_name = "ls_wunit_k1")
print "start r_factor: %6.4f" % fmodel.r_work()
refine_method = 'sites'
for macro_cycle in xrange(n_macro_cycle):
# refine coordinates
if(sites):
minimized = minimizer(
fmodel = fmodel,
ncs_transformations_object=m,
ncs_atom_selection = ncs_selection,
run_finite_grad_differences_test = run_finite_grad_differences_test,
sites = True)
print " macro_cycle %3d (sites) r_factor: %6.4f"%(macro_cycle,
fmodel.r_work())
# refine ADPs
if(u_iso):
minimized = minimizer(
fmodel = fmodel,
ncs_transformations_object=m,
ncs_atom_selection = ncs_selection,
run_finite_grad_differences_test = run_finite_grad_differences_test,
u_iso = True)
print " macro_cycle %3d (adp) r_factor: %6.4f"%(macro_cycle,
fmodel.r_work())
if (0): save_pdb_file(
macro_cycle=macro_cycle,
fmodel=fmodel,
m_shaken=m_shaken,
u_iso=u_iso,
sites=sites)
if (1): save_pdb_file(
macro_cycle=macro_cycle,
fmodel=fmodel,
m_shaken=m_shaken,
u_iso=u_iso,
sites=sites)
# create_pymol_movie()
def set_test_folder():
  """
  Change working directory to avoid littering of
  phenix_sources\\phenix_regression\\development\\ncs_constraints.py
  (only applies on the author's machines; a no-op for everyone else)
  """
  if getpass.getuser().lower() != 'youval':
    return
  if sys.platform.startswith('win'):
    workdir = r'C:\Phenix\Dev\Work\work\NCS\junk'
  else:
    workdir = '/net/cci/youval/Work/work/NCS/junk'
  os.chdir(workdir)
if __name__ == "__main__":
  # Script entry point: run a 20-cycle sites-only refinement test.
  set_test_folder()
  run(n_macro_cycle=20,
      sites=True,
      u_iso=False,
      run_finite_grad_differences_test = False)
"content_hash": "cb9c78595dd877fb7a2b06f6eb88b089",
"timestamp": "",
"source": "github",
"line_count": 412,
"max_line_length": 80,
"avg_line_length": 36.26699029126213,
"alnum_prop": 0.6383348949270513,
"repo_name": "youdar/work",
"id": "271941862362dc98423ab0dc905279b724a095ce",
"size": "14942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "work/NCS/test_files/run_03_original.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6474"
},
{
"name": "C++",
"bytes": "396"
},
{
"name": "Jupyter Notebook",
"bytes": "414"
},
{
"name": "Makefile",
"bytes": "6783"
},
{
"name": "Matlab",
"bytes": "7487"
},
{
"name": "Python",
"bytes": "1492762"
},
{
"name": "Scheme",
"bytes": "679"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drop the FeaturedRep.user column
    (replaced by the many-to-many 'users' relation in the frozen ORM below).
    """

    def forwards(self, orm):
        # Deleting field 'FeaturedRep.user'
        db.delete_column(u'featuredrep_featuredrep', 'user_id')

    def backwards(self, orm):
        # Adding field 'FeaturedRep.user'
        # NOTE(review): default=None on a ForeignKey will fail on a table
        # with existing rows unless the column is nullable — confirm the
        # reverse path was ever exercised.
        db.add_column(u'featuredrep_featuredrep', 'user',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['auth.User']),
                      keep_default=False)

    # Frozen ORM snapshot (auto-generated by South); do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'featuredrep.featuredrep': {
            'Meta': {'ordering': "['-updated_on']", 'object_name': 'FeaturedRep'},
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reps_featured'", 'to': u"orm['auth.User']"}),
            'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'updated_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'featuredrep_users'", 'symmetrical': 'False', 'to': u"orm['auth.User']"})
        }
    }

    complete_apps = ['featuredrep']
"content_hash": "fb9a41665bf00b594837c056f15ae284",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 187,
"avg_line_length": 66.02898550724638,
"alnum_prop": 0.5625548726953468,
"repo_name": "abdullah2891/remo",
"id": "2feab688d07570c757ad1faca88c9674eb81a6fc",
"size": "4580",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "remo/featuredrep/migrations/0006_auto__del_field_featuredrep_user.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "993"
},
{
"name": "Batchfile",
"bytes": "4531"
},
{
"name": "CSS",
"bytes": "372453"
},
{
"name": "HTML",
"bytes": "373393"
},
{
"name": "JavaScript",
"bytes": "606447"
},
{
"name": "Makefile",
"bytes": "4630"
},
{
"name": "Puppet",
"bytes": "7140"
},
{
"name": "Python",
"bytes": "7467560"
},
{
"name": "Shell",
"bytes": "3221"
},
{
"name": "Smarty",
"bytes": "215"
},
{
"name": "TeX",
"bytes": "1525"
}
],
"symlink_target": ""
} |
import logging
import sys
import numpy as np
from .... import Executor
from .... import io
from .... import core, scope_guard
from ....compiler import CompiledProgram
from ....compiler import BuildStrategy
from ....framework import IrGraph, Variable, Program
from ....log_helper import get_logger
from ..core.strategy import Strategy
from .quantization_pass import *
# Public API of this module.
__all__ = ['QuantizationStrategy']

# Module-level logger shared by the strategy below.
_logger = get_logger(
    __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s')
class QuantizationStrategy(Strategy):
    """
    The strategy for Quantization.

    Inserts fake quantize/dequantize ops at start_epoch, freezes the graph
    and saves float/int8/mobile inference models at end_epoch.
    """

    def __init__(self,
                 start_epoch=0,
                 end_epoch=0,
                 float_model_save_path=None,
                 mobile_model_save_path=None,
                 int8_model_save_path=None,
                 activation_bits=8,
                 weight_bits=8,
                 activation_quantize_type='abs_max',
                 weight_quantize_type='abs_max',
                 save_in_nodes=None,
                 save_out_nodes=None):
        """
        Args:
            start_epoch(int): The 'on_epoch_begin' function will be called in start_epoch. default: 0
            end_epoch(int): The 'on_epoch_end' function will be called in end_epoch. default: 0
            float_model_save_path(str): The path to save model with float weights.
                None means it doesn't save float model. default: None.
            mobile_model_save_path(str): The path to save model for paddle-mobile execution.
                None means it doesn't save mobile model. default: None.
            int8_model_save_path(str): The path to save model with int8_t weight.
                None means it doesn't save int8 model. default: None.
            activation_bits(int): quantization bit number for activation. default: 8.
            weight_bits(int): quantization bit number for weights. The bias is not quantized.
                default: 8.
            activation_quantize_type(str): quantization type for activation,
                now support 'abs_max', 'range_abs_max' and 'moving_average_abs_max'.
                If use 'abs_max' mode, the quantization scale will be calculated
                dynamically each step in both training and testing period. If use
                'range_abs_max', a static quantization scale will be calculated
                during training and used in inference.
            weight_quantize_type(str): quantization type for weights, support 'abs_max' and 'channel_wise_abs_max'.
                The 'range_abs_max' usually is not used for weight, since weights are fixed once the model is well trained.
            save_in_nodes(list<str>): A list of variable names used to prune graph
                for saving inference model.
            save_out_nodes(list<str>): A list of variable names used to prune graph
                for saving inference model.
        """
        super(QuantizationStrategy, self).__init__(start_epoch, end_epoch)
        self.start_epoch = start_epoch
        self.end_epoch = end_epoch
        self.float_model_save_path = float_model_save_path
        self.mobile_model_save_path = mobile_model_save_path
        self.int8_model_save_path = int8_model_save_path
        self.activation_bits = activation_bits
        self.weight_bits = weight_bits
        self.activation_quantize_type = activation_quantize_type
        self.weight_quantize_type = weight_quantize_type
        self.save_out_nodes = save_out_nodes
        self.save_in_nodes = save_in_nodes

    def restore_from_checkpoint(self, context):
        """
        Restore graph when the compression task is inited from checkpoint.
        """
        # It is inited from checkpoint and has missed start epoch.
        if context.epoch_id != 0 and context.epoch_id > self.start_epoch:
            _logger.info("Restore quantization task from checkpoint")
            self._modify_graph_for_quantization(context)
            _logger.info("Finish restoring quantization task from checkpoint")

    def _modify_graph_for_quantization(self, context):
        """
        Insert fake_quantize_op and fake_dequantize_op before training and testing.
        """
        train_ir_graph = IrGraph(
            core.Graph(context.optimize_graph.program.clone().desc),
            for_test=False)
        test_ir_graph = IrGraph(
            core.Graph(context.eval_graph.program.clone().desc), for_test=True)
        transform_pass = QuantizationTransformPass(
            scope=context.scope,
            place=context.place,
            weight_bits=self.weight_bits,
            activation_bits=self.activation_bits,
            activation_quantize_type=self.activation_quantize_type,
            weight_quantize_type=self.weight_quantize_type)
        transform_pass.apply(train_ir_graph)
        transform_pass.apply(test_ir_graph)
        # Put persistables created by transform_pass into context.optimize_graph.persistables
        # for saving checkpoint.
        program_persistables = set()
        for var in context.optimize_graph.program.list_vars():
            if var.persistable:
                program_persistables.add(var.name)
        program = Program()
        for var_node in train_ir_graph.all_persistable_nodes():
            if var_node.name() not in program_persistables:
                var_desc = var_node.var()
                var = program.global_block().create_var(
                    name=var_node.name(),
                    shape=var_desc.shape(),
                    dtype=var_desc.dtype(),
                    type=var_desc.type(),
                    lod_level=var_desc.lod_level())
                context.optimize_graph.persistables[var.name] = var
        # The quantized graph breaks the assumptions of these optimizations,
        # so disable them for the compiled training program.
        build_strategy = BuildStrategy()
        build_strategy.enable_inplace = False
        build_strategy.memory_optimize = False
        build_strategy.fuse_all_reduce_ops = False
        # for quantization training
        context.optimize_graph.compiled_graph = CompiledProgram(
            train_ir_graph.graph).with_data_parallel(
                loss_name=context.optimize_graph.out_nodes['loss'],
                build_strategy=build_strategy)
        context.eval_graph.program = test_ir_graph.to_program()
        # for saving inference model after training
        context.put('quantization_test_ir_graph_backup', test_ir_graph)

    def on_epoch_begin(self, context):
        """
        Insert fake_quantize_op and fake_dequantize_op before training and testing.
        """
        super(QuantizationStrategy, self).on_epoch_begin(context)
        if self.start_epoch == context.epoch_id:
            _logger.info('QuantizationStrategy::on_epoch_begin')
            self._modify_graph_for_quantization(context)
            _logger.info('Finish QuantizationStrategy::on_epoch_begin')

    def on_epoch_end(self, context):
        """
        Freeze the quantized graph and save inference model(s).
        """
        # NOTE(review): this calls the base on_compression_end from
        # on_epoch_end — looks like a copy/paste slip (on_epoch_begin calls
        # the matching base hook); confirm against the Strategy base class.
        super(QuantizationStrategy, self).on_compression_end(context)
        if context.epoch_id == self.end_epoch:
            _logger.info('QuantizationStrategy::on_epoch_end')
            test_ir_graph = context.get('quantization_test_ir_graph_backup')
            # freeze the graph after training
            freeze_pass = QuantizationFreezePass(
                scope=context.scope,
                place=context.place,
                weight_bits=self.weight_bits,
                activation_bits=self.activation_bits,
                weight_quantize_type=self.weight_quantize_type)
            freeze_pass.apply(test_ir_graph)
            # for other strategies
            context.eval_graph.program = test_ir_graph.to_program()
            # Resolve the output/input variables used to prune the graph
            # when saving inference models.  (== None replaced by the
            # idiomatic "is None" — identical behavior here.)
            if self.save_out_nodes is None:
                out_vars = [
                    context.eval_graph.var(var_name)._var
                    for var_name in context.eval_graph.out_nodes.values()
                ]
            else:
                out_vars = [
                    context.eval_graph.var(var_name)._var
                    for var_name in self.save_out_nodes
                ]
            if self.save_in_nodes is None:
                in_vars = list(context.eval_graph.in_nodes.values())
            else:
                in_vars = self.save_in_nodes

            # save float model
            if self.float_model_save_path:
                executor = Executor(context.place)
                with scope_guard(context.scope):
                    io.save_inference_model(
                        self.float_model_save_path,
                        in_vars,
                        out_vars,
                        executor,
                        main_program=test_ir_graph.to_program(),
                        model_filename='model',
                        params_filename='weights',
                        export_for_deployment=True)

            # save int8 model
            if self.int8_model_save_path:
                convert_int8_pass = ConvertToInt8Pass(
                    scope=context.scope, place=context.place)
                convert_int8_pass.apply(test_ir_graph)
                executor = Executor(context.place)
                with scope_guard(context.scope):
                    io.save_inference_model(
                        self.int8_model_save_path,
                        in_vars,
                        out_vars,
                        executor,
                        main_program=test_ir_graph.to_program(),
                        model_filename='model',
                        params_filename='weights',
                        export_for_deployment=True)

            # save mobile model
            if self.mobile_model_save_path:
                if not self.int8_model_save_path:
                    # convert the weights as int8_t type
                    convert_int8_pass = ConvertToInt8Pass(
                        scope=context.scope, place=context.place)
                    convert_int8_pass.apply(test_ir_graph)
                # make some changes on the graph for the mobile inference
                mobile_pass = TransformForMobilePass()
                mobile_pass.apply(test_ir_graph)
                executor = Executor(context.place)
                with scope_guard(context.scope):
                    io.save_inference_model(
                        self.mobile_model_save_path,
                        in_vars,
                        out_vars,
                        executor,
                        main_program=test_ir_graph.to_program(),
                        model_filename='model',
                        params_filename='weights',
                        export_for_deployment=True)
            _logger.info('Finish QuantizationStrategy::on_epoch_end')
| {
"content_hash": "b4ad7b8f45ec680641cf298b41faa3fb",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 119,
"avg_line_length": 45.125,
"alnum_prop": 0.566943674976916,
"repo_name": "chengduoZH/Paddle",
"id": "5004faeea78c1491ef33e7ebc6bee2b45d9823d8",
"size": "11441",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/contrib/slim/quantization/quantization_strategy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32490"
},
{
"name": "C++",
"bytes": "10146609"
},
{
"name": "CMake",
"bytes": "291349"
},
{
"name": "Cuda",
"bytes": "1192566"
},
{
"name": "Dockerfile",
"bytes": "10002"
},
{
"name": "Python",
"bytes": "7124331"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "200906"
}
],
"symlink_target": ""
} |
"""A pool for sqlite connections."""
import logging
import threading
import traceback
from typing import Any
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.pool import NullPool, SingletonThreadPool, StaticPool
from homeassistant.helpers.frame import report
from homeassistant.util.async_ import check_loop
from .const import DB_WORKER_PREFIX
_LOGGER = logging.getLogger(__name__)

# For debugging the MutexPool
DEBUG_MUTEX_POOL = True
DEBUG_MUTEX_POOL_TRACE = False

# Pool size used by RecorderPool for recorder/db-worker threads.
POOL_SIZE = 5

# Guidance appended to warnings when the database is accessed outside the
# database executor.
ADVISE_MSG = (
    "Use homeassistant.components.recorder.get_instance(hass).async_add_executor_job()"
)
class RecorderPool(SingletonThreadPool, NullPool):  # type: ignore[misc]
    """A hybrid of NullPool and SingletonThreadPool.

    When called from the creating thread or db executor acts like SingletonThreadPool
    When called from any other thread, acts like NullPool
    """

    def __init__(  # pylint: disable=super-init-not-called
        self, *args: Any, **kw: Any
    ) -> None:
        """Create the pool."""
        kw["pool_size"] = POOL_SIZE
        # Deliberately initialize only the SingletonThreadPool side of the
        # hybrid (hence the pylint disable above).
        SingletonThreadPool.__init__(self, *args, **kw)

    @property
    def recorder_or_dbworker(self) -> bool:
        """Check if the thread is a recorder or dbworker thread."""
        thread_name = threading.current_thread().name
        return bool(
            thread_name == "Recorder" or thread_name.startswith(DB_WORKER_PREFIX)
        )

    # Any can be switched out for ConnectionPoolEntry in the next version of sqlalchemy
    def _do_return_conn(self, conn: Any) -> Any:
        # Recorder/db-worker threads return their connection to the
        # singleton pool; any other thread's connection is closed
        # immediately (NullPool-like behavior).
        if self.recorder_or_dbworker:
            return super()._do_return_conn(conn)
        conn.close()

    def shutdown(self) -> None:
        """Close the connection."""
        # Only closes the current thread-local connection, and only for
        # recorder/db-worker threads; hasattr guards a pool that never
        # created one.
        if (
            self.recorder_or_dbworker
            and self._conn
            and hasattr(self._conn, "current")
            and (conn := self._conn.current())
        ):
            conn.close()

    def dispose(self) -> None:
        """Dispose of the connection."""
        # Disposal only applies to the pooled (recorder/db-worker) side.
        if self.recorder_or_dbworker:
            super().dispose()

    # Any can be switched out for ConnectionPoolEntry in the next version of sqlalchemy
    def _do_get(self) -> Any:
        if self.recorder_or_dbworker:
            return super()._do_get()
        # check_loop raises when called from the event loop (strict=True);
        # otherwise fall through to a warned, throwaway connection.
        check_loop(
            self._do_get_db_connection_protected,
            strict=True,
            advise_msg=ADVISE_MSG,
        )
        return self._do_get_db_connection_protected()

    def _do_get_db_connection_protected(self) -> Any:
        # Warn that the DB is accessed without the database executor, then
        # hand out a fresh NullPool-style connection.
        report(
            "accesses the database without the database executor; "
            f"{ADVISE_MSG} "
            "for faster database operations",
            exclude_integrations={"recorder"},
            error_if_core=False,
        )
        return super(NullPool, self)._create_connection()
class MutexPool(StaticPool):  # type: ignore[misc]
    """A pool which prevents concurrent accesses from multiple threads.

    This is used in tests to prevent unsafe concurrent accesses to in-memory SQLite
    databases.
    """

    # Debug-only count of connections currently checked out.
    _reference_counter = 0
    # NOTE(review): only annotated here — the RLock must be assigned to
    # MutexPool.pool_lock externally before the pool is used.
    pool_lock: threading.RLock

    def _do_return_conn(self, conn: Any) -> None:
        # Returning a connection releases the mutex acquired in _do_get,
        # so the lock is held for the whole checkout lifetime.
        if DEBUG_MUTEX_POOL_TRACE:
            trace = traceback.extract_stack()
            trace_msg = "\n" + "".join(traceback.format_list(trace[:-1]))
        else:
            trace_msg = ""

        super()._do_return_conn(conn)
        if DEBUG_MUTEX_POOL:
            self._reference_counter -= 1
            _LOGGER.debug(
                "%s return conn ctr: %s%s",
                threading.current_thread().name,
                self._reference_counter,
                trace_msg,
            )
        MutexPool.pool_lock.release()

    def _do_get(self) -> Any:
        if DEBUG_MUTEX_POOL_TRACE:
            trace = traceback.extract_stack()
            trace_msg = "".join(traceback.format_list(trace[:-1]))
        else:
            trace_msg = ""

        if DEBUG_MUTEX_POOL:
            _LOGGER.debug("%s wait conn%s", threading.current_thread().name, trace_msg)
        # 1 s timeout keeps a deadlocked test from hanging forever.
        got_lock = MutexPool.pool_lock.acquire(timeout=1)
        if not got_lock:
            raise SQLAlchemyError
        conn = super()._do_get()
        if DEBUG_MUTEX_POOL:
            self._reference_counter += 1
            _LOGGER.debug(
                "%s get conn: ctr: %s",
                threading.current_thread().name,
                self._reference_counter,
            )
        return conn
| {
"content_hash": "bc8f3ff17d02ab2ac3a4ae34b01bad00",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 87,
"avg_line_length": 31.43661971830986,
"alnum_prop": 0.5965501792114696,
"repo_name": "nkgilley/home-assistant",
"id": "a8579df834c36e85eef170e440623c293e05ece0",
"size": "4464",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/recorder/pool.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "51597279"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
"""
Implements a disjoint set using Lists and some added heuristics for efficiency
Union by Rank Heuristic and Path Compression
"""
class DisjointSet:
    """Disjoint-set (union-find) structure over indexed sets.

    Uses the union-by-rank heuristic and path compression for near-constant
    amortized operations, and tracks the largest set size in ``max_set``.
    """

    def __init__(self, set_counts: list) -> None:
        """
        Initialize with a list of the number of items in each set
        and with rank = 1 for each set
        """
        self.set_counts = set_counts
        self.max_set = max(set_counts)
        num_sets = len(set_counts)
        self.ranks = [1] * num_sets
        self.parents = list(range(num_sets))

    def merge(self, src: int, dst: int) -> bool:
        """
        Merge two sets together using Union by rank heuristic
        Return True if successful

        Merge two disjoint sets
        >>> A = DisjointSet([1, 1, 1])
        >>> A.merge(1, 2)
        True
        >>> A.merge(0, 2)
        True
        >>> A.merge(0, 1)
        False
        """
        src_parent = self.get_parent(src)
        dst_parent = self.get_parent(dst)

        if src_parent == dst_parent:
            # Already in the same set; nothing to merge.
            return False

        if self.ranks[dst_parent] >= self.ranks[src_parent]:
            # Attach the shallower (src) tree under the deeper (dst) root.
            self.set_counts[dst_parent] += self.set_counts[src_parent]
            self.set_counts[src_parent] = 0
            self.parents[src_parent] = dst_parent
            if self.ranks[dst_parent] == self.ranks[src_parent]:
                # Equal ranks: the merged tree grows one level deeper.
                self.ranks[dst_parent] += 1
            joined_set_size = self.set_counts[dst_parent]
        else:
            self.set_counts[src_parent] += self.set_counts[dst_parent]
            self.set_counts[dst_parent] = 0
            self.parents[dst_parent] = src_parent
            joined_set_size = self.set_counts[src_parent]

        self.max_set = max(self.max_set, joined_set_size)
        return True

    def get_parent(self, disj_set: int) -> int:
        """
        Find the Parent of a given set, with full path compression.

        Implemented iteratively so that arbitrarily deep parent chains
        cannot overflow Python's recursion limit.
        >>> A = DisjointSet([1, 1, 1])
        >>> A.merge(1, 2)
        True
        >>> A.get_parent(0)
        0
        >>> A.get_parent(1)
        2
        """
        # First pass: walk up to the root of the tree.
        root = disj_set
        while self.parents[root] != root:
            root = self.parents[root]
        # Second pass: point every node on the path directly at the root.
        while self.parents[disj_set] != root:
            self.parents[disj_set], disj_set = root, self.parents[disj_set]
        return root
| {
"content_hash": "233f01116aa81b85607367bd13967c69",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 78,
"avg_line_length": 32.23529411764706,
"alnum_prop": 0.5456204379562044,
"repo_name": "TheAlgorithms/Python",
"id": "5103335bc80a91deb0490bda6bc36498f5d6f24f",
"size": "2192",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data_structures/disjoint_set/alternate_disjoint_set.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2601694"
}
],
"symlink_target": ""
} |
import sys
import json
import time
import datetime
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from decimal import Decimal
from tempfile import NamedTemporaryFile
PY3 = sys.version_info[0] == 3
class PostgresToGoogleCloudStorageOperator(BaseOperator):
    """
    Copy data from Postgres to Google Cloud Storage in JSON format.
    """
    template_fields = ('sql', 'bucket', 'filename', 'schema_filename',
                       'parameters')
    template_ext = ('.sql', )
    ui_color = '#a0e08c'

    @apply_defaults
    def __init__(self,
                 sql,
                 bucket,
                 filename,
                 schema_filename=None,
                 approx_max_file_size_bytes=1900000000,
                 postgres_conn_id='postgres_default',
                 google_cloud_storage_conn_id='google_cloud_default',
                 delegate_to=None,
                 parameters=None,
                 *args,
                 **kwargs):
        """
        :param sql: The SQL to execute on the Postgres table.
        :type sql: str
        :param bucket: The bucket to upload to.
        :type bucket: str
        :param filename: The filename to use as the object name when uploading
            to Google Cloud Storage. A {} should be specified in the filename
            to allow the operator to inject file numbers in cases where the
            file is split due to size.
        :type filename: str
        :param schema_filename: If set, the filename to use as the object name
            when uploading a .json file containing the BigQuery schema fields
            for the table that was dumped from Postgres.
        :type schema_filename: str
        :param approx_max_file_size_bytes: This operator supports the ability
            to split large table dumps into multiple files (see notes in the
            filename param docs above). Google Cloud Storage allows for files
            to be a maximum of 4GB. This param allows developers to specify the
            file size of the splits.
        :type approx_max_file_size_bytes: long
        :param postgres_conn_id: Reference to a specific Postgres hook.
        :type postgres_conn_id: str
        :param google_cloud_storage_conn_id: Reference to a specific Google
            cloud storage hook.
        :type google_cloud_storage_conn_id: str
        :param delegate_to: The account to impersonate, if any. For this to
            work, the service account making the request must have domain-wide
            delegation enabled.
        :param parameters: a parameters dict that is substituted at query runtime.
        :type parameters: dict
        """
        super(PostgresToGoogleCloudStorageOperator, self).__init__(*args, **kwargs)
        self.sql = sql
        self.bucket = bucket
        self.filename = filename
        self.schema_filename = schema_filename
        self.approx_max_file_size_bytes = approx_max_file_size_bytes
        self.postgres_conn_id = postgres_conn_id
        self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
        self.delegate_to = delegate_to
        self.parameters = parameters

    def execute(self, context):
        """Run the export: query Postgres, write local files, upload to GCS."""
        cursor = self._query_postgres()
        files_to_upload = self._write_local_data_files(cursor)

        # If a schema is set, create a BQ schema JSON file.
        if self.schema_filename:
            files_to_upload.update(self._write_local_schema_file(cursor))

        # Flush all files before uploading
        for file_handle in files_to_upload.values():
            file_handle.flush()

        self._upload_to_gcs(files_to_upload)

        # Close all temp file handles (deletes the NamedTemporaryFiles).
        for file_handle in files_to_upload.values():
            file_handle.close()

    def _query_postgres(self):
        """
        Queries Postgres and returns a cursor to the results.
        """
        postgres = PostgresHook(postgres_conn_id=self.postgres_conn_id)
        conn = postgres.get_conn()
        cursor = conn.cursor()
        cursor.execute(self.sql, self.parameters)
        return cursor

    def _write_local_data_files(self, cursor):
        """
        Takes a cursor, and writes results to a local file.

        :return: A dictionary where keys are filenames to be used as object
            names in GCS, and values are file handles to local files that
            contain the data for the GCS objects.
        """
        # PEP 249: the first item of each description tuple is the column name.
        schema = [column[0] for column in cursor.description]
        tmp_file_handles = {}
        row_no = 0

        def _create_new_file():
            # Each split file is named by injecting its index into `filename`.
            handle = NamedTemporaryFile(delete=True)
            filename = self.filename.format(len(tmp_file_handles))
            tmp_file_handles[filename] = handle
            return handle

        # Don't create a file if there is nothing to write
        # NOTE(review): if the driver reports rowcount as -1/0 but the cursor
        # still yields rows, tmp_file_handle would be unbound below — confirm
        # the drivers in use always report an accurate rowcount.
        if cursor.rowcount > 0:
            tmp_file_handle = _create_new_file()

        for row in cursor:
            # Convert datetime objects to utc seconds, and decimals to floats
            row = map(self.convert_types, row)
            row_dict = dict(zip(schema, row))

            s = json.dumps(row_dict, sort_keys=True)
            if PY3:
                s = s.encode('utf-8')
            tmp_file_handle.write(s)

            # Append newline to make dumps BigQuery compatible.
            tmp_file_handle.write(b'\n')

            # Stop if the file exceeds the file size limit.
            if tmp_file_handle.tell() >= self.approx_max_file_size_bytes:
                tmp_file_handle = _create_new_file()
            row_no += 1

        self.log.info('Received %s rows over %s files', row_no, len(tmp_file_handles))
        return tmp_file_handles

    def _write_local_schema_file(self, cursor):
        """
        Takes a cursor, and writes the BigQuery schema for the results to a
        local file system.

        :return: A dictionary where key is a filename to be used as an object
            name in GCS, and values are file handles to local files that
            contains the BigQuery schema fields in .json format.
        """
        schema = []
        for field in cursor.description:
            # See PEP 249 for details about the description tuple.
            field_name = field[0]
            field_type = self.type_map(field[1])
            # Array OIDs (int2/int4/int8/timestamp arrays) become REPEATED.
            field_mode = 'REPEATED' if field[1] in (1009, 1005, 1007,
                                                    1016) else 'NULLABLE'
            schema.append({
                'name': field_name,
                'type': field_type,
                'mode': field_mode,
            })

        self.log.info('Using schema for %s: %s', self.schema_filename, schema)
        tmp_schema_file_handle = NamedTemporaryFile(delete=True)
        s = json.dumps(schema, sort_keys=True)
        if PY3:
            s = s.encode('utf-8')
        tmp_schema_file_handle.write(s)
        return {self.schema_filename: tmp_schema_file_handle}

    def _upload_to_gcs(self, files_to_upload):
        """
        Upload all of the file splits (and optionally the schema .json file) to
        Google Cloud Storage.
        """
        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)
        # `object_name` instead of shadowing the builtin `object`.
        for object_name, tmp_file_handle in files_to_upload.items():
            hook.upload(self.bucket, object_name, tmp_file_handle.name,
                        'application/json')

    @classmethod
    def convert_types(cls, value):
        """
        Takes a value from Postgres, and converts it to a value that's safe for
        JSON/Google Cloud Storage/BigQuery. Dates are converted to UTC seconds.
        Decimals are converted to floats. Times are converted to seconds.
        """
        # isinstance() (rather than exact type() equality) also accepts
        # subclasses; datetime.datetime is itself a subclass of datetime.date,
        # so both land in the first branch as before.
        if isinstance(value, (datetime.datetime, datetime.date)):
            return time.mktime(value.timetuple())
        elif isinstance(value, datetime.time):
            formatted_time = time.strptime(str(value), "%H:%M:%S")
            return datetime.timedelta(
                hours=formatted_time.tm_hour,
                minutes=formatted_time.tm_min,
                seconds=formatted_time.tm_sec).seconds
        elif isinstance(value, Decimal):
            return float(value)
        else:
            return value

    @classmethod
    def type_map(cls, postgres_type):
        """
        Helper function that maps from Postgres fields to BigQuery fields. Used
        when a schema_filename is set.
        """
        d = {
            1114: 'TIMESTAMP',
            1184: 'TIMESTAMP',
            1082: 'TIMESTAMP',
            1083: 'TIMESTAMP',
            1005: 'INTEGER',
            1007: 'INTEGER',
            1016: 'INTEGER',
            20: 'INTEGER',
            21: 'INTEGER',
            23: 'INTEGER',
            16: 'BOOLEAN',
            700: 'FLOAT',
            701: 'FLOAT',
            1700: 'FLOAT'
        }
        # dict.get avoids the double lookup of `d[k] if k in d else ...`.
        return d.get(postgres_type, 'STRING')
| {
"content_hash": "7fefc50f3aa733bec5edbe990a05fca4",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 86,
"avg_line_length": 38.77637130801688,
"alnum_prop": 0.5873775843307943,
"repo_name": "fenglu-g/incubator-airflow",
"id": "78da78ee2f20d99650682543b4a4e597f1c281e6",
"size": "10002",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "airflow/contrib/operators/postgres_to_gcs_operator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "3634"
},
{
"name": "HTML",
"bytes": "129454"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5852162"
},
{
"name": "Shell",
"bytes": "41793"
}
],
"symlink_target": ""
} |
import random
import logging
from django.contrib.auth.models import User
from .models import Task
def add_tasks(*args):
    """Add three tasks ('xxxxx', 'yyyyy', 'zzzzz') for every user in the system."""
    tasks = []
    for user in User.objects.all():
        tasks += [Task(task=task * 5, user=user) for task in 'xyz']
    # Use a plain statement for the side effect instead of the previous
    # `expr if tasks else None` conditional-expression idiom.
    if tasks:
        Task.objects.bulk_create(tasks)
def delete_all_tasks(*args):
    """Delete every Task row in the database."""
    Task.objects.all().delete()
def is_chance(percentage=0):
    """Return True with roughly the given percentage probability, else False.

    Any failure (e.g. percentage of 0 causing a zero division) is logged
    and treated as False.
    """
    try:
        step = 100 // percentage
        return random.randrange(step) == 0
    except Exception as err:
        logging.getLogger('django').info(err, exc_info=True)
        return False
| {
"content_hash": "7d0c17d960f14ef9dbdd5400e62cf74c",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 67,
"avg_line_length": 24,
"alnum_prop": 0.6666666666666666,
"repo_name": "chaos-soft/chocola",
"id": "461bbe380adb1b075cd0f549135f733eea1c6c60",
"size": "805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasks/common.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "970"
},
{
"name": "JavaScript",
"bytes": "4015"
},
{
"name": "Python",
"bytes": "29425"
}
],
"symlink_target": ""
} |
'''
Management of Linux logical volumes
===================================
A state module to manage LVMs
.. code-block:: yaml
/dev/sda:
lvm.pv_present
my_vg:
lvm.vg_present:
- devices: /dev/sda
lvroot:
lvm.lv_present:
- vgname: my_vg
- size: 10G
- stripes: 5
- stripesize: 8K
'''
# Import salt libs
import salt.utils
def __virtual__():
    '''
    Only load the module if lvm is installed
    '''
    return 'lvm' if salt.utils.which('lvm') else False
def pv_present(name, **kwargs):
    '''
    Set a physical device to be used as an LVM physical volume

    name
        The device name to initialize.

    kwargs
        Any supported options to pvcreate. See
        :mod:`linux_lvm <salt.modules.linux_lvm>` for more details.
    '''
    ret = {'changes': {},
           'comment': '',
           'name': name,
           'result': True,
           'state_stdout': ''}

    # Already initialized: nothing to do.
    if __salt__['lvm.pvdisplay'](name):
        ret['comment'] = 'Physical Volume {0} already present'.format(name)
        return ret

    # Test mode: report the pending change without applying it.
    if __opts__['test']:
        ret['comment'] = 'Physical Volume {0} is set to be created'.format(name)
        ret['result'] = None
        return ret

    changes = __salt__['lvm.pvcreate'](name, kwargs=kwargs, state_ret=ret)
    if __salt__['lvm.pvdisplay'](name):
        ret['comment'] = 'Created Physical Volume {0}'.format(name)
        ret['changes'] = changes
    else:
        ret['comment'] = 'Failed to create Physical Volume {0}'.format(name)
        ret['result'] = False
        ret['state_stdout'] = changes
    return ret
def vg_present(name, devices=None, **kwargs):
    '''
    Create an LVM volume group

    name
        The volume group name to create

    devices
        A list of devices that will be added to the volume group

    kwargs
        Any supported options to vgcreate. See
        :mod:`linux_lvm <salt.modules.linux_lvm>` for more details.
    '''
    ret = {'changes': {},
           'comment': '',
           'name': name,
           'result': True,
           'state_stdout': ''}

    # Already created: nothing to do.
    if __salt__['lvm.vgdisplay'](name):
        ret['comment'] = 'Volume Group {0} already present'.format(name)
        return ret

    # Test mode: report the pending change without applying it.
    if __opts__['test']:
        ret['comment'] = 'Volume Group {0} is set to be created'.format(name)
        ret['result'] = None
        return ret

    changes = __salt__['lvm.vgcreate'](name, devices, kwargs=kwargs, state_ret=ret)
    if __salt__['lvm.vgdisplay'](name):
        ret['comment'] = 'Created Volume Group {0}'.format(name)
        ret['changes'] = changes
    else:
        ret['comment'] = 'Failed to create Volume Group {0}'.format(name)
        ret['result'] = False
        ret['state_stdout'] = changes
    return ret
def vg_absent(name):
    '''
    Remove an LVM volume group

    name
        The volume group to remove
    '''
    ret = {'changes': {},
           'comment': '',
           'name': name,
           'result': True,
           'state_stdout': ''}

    # Already gone: nothing to do.
    if not __salt__['lvm.vgdisplay'](name):
        ret['comment'] = 'Volume Group {0} already absent'.format(name)
        return ret

    # Test mode: report the pending change without applying it.
    if __opts__['test']:
        ret['comment'] = 'Volume Group {0} is set to be removed'.format(name)
        ret['result'] = None
        return ret

    changes = __salt__['lvm.vgremove'](name, state_ret=ret)
    if not __salt__['lvm.vgdisplay'](name):
        ret['comment'] = 'Removed Volume Group {0}'.format(name)
        ret['changes'] = changes
    else:
        ret['comment'] = 'Failed to remove Volume Group {0}'.format(name)
        ret['result'] = False
        ret['state_stdout'] = changes
    return ret
def lv_present(name, vgname=None, size=None, extents=None, snapshot=None, pv='', **kwargs):
    '''
    Create a new logical volume

    name
        The name of the logical volume

    vgname
        The volume group name for this logical volume

    size
        The initial size of the logical volume

    extents
        The number of logical extents to allocate

    snapshot
        The name of the snapshot

    pv
        The physical volume to use

    kwargs
        Any supported options to lvcreate. See
        :mod:`linux_lvm <salt.modules.linux_lvm>` for more details.
    '''
    ret = {'changes': {},
           'comment': '',
           'name': name,
           'result': True,
           'state_stdout': ''}

    # When snapshotting, the state's `name` is the snapshot name and the
    # original volume name is passed through as the snapshot source.
    _snapshot = None
    if snapshot:
        _snapshot = name
        name = snapshot

    lvpath = '/dev/{0}/{1}'.format(vgname, name)

    # Already created: nothing to do.
    if __salt__['lvm.lvdisplay'](lvpath):
        ret['comment'] = 'Logical Volume {0} already present'.format(name)
        return ret

    # Test mode: report the pending change without applying it.
    if __opts__['test']:
        ret['comment'] = 'Logical Volume {0} is set to be created'.format(name)
        ret['result'] = None
        return ret

    create_opts = dict(size=size,
                       extents=extents,
                       snapshot=_snapshot,
                       pv=pv,
                       kwargs=kwargs,
                       state_ret=ret)
    changes = __salt__['lvm.lvcreate'](name, vgname, **create_opts)
    if __salt__['lvm.lvdisplay'](lvpath):
        ret['comment'] = 'Created Logical Volume {0}'.format(name)
        ret['changes'] = changes
    else:
        ret['comment'] = 'Failed to create Logical Volume {0}'.format(name)
        ret['result'] = False
        ret['state_stdout'] = changes
    return ret
def lv_absent(name, vgname=None):
    '''
    Remove a given existing logical volume from a named existing volume group

    name
        The logical volume to remove

    vgname
        The volume group name
    '''
    ret = {'changes': {},
           'comment': '',
           'name': name,
           'result': True,
           'state_stdout': ''}

    lvpath = '/dev/{0}/{1}'.format(vgname, name)

    # Already gone: nothing to do.
    if not __salt__['lvm.lvdisplay'](lvpath):
        ret['comment'] = 'Logical Volume {0} already absent'.format(name)
        return ret

    # Test mode: report the pending change without applying it.
    if __opts__['test']:
        ret['comment'] = 'Logical Volume {0} is set to be removed'.format(name)
        ret['result'] = None
        return ret

    changes = __salt__['lvm.lvremove'](name, vgname, state_ret=ret)
    if not __salt__['lvm.lvdisplay'](lvpath):
        ret['comment'] = 'Removed Logical Volume {0}'.format(name)
        ret['changes'] = changes
    else:
        ret['comment'] = 'Failed to remove Logical Volume {0}'.format(name)
        ret['result'] = False
        ret['state_stdout'] = changes
    return ret
| {
"content_hash": "a8316aa08950f1b30e1b717e979416fe",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 91,
"avg_line_length": 28.206611570247933,
"alnum_prop": 0.5190448285965427,
"repo_name": "MadeiraCloud/salt",
"id": "eba27000b02e0142689ff97dab74c64e72a3fdc5",
"size": "6850",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sources/salt/states/lvm.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "10058"
},
{
"name": "Makefile",
"bytes": "1815"
},
{
"name": "Python",
"bytes": "4530204"
},
{
"name": "Shell",
"bytes": "169676"
}
],
"symlink_target": ""
} |
import os
import pytest
from astropy import units as u
from astropy.coordinates import EarthLocation
from astropy.coordinates import SkyCoord
from pocs.mount.simulator import Mount
from pocs.utils import altaz_to_radec
@pytest.fixture
def location(config):
    """Build an EarthLocation from the test config's observatory site."""
    loc = config['location']
    return EarthLocation(lon=loc['longitude'], lat=loc['latitude'], height=loc['elevation'])
@pytest.fixture
def target(location):
    """A fixed RA/Dec target derived from alt=45, az=90 at a fixed time."""
    return altaz_to_radec(obstime='2016-08-13 21:03:01', location=location, alt=45, az=90)
def test_no_location():
    """Constructing a Mount without a location must raise TypeError."""
    with pytest.raises(TypeError):
        Mount()
@pytest.fixture(scope='function')
def mount(location):
    """A fresh simulator Mount per test function."""
    return Mount(location=location)
def test_connect(mount):
    """The simulator mount reports a successful connection."""
    assert mount.connect() is True
def test_disconnect(mount):
    """Disconnecting after a connect clears the connected flag."""
    assert mount.connect() is True
    assert mount.disconnect() is True
    assert mount.is_connected is False
def test_initialize(mount):
    """The simulator mount initializes successfully."""
    assert mount.initialize() is True
def test_target_coords(mount):
    """Setting target coordinates stores them and they round-trip exactly."""
    c = SkyCoord('20h00m43.7135s +22d42m39.0645s')
    mount.set_target_coordinates(c)
    assert mount.get_target_coordinates().to_string() == '300.182 22.7109'
def test_set_park_coords(mount):
    """Park coordinates track the (time-dependent) expected RA at fixed Dec."""
    os.environ['POCSTIME'] = '2016-08-13 23:03:01'
    mount.set_park_coordinates()
    assert mount._park_coordinates is not None

    assert mount._park_coordinates.dec.value == -10.0
    # abs() bounds the deviation in both directions; the previous signed
    # difference (`x - 322.98 <= 1.0`) passed for any RA far below the target.
    assert abs(mount._park_coordinates.ra.value - 322.98) <= 1.0

    os.environ['POCSTIME'] = '2016-08-13 13:03:01'
    mount.set_park_coordinates()
    assert mount._park_coordinates.dec.value == -10.0
    assert abs(mount._park_coordinates.ra.value - 172.57) <= 1.0
def test_status(mount):
    """The status dict only contains target keys once a target is set."""
    status1 = mount.status()
    assert 'mount_target_ra' not in status1

    c = SkyCoord('20h00m43.7135s +22d42m39.0645s')
    mount.set_target_coordinates(c)
    assert mount.get_target_coordinates().to_string() == '300.182 22.7109'

    status2 = mount.status()
    assert 'mount_target_ra' in status2
def test_update_location_no_init(mount, config):
    """Assigning a new location before initialize() must raise."""
    loc = config['location']
    lowered_site = EarthLocation(lon=loc['longitude'],
                                 lat=loc['latitude'],
                                 height=loc['elevation'] - 1000 * u.meter)
    with pytest.raises(AssertionError):
        mount.location = lowered_site
def test_update_location(mount, config):
    """After initialize(), assigning a new location replaces the old one."""
    loc = config['location']
    mount.initialize()
    original_location = mount.location

    lowered_site = EarthLocation(lon=loc['longitude'],
                                 lat=loc['latitude'],
                                 height=loc['elevation'] - 1000 * u.meter)
    mount.location = lowered_site

    assert original_location != lowered_site
    assert mount.location == lowered_site
def test_set_tracking_rate(mount):
    """Tracking rate defaults to 1.0, applies a delta, and resets."""
    mount.initialize()
    assert mount.tracking_rate == 1.0

    mount.set_tracking_rate(delta=.005)
    assert mount.tracking_rate == 1.005

    # No delta argument resets to the sidereal default.
    mount.set_tracking_rate()
    assert mount.tracking_rate == 1.0
def test_no_slew_without_unpark(mount):
    """A parked mount refuses to slew."""
    os.environ['POCSTIME'] = '2016-08-13 20:03:01'
    mount.initialize()
    assert mount.is_parked is True
    assert mount.slew_to_target() is False
def test_no_slew_without_target(mount):
    """An unparked mount with no target set refuses to slew."""
    os.environ['POCSTIME'] = '2016-08-13 20:03:01'
    mount.initialize(unpark=True)
    assert mount.slew_to_target() is False
def test_slew_to_target(mount, target):
    """Slewing moves the mount near the target; parking restores position."""
    os.environ['POCSTIME'] = '2016-08-13 20:03:01'
    assert mount.is_parked is True
    mount.initialize(unpark=True)

    parked_coords = mount.get_current_coordinates()
    assert mount.set_target_coordinates(target) is True
    assert parked_coords != target
    assert mount.slew_to_target() is True

    current_coord = mount.get_current_coordinates()
    # abs() bounds the pointing error in both directions; the signed
    # difference previously passed for arbitrarily large negative errors.
    assert abs(current_coord.ra.value - target.ra.value) < 0.5
    assert abs(current_coord.dec.value - target.dec.value) < 0.5

    mount.park()
    assert mount.is_parked is True
    # This comparison was previously a bare expression with no effect;
    # it is now asserted as intended.
    assert mount.get_current_coordinates() == parked_coords
def test_slew_to_home(mount):
    """Slewing home unparks the mount and sets the home flag."""
    mount.initialize()
    assert mount.is_parked is True
    assert mount.is_home is False

    mount.slew_to_home()
    assert mount.is_parked is False
    assert mount.is_home is True
| {
"content_hash": "e3771fa328e972c9318255e2a7214eaa",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 92,
"avg_line_length": 23.65142857142857,
"alnum_prop": 0.6798743657888379,
"repo_name": "AstroHuntsman/POCS",
"id": "eafa4b2a3dc1f967d8a16cd8d507906c42840446",
"size": "4139",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pocs/tests/test_mount_simulator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "22750"
},
{
"name": "C++",
"bytes": "55452"
},
{
"name": "JavaScript",
"bytes": "13166"
},
{
"name": "Processing",
"bytes": "9037"
},
{
"name": "Python",
"bytes": "657327"
},
{
"name": "Shell",
"bytes": "13542"
}
],
"symlink_target": ""
} |
from collections import defaultdict, namedtuple
from sanic.constants import HTTP_METHODS
from sanic.views import CompositionView
# "Future" records describe registrations collected on a Blueprint before it
# is attached to an app. Each namedtuple's typename now matches the name it
# is bound to: previously four of them were all created as 'Route', which
# produced misleading reprs and broke pickling (pickle resolves a namedtuple
# by its typename in the defining module).
FutureRoute = namedtuple('FutureRoute',
                         ['handler', 'uri', 'methods',
                          'host', 'strict_slashes'])
FutureListener = namedtuple('FutureListener', ['handler', 'uri', 'methods', 'host'])
FutureMiddleware = namedtuple('FutureMiddleware', ['middleware', 'args', 'kwargs'])
FutureException = namedtuple('FutureException', ['handler', 'args', 'kwargs'])
FutureStatic = namedtuple('FutureStatic',
                          ['uri', 'file_or_directory', 'args', 'kwargs'])
class Blueprint:
    """A deferred collection of routes, middleware, listeners, exception
    handlers and static files that can be registered onto a Sanic app,
    optionally under a common URL prefix and host.
    """

    def __init__(self, name, url_prefix=None, host=None):
        """Create a new blueprint

        :param name: unique name of the blueprint
        :param url_prefix: URL to be prefixed before all route URLs
        :param host: default host used for routes that don't set one
        """
        self.name = name
        self.url_prefix = url_prefix
        self.host = host

        # Pending registrations, applied to the app in register().
        self.routes = []
        self.websocket_routes = []
        self.exceptions = []
        self.listeners = defaultdict(list)
        self.middlewares = []
        self.statics = []

    def register(self, app, options):
        """Register the blueprint to the sanic app.

        :param app: the Sanic application to attach to
        :param options: may override ``url_prefix`` via the same key
        """
        url_prefix = options.get('url_prefix', self.url_prefix)

        # Routes
        for future in self.routes:
            # attach the blueprint name to the handler so that it can be
            # prefixed properly in the router
            future.handler.__blueprintname__ = self.name
            # Prepend the blueprint URI prefix if available
            uri = url_prefix + future.uri if url_prefix else future.uri
            app.route(
                uri=uri[1:] if uri.startswith('//') else uri,
                methods=future.methods,
                host=future.host or self.host,
                strict_slashes=future.strict_slashes
            )(future.handler)

        for future in self.websocket_routes:
            # attach the blueprint name to the handler so that it can be
            # prefixed properly in the router
            future.handler.__blueprintname__ = self.name
            # Prepend the blueprint URI prefix if available
            uri = url_prefix + future.uri if url_prefix else future.uri
            app.websocket(
                uri=uri,
                host=future.host or self.host,
                strict_slashes=future.strict_slashes
            )(future.handler)

        # Middleware
        for future in self.middlewares:
            if future.args or future.kwargs:
                app.middleware(*future.args,
                               **future.kwargs)(future.middleware)
            else:
                app.middleware(future.middleware)

        # Exceptions
        for future in self.exceptions:
            app.exception(*future.args, **future.kwargs)(future.handler)

        # Static Files
        for future in self.statics:
            # Prepend the blueprint URI prefix if available
            uri = url_prefix + future.uri if url_prefix else future.uri
            app.static(uri, future.file_or_directory,
                       *future.args, **future.kwargs)

        # Event listeners
        for event, listeners in self.listeners.items():
            for listener in listeners:
                app.listener(event)(listener)

    def route(self, uri, methods=frozenset({'GET'}), host=None,
              strict_slashes=False):
        """Create a blueprint route from a decorated function.

        :param uri: endpoint at which the route will be accessible.
        :param methods: list of acceptable HTTP methods.
        """
        def decorator(handler):
            route = FutureRoute(handler, uri, methods, host, strict_slashes)
            self.routes.append(route)
            return handler
        return decorator

    def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None,
                  strict_slashes=False):
        """Create a blueprint route from a function.

        :param handler: function for handling uri requests. Accepts function,
                        or class instance with a view_class method.
        :param uri: endpoint at which the route will be accessible.
        :param methods: list of acceptable HTTP methods.
        :return: function or class instance
        """
        # Handle HTTPMethodView differently: derive methods from the view's
        # defined verb handlers.
        if hasattr(handler, 'view_class'):
            methods = set()

            for method in HTTP_METHODS:
                if getattr(handler.view_class, method.lower(), None):
                    methods.add(method)

        # handle composition view differently
        if isinstance(handler, CompositionView):
            methods = handler.handlers.keys()

        self.route(uri=uri, methods=methods, host=host,
                   strict_slashes=strict_slashes)(handler)
        return handler

    def websocket(self, uri, host=None, strict_slashes=False):
        """Create a blueprint websocket route from a decorated function.

        :param uri: endpoint at which the route will be accessible.
        """
        def decorator(handler):
            route = FutureRoute(handler, uri, [], host, strict_slashes)
            self.websocket_routes.append(route)
            return handler
        return decorator

    def add_websocket_route(self, handler, uri, host=None):
        """Create a blueprint websocket route from a function.

        :param handler: function for handling uri requests. Accepts function,
                        or class instance with a view_class method.
        :param uri: endpoint at which the route will be accessible.
        :return: function or class instance
        """
        self.websocket(uri=uri, host=host)(handler)
        return handler

    def listener(self, event):
        """Create a listener from a decorated function.

        :param event: Event to listen to.
        """
        def decorator(listener):
            self.listeners[event].append(listener)
            return listener
        return decorator

    def middleware(self, *args, **kwargs):
        """Create a blueprint middleware from a decorated function."""
        def register_middleware(_middleware):
            future_middleware = FutureMiddleware(_middleware, args, kwargs)
            self.middlewares.append(future_middleware)
            return _middleware

        # Detect which way this was called, @middleware or @middleware('AT')
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            middleware = args[0]
            args = []
            return register_middleware(middleware)
        else:
            return register_middleware

    def exception(self, *args, **kwargs):
        """Create a blueprint exception from a decorated function."""
        def decorator(handler):
            exception = FutureException(handler, args, kwargs)
            self.exceptions.append(exception)
            return handler
        return decorator

    def static(self, uri, file_or_directory, *args, **kwargs):
        """Register a static route on the blueprint (not a decorator).

        :param uri: endpoint at which the route will be accessible.
        :param file_or_directory: Static asset.
        """
        static = FutureStatic(uri, file_or_directory, args, kwargs)
        self.statics.append(static)

    # Shorthand method decorators
    def get(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["GET"], host=host,
                          strict_slashes=strict_slashes)

    def post(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["POST"], host=host,
                          strict_slashes=strict_slashes)

    def put(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["PUT"], host=host,
                          strict_slashes=strict_slashes)

    def head(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["HEAD"], host=host,
                          strict_slashes=strict_slashes)

    def options(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["OPTIONS"], host=host,
                          strict_slashes=strict_slashes)

    def patch(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["PATCH"], host=host,
                          strict_slashes=strict_slashes)

    def delete(self, uri, host=None, strict_slashes=False):
        return self.route(uri, methods=["DELETE"], host=host,
                          strict_slashes=strict_slashes)
| {
"content_hash": "5aca6b4d7c8238fb5043dc1795f30eb2",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 78,
"avg_line_length": 39.39269406392694,
"alnum_prop": 0.5966152776167846,
"repo_name": "jrocketfingers/sanic",
"id": "7e9953e08852264f83750ab037b2690e87d9c371",
"size": "8627",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sanic/blueprints.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "482"
},
{
"name": "Makefile",
"bytes": "108"
},
{
"name": "Python",
"bytes": "231242"
}
],
"symlink_target": ""
} |
from pybuilder.core import use_plugin, init, Author
# Activate the PyBuilder plugins this build relies on.
use_plugin("python.core")
use_plugin("python.unittest")
use_plugin("python.install_dependencies")
use_plugin("python.flake8")
use_plugin("python.coverage")
use_plugin("python.distutils")
use_plugin("copy_resources")

# Project metadata consumed by the distutils plugin when packaging.
name = "previewr"
default_task = "publish"
summary = "Simple Markdown/reStructured Text previewer"
authors = [Author("Raphael Zimmermann", "mister.norbert@gmail.com")]
description = open('README.rst').read()
license = "MIT"
url = "http://raphael.li/projects/previewr/"
version = "0.5.0.dev"
@init
def set_properties(project):
    """Configure packaging: console script, bundled resources, PyPI
    classifiers, coverage behavior and dependencies."""
    # Scripts directory
    project.set_property("distutils_console_scripts", ["previewr = previewr:main"])
    project.include_file('previewr', 'templates/*')
    project.include_file('previewr', 'static/*')

    # Also build resources
    project.set_property("copy_resources_target", "$dir_dist/previewr")
    project.set_property("copy_resources_glob", ['static/*', 'templates/*'])

    project.set_property("distutils_classifiers", [
        "Programming Language :: Python :: 3",
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Text Processing :: Linguistic",
    ])

    # Don't break the build for coverage
    project.set_property('coverage_break_build', False)

    # Dependencies from requirements.txt
    project.depends_on_requirements("requirements.txt")
| {
"content_hash": "a1887d10643d2761426d670950c901d1",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 83,
"avg_line_length": 33.1875,
"alnum_prop": 0.6880100439422473,
"repo_name": "raphiz/previewr",
"id": "e8911d005cf53d2137f03d9fb139f2fbb78168aa",
"size": "1593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8426"
},
{
"name": "HTML",
"bytes": "1294"
},
{
"name": "JavaScript",
"bytes": "494"
},
{
"name": "Python",
"bytes": "14174"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import url, include, patterns
from corehq.apps.appstore.dispatcher import AppstoreDispatcher

# URLconf for the CommCare HQ app store (old-style Django `patterns()`).
# NOTE: pattern order matters — Django matches top to bottom.

# URLs mounted under the store/ prefix; the dispatcher contributes its own
# sub-URL pattern.
store_urls = patterns('corehq.apps.appstore.views',
    url(r'^$', 'appstore_default', name="appstore_interfaces_default"),
    AppstoreDispatcher.url_pattern(),
)

urlpatterns = patterns('corehq.apps.appstore.views',
    url(r'^$', 'appstore', name='appstore'),
    url(r'^api/', 'appstore_api', name='appstore_api'),
    url(r'^store/', include(store_urls)),
    # Per-project (domain-scoped) views
    url(r'^(?P<domain>[\w\.-]+)/info/$', 'project_info', name='project_info'),
    url(r'^deployments/$', 'deployments', name='deployments'),
    url(r'^deployments/api/$', 'deployments_api', name='deployments_api'),
    url(r'^deployments/(?P<domain>[\w\.-]+)/info/$', 'deployment_info', name='deployment_info'),
    url(r'^(?P<domain>[\w\.-]+)/approve/$', 'approve_app', name='approve_appstore_app'),
    url(r'^(?P<domain>[\w\.-]+)/copy/$', 'copy_snapshot', name='domain_copy_snapshot'),
    url(r'^(?P<domain>[\w\.-]+)/importapp/$', 'import_app', name='import_app_from_snapshot'),
    url(r'^(?P<domain>[\w\.-]+)/image/$', 'project_image', name='appstore_project_image'),
    url(r'^(?P<domain>[\w\.-]+)/multimedia/$', 'media_files', name='media_files'),
)
| {
"content_hash": "b7839d410dfa4e26ec798af3bd992afe",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 96,
"avg_line_length": 48.11538461538461,
"alnum_prop": 0.6338928856914469,
"repo_name": "SEL-Columbia/commcare-hq",
"id": "60563aa2ef81de63dbaea0f3ad170ec8ec84759d",
"size": "1251",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "corehq/apps/appstore/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "768322"
},
{
"name": "JavaScript",
"bytes": "2647080"
},
{
"name": "Python",
"bytes": "7806659"
},
{
"name": "Shell",
"bytes": "28569"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.content import hooks as content_hooks
from kolibri.plugins.base import KolibriPluginBase
class HTML5AppPlugin(KolibriPluginBase):
    """Kolibri plugin entry point for the HTML5 app renderer.

    Registration happens via the base class; no extra behavior is needed.
    """
    pass
class HTML5AppAsset(content_hooks.ContentRendererHook):
    """Hook registering the HTML5 app renderer's frontend assets."""
    # Unique identifier for this renderer module.
    unique_slug = "html5_app_renderer_module"
    # Frontend entry point bundled for this renderer.
    src_file = "assets/src/module.js"
    # Declares which content types this renderer handles.
    content_types_file = "content_types.json"
| {
"content_hash": "5d1271bbc8e6317a295dd507359e7cdc",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 55,
"avg_line_length": 29.0625,
"alnum_prop": 0.7720430107526882,
"repo_name": "lyw07/kolibri",
"id": "86c009665017ad440e07d7386f93a452c8033c55",
"size": "465",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "kolibri/plugins/html5_app_renderer/kolibri_plugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "601"
},
{
"name": "CSS",
"bytes": "2007902"
},
{
"name": "Dockerfile",
"bytes": "6930"
},
{
"name": "Gherkin",
"bytes": "199214"
},
{
"name": "HTML",
"bytes": "34393"
},
{
"name": "JavaScript",
"bytes": "1376767"
},
{
"name": "Makefile",
"bytes": "11718"
},
{
"name": "Python",
"bytes": "1896793"
},
{
"name": "Shell",
"bytes": "11350"
},
{
"name": "Vue",
"bytes": "1278479"
}
],
"symlink_target": ""
} |
"""Installation test for YAMNet."""
import numpy as np
import tensorflow as tf
import params
import yamnet
class YAMNetTest(tf.test.TestCase):
    """Smoke tests: run YAMNet on synthetic clips and check classification."""

    # Shared across tests; built once in setUpClass.
    _yamnet_graph = None
    _yamnet = None
    _yamnet_classes = None

    @classmethod
    def setUpClass(cls):
        super(YAMNetTest, cls).setUpClass()
        cls._yamnet_graph = tf.Graph()
        with cls._yamnet_graph.as_default():
            cls._yamnet = yamnet.yamnet_frames_model(params)
            # Pretrained weights and class map must be present in the cwd.
            cls._yamnet.load_weights('yamnet.h5')
        cls._yamnet_classes = yamnet.class_names('yamnet_class_map.csv')

    def clip_test(self, waveform, expected_class_name, top_n=10):
        """Run the model on the waveform, check that expected class is in top-n."""
        with YAMNetTest._yamnet_graph.as_default():
            # Average frame-level predictions over time to score the clip.
            prediction = np.mean(YAMNetTest._yamnet.predict(
                np.reshape(waveform, [1, -1]), steps=1)[0], axis=0)
        top_n_class_names = YAMNetTest._yamnet_classes[
            np.argsort(prediction)[-top_n:]]
        self.assertIn(expected_class_name, top_n_class_names)

    def testZeros(self):
        # 3 seconds of silence.
        self.clip_test(
            waveform=np.zeros((1, int(3 * params.SAMPLE_RATE))),
            expected_class_name='Silence')

    def testRandom(self):
        np.random.seed(51773)  # Ensure repeatability.
        self.clip_test(
            waveform=np.random.uniform(-1.0, +1.0,
                                       (1, int(3 * params.SAMPLE_RATE))),
            expected_class_name='White noise')

    def testSine(self):
        # 3 seconds of a 440 Hz tone.
        self.clip_test(
            waveform=np.reshape(
                np.sin(2 * np.pi * 440 * np.linspace(
                    0, 3, int(3 *params.SAMPLE_RATE))),
                [1, -1]),
            expected_class_name='Sine wave')
# Run the test suite when executed directly.
if __name__ == '__main__':
    tf.test.main()
| {
"content_hash": "20cfe67bebc9806f5d1ac62a0d9181b2",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 79,
"avg_line_length": 30.6,
"alnum_prop": 0.6203208556149733,
"repo_name": "alexgorban/models",
"id": "c3f64859949ce4bc7cc83529334a9e29da0d0124",
"size": "2372",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "research/audioset/yamnet/yamnet_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1619012"
},
{
"name": "Dockerfile",
"bytes": "9821"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33316"
},
{
"name": "Jupyter Notebook",
"bytes": "454746"
},
{
"name": "Makefile",
"bytes": "4933"
},
{
"name": "Python",
"bytes": "16363107"
},
{
"name": "Shell",
"bytes": "144095"
},
{
"name": "Starlark",
"bytes": "148029"
}
],
"symlink_target": ""
} |
from django.db import models
from django.db.models.query import QuerySet
# Following queryset, manager, and models are based on the snippet at
# http://www.djangosnippets.org/snippets/1034/
class SubclassQuerySet(QuerySet):
    """QuerySet that downcasts every fetched model to its leaf subclass.

    Based on http://www.djangosnippets.org/snippets/1034/ — each item is
    replaced by ``item.as_leaf_class()`` so callers always see the most
    derived model instance.
    """

    def __getitem__(self, k):
        item = super(SubclassQuerySet, self).__getitem__(k)
        # Slicing can return another queryset/list rather than a single
        # model instance; only downcast actual model objects.
        if not isinstance(item, models.Model):
            return item
        return item.as_leaf_class()

    def __iter__(self):
        base_iter = super(SubclassQuerySet, self).__iter__()
        for obj in base_iter:
            yield obj.as_leaf_class()
class SubclassManager(models.Manager):
    """Manager whose querysets yield leaf-class instances (see SubclassQuerySet)."""

    def get_query_set(self):
        # NOTE(review): does not pass a db alias (e.g. using=self._db);
        # confirm multi-database routing is not needed here.
        return SubclassQuerySet(self.model)
"content_hash": "8cd4dc59520e1d64947c04d753df23c9",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 69,
"avg_line_length": 36.89473684210526,
"alnum_prop": 0.6476462196861626,
"repo_name": "maxcutler/Courant-News",
"id": "8ec2bc2b8034af8377313e52ef49953b367b3de7",
"size": "701",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "courant/core/utils/managers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "47452"
},
{
"name": "Python",
"bytes": "487441"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class FamilysrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``familysrc`` property of scatterpolar hoverlabel fonts."""

    def __init__(
        self,
        plotly_name="familysrc",
        parent_name="scatterpolar.hoverlabel.font",
        **kwargs,
    ):
        # Default the edit type to "none" unless the caller overrides it.
        edit_type = kwargs.pop("edit_type", "none")
        super(FamilysrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| {
"content_hash": "5125be76edcf5894c6d9a41405208263",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 68,
"avg_line_length": 28.75,
"alnum_prop": 0.5847826086956521,
"repo_name": "plotly/plotly.py",
"id": "e010d52797827ac55a61fe113402255df9eb6fc4",
"size": "460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scatterpolar/hoverlabel/font/_familysrc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
"""Abstraction of an OF table."""
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2018 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import struct
from faucet import valve_of
class ValveTable(object):
    """Wrapper for an OpenFlow table."""

    def __init__(self, table_id, name, restricted_match_types,
                 flow_cookie, notify_flow_removed=False):
        # table_id: numeric OpenFlow table ID.
        # restricted_match_types: optional iterable of permitted match
        #   fields; stored as a set, or None for "no restriction".
        # flow_cookie: default cookie stamped on flowmods for this table.
        # notify_flow_removed: if True, flows request OFPFF_SEND_FLOW_REM.
        self.table_id = table_id
        self.name = name
        self.restricted_match_types = None
        if restricted_match_types:
            self.restricted_match_types = set(restricted_match_types)
        self.flow_cookie = flow_cookie
        self.notify_flow_removed = notify_flow_removed

    def match(self, in_port=None, vlan=None,
              eth_type=None, eth_src=None,
              eth_dst=None, eth_dst_mask=None,
              icmpv6_type=None,
              nw_proto=None, nw_dst=None):
        """Compose an OpenFlow match rule."""
        match_dict = valve_of.build_match_dict(
            in_port, vlan, eth_type, eth_src,
            eth_dst, eth_dst_mask, icmpv6_type,
            nw_proto, nw_dst)
        match = valve_of.match(match_dict)
        # Enforce the table's allowed match fields, if restricted.
        if self.restricted_match_types is not None:
            for match_type in match_dict:
                assert match_type in self.restricted_match_types, '%s match in table %s' % (
                    match_type, self.name)
        return match

    def flowmod(self, match=None, priority=None,
                inst=None, command=valve_of.ofp.OFPFC_ADD, out_port=0,
                out_group=0, hard_timeout=0, idle_timeout=0, cookie=None):
        """Helper function to construct a flow mod message with cookie."""
        if match is None:
            match = self.match()
        if priority is None:
            priority = 0  # self.dp.lowest_priority
        if inst is None:
            inst = []
        if cookie is None:
            cookie = self.flow_cookie
        flags = 0
        if self.notify_flow_removed:
            flags = valve_of.ofp.OFPFF_SEND_FLOW_REM
        return valve_of.flowmod(
            cookie,
            command,
            self.table_id,
            priority,
            out_port,
            out_group,
            match,
            inst,
            hard_timeout,
            idle_timeout,
            flags)

    def flowdel(self, match=None, priority=None, out_port=valve_of.ofp.OFPP_ANY, strict=False):
        """Delete matching flows from a table.

        strict: if True, use OFPFC_DELETE_STRICT (exact match + priority).
        Returns a single-element list of flowmod messages.
        """
        command = valve_of.ofp.OFPFC_DELETE
        if strict:
            command = valve_of.ofp.OFPFC_DELETE_STRICT
        return [
            self.flowmod(
                match=match,
                priority=priority,
                command=command,
                out_port=out_port,
                out_group=valve_of.ofp.OFPG_ANY)]

    def flowdrop(self, match=None, priority=None, hard_timeout=0):
        """Add drop matching flow to a table (empty instruction list = drop)."""
        return self.flowmod(
            match=match,
            priority=priority,
            hard_timeout=hard_timeout,
            inst=[])

    def flowcontroller(self, match=None, priority=None, inst=None, max_len=96):
        """Add flow outputting to controller.

        max_len: maximum bytes of each packet to send to the controller.
        """
        if inst is None:
            inst = []
        return self.flowmod(
            match=match,
            priority=priority,
            inst=[valve_of.apply_actions(
                [valve_of.output_controller(max_len)])] + inst)
class ValveGroupEntry(object):
    """Abstraction for a single OpenFlow group entry."""

    def __init__(self, table, group_id, buckets):
        self.table = table
        self.group_id = group_id
        self.update_buckets(buckets)

    def update_buckets(self, buckets):
        # Buckets are stored immutably.
        self.buckets = tuple(buckets)

    def add(self):
        """Return flows to add this entry to the group table."""
        # Remove any stale entry first, then issue the group add and
        # register ourselves in the owning table.
        ofmsgs = [self.delete()]
        ofmsgs.append(valve_of.groupadd(
            group_id=self.group_id, buckets=self.buckets))
        self.table.entries[self.group_id] = self
        return ofmsgs

    def modify(self):
        """Return flow to modify an existing group entry."""
        assert self.group_id in self.table.entries
        self.table.entries[self.group_id] = self
        return valve_of.groupmod(group_id=self.group_id, buckets=self.buckets)

    def delete(self):
        """Return flow to delete an existing group entry."""
        # Deregister if present; the delete message is sent regardless.
        self.table.entries.pop(self.group_id, None)
        return valve_of.groupdel(group_id=self.group_id)
class ValveGroupTable(object):
    """Wrap access to the OpenFlow group table."""

    def __init__(self):
        # Registry of group entries keyed by group ID. This was previously a
        # class-level dict, which was shared across ALL ValveGroupTable
        # instances and leaked group state between them; it is now
        # per-instance state.
        self.entries = {}  # type: dict

    @staticmethod
    def group_id_from_str(key_str):
        """Return a group ID based on a string key."""
        # TODO: does not handle collisions
        digest = hashlib.sha256(key_str.encode('utf-8')).digest()
        # First 4 digest bytes interpreted as a little-endian unsigned int.
        return struct.unpack('<L', digest[:4])[0]

    def get_entry(self, group_id, buckets):
        """Return the entry for group_id, updating its buckets if it exists,
        or creating a new ValveGroupEntry otherwise."""
        if group_id in self.entries:
            self.entries[group_id].update_buckets(buckets)
        else:
            self.entries[group_id] = ValveGroupEntry(
                self, group_id, buckets)
        return self.entries[group_id]

    def delete_all(self):
        """Delete all groups."""
        self.entries = {}
        return valve_of.groupdel()
| {
"content_hash": "a19f912e34f28bd7213e931c9c18ff12",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 95,
"avg_line_length": 34.98245614035088,
"alnum_prop": 0.5962888665997994,
"repo_name": "wackerly/faucet",
"id": "6d46f40f15e2803a50a6c835af8e5d881018b33f",
"size": "5982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "faucet/valve_table.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2525"
},
{
"name": "Python",
"bytes": "1022623"
},
{
"name": "Shell",
"bytes": "10272"
}
],
"symlink_target": ""
} |
"""
elementary ballistic problem
"""
import numpy as np
from scipy import constants
import helper
### initial conditions ###
# initial position (x0 horizontal, y0 vertical, in metres)
x0 = 0
y0 = 125
s0 = (x0,y0)
# scalar inital speed (m/s)
v0s = 12
# launch angle above horizontal (radians)
theta = np.pi/6
# define acceleration vector - acceleration is constant here
g = constants.physical_constants['standard acceleration of gravity'][0]
a = np.array([0, -g ])
### end initial conditions ###
# from initial condition get vector form of speed
# (helper.polarToCartesian converts [magnitude, angle] -> [vx, vy])
v0 = helper.polarToCartesian(np.array([v0s, theta]))
print "vo"
print v0
print "a: "+str(a)
print "v0: "+str(v0)
print "s0: "+str(s0)
"""
speed in function of time
@arg t: time
@return v: speed
"""
def v(t):
return a*t +v0
"""
position in function of time
@arg t: time
@return s: position
"""
def s(t):
return .5*a*(t**2) + v0*t + s0
"""
find time in function of x
@return t
@param x: position
"""
def tFromX(x):
return (x-s0[0])/v0[0]
"""
get position (x and y) from x
"""
def sfromX(x):
return s(tFromX(x))
"""
find time in function of y
@return t
@param y: position
"""
def tFromY(y):
s = helper.solve2ndDegEq(.5*a[1], v0[1], s0[1]-y) # solve 2nd deg equation
f = max(s) # take max of two results (most likely is the other negative)
return f.real # and return real part to avoid confussion
"""
find t that maximizes height
"""
tMaxHeight = -v0[1]/a[1]
"""
computes associated position
"""
sMaxHeight = s(tMaxHeight)
print "Missile reaches highest point at time "+str(tMaxHeight)+" and position "+str(sMaxHeight)
"""
find t when y=0
"""
tZeroHeight = tFromY(0)
sZeroHeight = s(tZeroHeight)
print "Missile reaches floor at time "+str(tZeroHeight)+" and position "+str(sZeroHeight)
#print tFromX(6.3561497)
#print sfromX(6.3561497)
#print tFromY(126.83486239)
import matplotlib.pyplot as plt
t = np.arange(0, tZeroHeight, .0002)
t = t.reshape(len(t),-1).transpose().transpose()
labels = ['x trajectory', 'y trajectory']
print s(t)
for y, label in zip(s(t).transpose(), labels):
plt.plot(t, y, label = label)
plt.xlabel('time [s]')
plt.ylabel('trajectory [m]')
plt.legend()
plt.savefig("ballistic.png")
plt.show()
#print s | {
"content_hash": "df5badfc3d946a421eeb79ae58e41689",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 95,
"avg_line_length": 17.125984251968504,
"alnum_prop": 0.663448275862069,
"repo_name": "jboissard/mathExperiments",
"id": "db4bd793f2b257cc3fac7d6eaa34d3d5c1c3b0bd",
"size": "2175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "physics/ballistic.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "11839"
},
{
"name": "Perl",
"bytes": "275"
},
{
"name": "Python",
"bytes": "26265"
}
],
"symlink_target": ""
} |
from nose.tools import eq_
import amo.tests
from amo.urlresolvers import reverse
from users.models import UserProfile
from mkt.api.models import Access
from mkt.site.fixtures import fixture
class TestAPI(amo.tests.TestCase):
    """Tests for the developer-facing API key management view."""
    fixtures = fixture('user_999')

    def setUp(self):
        # Log in as the fixture user and target the API management URL.
        self.profile = UserProfile.objects.get(pk=999)
        self.user = self.profile.user
        self.login(self.profile)
        self.url = reverse('mkt.developers.apps.api')

    def test_logged_out(self):
        # Anonymous users must be redirected to login.
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_non_url(self):
        # Non-http(s) redirect URIs are rejected by form validation.
        res = self.client.post(
            self.url,
            {'app_name': 'test', 'redirect_uri': 'mailto:cvan@example.com'})
        self.assertFormError(res, 'form', 'redirect_uri', ['Enter a valid value.'])

    def test_create(self):
        # Creating a second key produces a deterministic key name.
        Access.objects.create(user=self.user, key='foo', secret='bar')
        res = self.client.post(
            self.url,
            {'app_name': 'test', 'redirect_uri': 'https://example.com/myapp'})
        self.assertNoFormErrors(res)
        eq_(res.status_code, 200)
        consumers = Access.objects.filter(user=self.user)
        eq_(len(consumers), 2)
        eq_(consumers[1].key, 'mkt:999:regular@mozilla.com:1')

    def test_delete(self):
        a = Access.objects.create(user=self.user, key='foo', secret='bar')
        res = self.client.post(self.url, {'delete': 'yep', 'consumer': a.pk})
        eq_(res.status_code, 200)
        eq_(Access.objects.filter(user=self.user).count(), 0)

    def test_admin(self):
        # Admins may not create keys via this view.
        self.grant_permission(self.profile, 'What:ever', name='Admins')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(Access.objects.filter(user=self.user).count(), 0)

    def test_other(self):
        # Non-admin privileged users can still create keys.
        self.grant_permission(self.profile, 'What:ever')
        res = self.client.post(
            self.url,
            {'app_name': 'test', 'redirect_uri': 'http://example.com/myapp'})
        eq_(res.status_code, 200)
        eq_(Access.objects.filter(user=self.user).count(), 1)
| {
"content_hash": "0dc8afb9175650aee23d0f463bd8202f",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 83,
"avg_line_length": 35.779661016949156,
"alnum_prop": 0.6167693036475604,
"repo_name": "spasovski/zamboni",
"id": "07eaa37f3e3f0aa9329dd613d5059c6897e40782",
"size": "2111",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mkt/developers/tests/test_views_api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4145"
},
{
"name": "CSS",
"bytes": "885279"
},
{
"name": "JavaScript",
"bytes": "1677601"
},
{
"name": "Puppet",
"bytes": "13808"
},
{
"name": "Python",
"bytes": "6279560"
},
{
"name": "Shell",
"bytes": "19774"
}
],
"symlink_target": ""
} |
from .. import syntax, prims
from ..analysis import OffsetAnalysis, SyntaxVisitor
from ..ndtypes import ArrayT, ScalarT, SliceT, TupleT, NoneT
from ..syntax import unwrap_constant, Expr
import shape
import shape_from_type
from shape import (Var, Const, Shape, Tuple, Closure, Slice, Scalar, Unknown,
ConstSlice, Struct, AnyScalar, Add, Mult, Div, Sub, Mod,
any_scalar, unknown_value, const, any_value, combine_list,
increase_rank, make_shape, is_zero, is_one, Ptr,
dims, combine_dims
)
class ShapeInferenceFailure(Exception):
    """Raised when no shape can be inferred for an expression in a function."""

    def __init__(self, value, fn):
        # The offending expression and the typed function it came from.
        self.value, self.fn = value, fn

    def __str__(self):
        template = "Couldn't infer shape of %s in function %s"
        return template % (self.value, self.fn.name)
counter = 0
class ShapeInference(SyntaxVisitor):
    """Abstract interpreter that evaluates a typed function over symbolic
    shape values (from the `shape` module) instead of concrete data."""

    def size_along_axis(self, value, axis):
        # Only symbolic array shapes carry per-axis sizes.
        assert isinstance(value, Shape)
        return value.dims[axis]

    def is_tuple(self, x):
        return isinstance(x, Tuple)

    def is_none(self, x):
        # `None` is modeled as the symbolic constant Const(None).
        return isinstance(x, Const) and x.value is None

    def rank(self, value):
        # Non-Shape abstract values (scalars etc.) are treated as rank 0.
        if isinstance(value, Shape):
            return value.rank
        else:
            return 0

    def max_rank(self, values):
        return max(self.rank(v) for v in values)

    def int(self, x):
        # Wrap a Python int as a symbolic constant.
        return const(x)

    def bool(self, x):
        # Wrap a Python bool as a symbolic constant.
        return const(x)
    # Abstract value classes that represent scalars; symbolic arithmetic is
    # only defined between members of this set.
    _scalar_shape_classes = (Const, Var, Add, Sub, Mult, Div, Mod, AnyScalar)

    def add(self, x, y):
        """Symbolic addition with constant folding and identity elimination."""
        cx = x.__class__
        cy = y.__class__
        if cx in self._scalar_shape_classes and cy in self._scalar_shape_classes:
            if is_zero(x):
                return y
            elif is_zero(y):
                return x
            elif cx is Const and cy is Const:
                return const(x.value + y.value)
            elif cx is AnyScalar or cy is AnyScalar:
                # Anything combined with an unknown scalar stays unknown.
                return any_scalar
            else:
                return Add(x, y)
        else:
            return any_value

    def sub(self, x, y):
        """Symbolic subtraction; folds constants and x - x -> 0."""
        cx = x.__class__
        cy = y.__class__
        if cx in self._scalar_shape_classes and cy in self._scalar_shape_classes:
            if is_zero(y):
                return x
            elif cx is Const and cy is Const:
                return const(x.value - y.value)
            elif cx is AnyScalar or cy is AnyScalar:
                return any_scalar
            elif x == y:
                return Const(0)
            else:
                return Sub(x, y)
        else:
            return any_value

    def mul(self, x, y):
        """Symbolic multiplication; folds zeros, ones and constants."""
        cx = x.__class__
        cy = y.__class__
        if cx in self._scalar_shape_classes and cy in self._scalar_shape_classes:
            if is_zero(x) or is_zero(y):
                return const(0)
            elif is_one(x):
                return y
            elif is_one(y):
                return x
            elif cx is AnyScalar or cy is AnyScalar:
                return any_scalar
            else:
                return Mult(x, y)
        else:
            return any_value

    def div(self, x, y):
        """Symbolic (integer) division; folds constants and x / x -> 1."""
        assert not is_zero(y), "Encountered divide by zero during shape inference"
        cx = x.__class__
        cy = y.__class__
        if cx in self._scalar_shape_classes and cy in self._scalar_shape_classes:
            if is_one(y):
                return x
            elif cx is AnyScalar or cy is AnyScalar:
                return any_scalar
            elif cx is Const and cy is Const:
                # Truncate toward zero for constant operands.
                return const(int(x.value / y.value))
            elif x == y:
                return const(1)
            else:
                return Div(x, y)
        else:
            return any_value
    def shape(self, x):
        # The "shape" of an array value is the tuple of its dims; anything
        # else (scalars) has an empty shape tuple.
        if isinstance(x, Shape):
            return Tuple(x.dims)
        else:
            return Tuple(())

    def elt_type(self, x):
        # Shape inference is type-agnostic; element types are irrelevant here.
        return "DON'T CARE ABOUT ELT TYPES"

    def alloc_array(self, _, dims):
        # Allocation result is fully described by its dims (elt type ignored).
        return make_shape(dims)
    def index(self, arr, idx):
        """Result shape of indexing abstract array `arr` with abstract index
        `idx` (a scalar, slice, or tuple of them). Scalar indices remove a
        dimension; slices keep it (possibly resized)."""
        if isinstance(arr, Scalar):
            # Indexing a scalar is treated as the identity.
            return arr
        assert arr.__class__ is Shape
        # Normalize the index to a list of per-axis index values.
        if isinstance(idx, (Scalar, Slice, ConstSlice)):
            indices = [idx]
        elif idx.__class__ is Tuple:
            indices = idx.elts
        else:
            assert False, "Unexpected index: %s" % (idx,)
        result_dims = []
        for (i, curr_idx) in enumerate(indices):
            old_dim = arr.dims[i]
            if curr_idx is None or \
               (isinstance(curr_idx, Const) and curr_idx.value is None):
                # None means "keep this axis unchanged".
                result_dims.append(old_dim)
            elif isinstance(curr_idx, Scalar):
                # Scalar index removes this axis.
                pass
            elif curr_idx.__class__ is ConstSlice:
                # A constant-size slice fixes the output dim.
                result_dims.append(curr_idx.nelts)
            elif curr_idx.__class__ is Shape:
                if len(curr_idx.dims) == 0:
                    # same as unknown scalar
                    pass
                else:
                    assert len(curr_idx.dims) == 1, "Indexing by a multi-dimensional array not yet supported"
                    # Fancy (array) indexing: output dim = index array length.
                    result_dims.append(curr_idx.dims[0])
            else:
                assert curr_idx.__class__ is Slice, "Unsupported index %s" % curr_idx
                # Resolve the slice's lower bound symbolically; negative
                # constant starts count from the end of the axis.
                if curr_idx.start is None:
                    lower = const(0)
                elif isinstance(curr_idx.start, Const):
                    if curr_idx.start.value is None:
                        lower = const(0)
                    elif curr_idx.start.value < 0:
                        lower = self.sub(old_dim, curr_idx.start)
                    else:
                        lower = curr_idx.start
                else:
                    lower = any_scalar
                # Same for the upper bound.
                if curr_idx.stop is None:
                    upper = old_dim
                elif isinstance(curr_idx.stop, Const):
                    if curr_idx.stop.value is None:
                        upper = old_dim
                    elif curr_idx.stop.value < 0:
                        upper = self.sub(old_dim, curr_idx.stop)
                    else:
                        upper = curr_idx.stop
                else:
                    upper = any_scalar
                n = self.sub(upper, lower)
                step = curr_idx.step
                # Divide by the step only when it is a known constant != 1.
                if step and \
                   isinstance(step, Const) and \
                   step.value is not None and \
                   step.value != 1:
                    n = self.div(n, step)
                result_dims.append(n)
        # Unindexed trailing axes are preserved.
        n_original = len(arr.dims)
        n_idx = len(indices)
        if n_original > n_idx:
            result_dims.extend(arr.dims[n_idx:])
        return make_shape(result_dims)

    def slice_along_axis(self, arr, axis):
        # Remove one axis from an array shape; a 0-d result degrades to an
        # unknown scalar.
        if arr.__class__ is Shape:
            dims = arr.dims[:axis] + arr.dims[(axis+1):]
            if len(dims) > 0:
                return Shape(dims)
            else:
                return any_scalar
        else:
            return arr
    def tuple(self, elts):
        return Tuple(tuple(elts))

    def concat_tuples(self, t1, t2):
        return Tuple(t1.elts + t2.elts)

    def setidx(self, arr, idx, v):
        # Stores don't produce a value; shapes are unaffected.
        pass

    def loop(self, start_idx, stop_idx, body):
        # Shapes are iteration-invariant, so evaluating the body once with
        # the start index suffices.
        body(start_idx)

    class Accumulator(object):
        """Mutable cell used to thread an accumulated value through a loop body."""

        def __init__(self, v):
            self.v = v

        def update(self, new_v):
            self.v = new_v

        def get(self):
            return self.v

    def accumulate_loop(self, start_idx, stop_idx, body, init):
        # As with `loop`, a single symbolic iteration captures the shape.
        acc = self.Accumulator(init)
        body(acc, start_idx)
        return acc.get()
    def check_equal_sizes(self, sizes):
        # No runtime checking during abstract interpretation.
        pass

    def slice_value(self, start, stop, step):
        """Abstract value for a slice expression.

        If all slice components are constants we can ignore the exact
        start/stop/step and track only the number of elements in the slice.
        """
        if start.__class__ is Const and \
           stop.__class__ is Const and \
           stop.value is not None and \
           step.__class__ is Const:
            start_val = start.value
            if start_val is None:
                start_val = 0
            step_val = step.value
            if step_val is None:
                step_val = 1
            nelts = (stop.value - start_val) / step_val
            # TODO:
            # Properly handle negative slicing
            if nelts >= 0:
                return ConstSlice(nelts)
        return Slice(start, stop, step)
    def call(self, fn, args):
        # Unpack a closure's captured args before dispatching to the
        # module-level symbolic_call.
        if fn.__class__ is Closure:
            args = tuple(fn.args) + tuple(args)
            fn = fn.fn
        return symbolic_call(fn, args)

    def invoke(self, fn, args):
        return self.call(fn, args)

    # Abstract counterparts of the None constant and the full slice [:].
    none = None
    null_slice = slice(None, None, None)

    def identity_function(self, x):
        return x
    def visit_fn(self, fn):
        """Seed the abstract environment from the function's argument types
        and interpret its body."""
        assert isinstance(fn, syntax.TypedFn), "Expected typed function, got %s" % fn
        self.fn = fn
        # Maps variable names to abstract shape values.
        self.value_env = {}
        # Maps scalar variables to sets of variables known to be equal.
        self.equivalence_classes = {}
        # Static offset facts (e.g. stop = start + k) used by visit_Slice.
        self.known_offsets = OffsetAnalysis().visit_fn(fn)
        arg_types = [fn.type_env[name] for name in fn.arg_names]
        input_values = shape_from_type.Converter().from_types(arg_types)
        for n, v in zip(fn.arg_names, input_values):
            self.value_env[n] = v
        self.visit_block(fn.body)
    def unify_scalar_var(self, x, y):
        """
        Unification is different than combining in that it imposes constraints on
        the program. If, for example, we're unifying some scalar that's reached the
        top of the lattice (and thus know nothing about it statically), with a
        scalar known to be some constant-- then the result is we expect both
        variables to be equal to that constant.
        """
        assert isinstance(x, Var), "Expected scalar variable, but got: " + str(x)
        assert isinstance(y, Scalar), "Expected scalar, but got: " + str(y)
        if y == any_scalar:
            return x
        # Merge y into x's equivalence class and point every member at it.
        equivs = self.equivalence_classes.get(x, set([]))
        equivs.add(y)
        for var in equivs:
            self.equivalence_classes[var] = equivs
        if isinstance(y, Const):
            # A constant pins the value of the whole class.
            for var in equivs:
                self.value_env[var] = y
            return y
        else:
            # NOTE(review): `var` here is the last element of the set
            # iteration above — an arbitrary member of the equivalence
            # class. Looks intentional (any representative works) but is
            # order-dependent; confirm.
            return var

    def unify_scalar_pairs(self, xs, ys):
        # Element-wise unification of two tuples of scalars.
        result_elts = []
        for xi, yi in zip(xs.elts, ys.elts):
            result_elts.append(self.unify_scalars(xi, yi))
        return result_elts

    def unify_scalar_list(self, values):
        # Fold unification over a non-empty list of scalar values.
        assert len(values) > 0
        acc = any_scalar
        for v in values:
            acc = self.unify_scalars(acc, v)
        return acc

    def unify_scalars(self, x, y):
        # Unknown unifies with anything; otherwise at least one side must be
        # a variable for a constraint to be expressible.
        if isinstance(x, Unknown):
            return y
        elif isinstance(y, Unknown):
            return x
        elif isinstance(x, Var):
            return self.unify_scalar_var(x, y)
        elif isinstance(y, Var):
            return self.unify_scalar_var(y, x)
        else:
            raise RuntimeError("Unsupported by unify: %s, %s" % (x, y))
    def visit_merge_loop_start(self, merge):
        # On loop entry only the left (incoming) branch of each phi is known.
        for (k, (l, _)) in merge.iteritems():
            self.value_env[k] = self.visit_expr(l)

    def visit_merge_loop_repeat(self, merge):
        self.visit_merge(merge)

    def visit_merge(self, merge):
        # Join both branches of each phi node into a single abstract value.
        for (k, (l, r)) in merge.iteritems():
            new_l = self.visit_expr(l)
            new_r = self.visit_expr(r)
            self.value_env[k] = new_l.combine(new_r)

    def visit_expr(self, expr):
        abstract_shape = SyntaxVisitor.visit_expr(self, expr)
        # Fail loudly on expression kinds without a shape rule.
        assert abstract_shape is not None, \
            "Unsupported expression in shape inference: %s" % expr.node_type()
        return abstract_shape
    def visit_Alloc(self, expr):
        # Raw allocation yields a pointer to scalar storage.
        return Ptr(any_scalar)

    def visit_Cast(self, expr):
        # Casts produce scalars of unknown value.
        return any_scalar

    def visit_TypeValue(self, expr):
        return unknown_value

    def visit_Struct(self, expr):
        # Structs encode arrays (data + shape tuple), tuples, and slices.
        if isinstance(expr.type, ArrayT):
            # args[1] is the shape tuple of the array struct.
            shape_tuple = self.visit_expr(expr.args[1])
            return make_shape(shape_tuple.elts)
        elif isinstance(expr.type, TupleT):
            return Tuple(self.visit_expr_list(expr.args))
        elif isinstance(expr.type, SliceT):
            start, stop, step = self.visit_expr_list(expr.args)
            return Slice(start, stop, step)
        else:
            return unknown_value

    # Function references become empty closures.
    def visit_Fn(self, fn):
        return Closure(fn, [])

    def visit_UntypedFn(self, fn):
        return Closure(fn, [])

    def visit_TypedFn(self, fn):
        return Closure(fn, [])
    def shape_from_tuple(self, expr):
        # Convert a tuple-valued expression into an array Shape; fall back to
        # unknown dims of the static rank when the tuple isn't known.
        shape_tuple = self.visit_expr(expr)
        if shape_tuple.__class__ is Tuple:
            return make_shape(tuple(shape_tuple.elts))
        elif shape_tuple.__class__ is Const:
            return make_shape((shape_tuple.value,))
        else:
            return make_shape((any_scalar,) * expr.type.rank)

    def tuple_from_shape(self, expr):
        # Inverse of shape_from_tuple: expose an array's dims as a Tuple.
        # NOTE(review): the local `shape` shadows the imported `shape`
        # module; harmless here since the module isn't referenced below.
        shape = self.visit_expr(expr)
        if shape.__class__ is Shape:
            return Tuple(tuple(shape.dims))
        elif shape.__class__ is Const:
            return Tuple((shape.value,))
        else:
            return Tuple((any_scalar,) * expr.type.rank)
    def visit_ArrayView(self, expr):
        return self.shape_from_tuple(expr.shape)

    def visit_Reshape(self, expr):
        return self.shape_from_tuple(expr.shape)

    def visit_Shape(self, expr):
        return self.tuple_from_shape(expr.array)

    def visit_Transpose(self, expr):
        # Transposition reverses the dimension order.
        shape = self.visit_expr(expr.array)
        if shape.__class__ is Shape:
            return Shape(tuple(reversed(shape.dims)))
        else:
            return shape

    def visit_AllocArray(self, expr):
        return self.shape_from_tuple(expr.shape)

    def visit_Array(self, expr):
        # An array literal: combine element shapes and prepend the length.
        elts = self.visit_expr_list(expr.elts)
        elt = combine_list(elts)
        n = len(elts)
        res = increase_rank(elt, 0, const(n))
        return res

    def visit_ConstArray(self, expr):
        return self.shape_from_tuple(expr.shape)

    def visit_ConstArrayLike(self, expr):
        # Same shape as the template array.
        return self.visit_expr(expr.array)

    def ravel(self, shape):
        # Flattening yields a 1-d shape whose length is the product of dims.
        if isinstance(shape, Shape):
            nelts = const(1)
            for dim in shape.dims:
                nelts = self.mul(nelts, dim)
            return Shape((nelts,))
        else:
            return any_value

    def visit_Ravel(self, expr):
        shape = self.visit_expr(expr.array)
        return self.ravel(shape)

    def visit_Range(self, expr):
        # range(start, stop, step) is 1-d; its length is known only when the
        # slice folds to a constant size.
        start = self.visit_expr(expr.start)
        stop = self.visit_expr(expr.stop)
        step = self.visit_expr(expr.step)
        slice_value = self.slice_value(start, stop, step)
        if slice_value.__class__ is ConstSlice:
            return Shape((slice_value.nelts,))
        else:
            return Shape((any_scalar,))
    def visit_Slice(self, expr):
        step = self.visit_expr(expr.step)
        # If start/stop are variables with a statically-known offset between
        # them (from OffsetAnalysis), the slice has a constant size.
        if expr.start.__class__ is syntax.Var and \
           expr.stop.__class__ is syntax.Var and \
           step.__class__ is Const:
            start_name = expr.start.name
            stop_name = expr.stop.name
            offsets = self.known_offsets.get(stop_name, [])
            # NOTE(review): `step` is rebound here from a Const to a plain
            # int; if no offset matches below, the int flows into
            # slice_value, whose Const check will then fail — confirm this
            # fallback is intended.
            step = step.value if step.value else 1
            for (other_var, offset) in offsets:
                if other_var == start_name:
                    # Ceiling division: number of elements in the slice.
                    nelts = (offset + step - 1) / step
                    # assert False, (start_name, stop_name, offsets)
                    return ConstSlice(nelts)
        start = self.visit_expr(expr.start)
        stop = self.visit_expr(expr.stop)
        return self.slice_value(start, stop, step)

    def visit_Const(self, expr):
        return Const(expr.value)

    def visit_ClosureElt(self, expr):
        # Project one captured argument out of a closure value.
        clos = self.visit_expr(expr.closure)
        assert clos.__class__ is Closure, \
            "Unexpected closure shape %s for expression %s" % (clos, expr)
        return clos.args[expr.index]

    def visit_TupleProj(self, expr):
        t = self.visit_expr(expr.tuple)
        assert isinstance(t, Tuple), "Expected tuple type but got %s : %s" % (t, type(t))
        return t.elts[expr.index]
    def visit_Attribute(self, expr):
        """Shape of an attribute access, dispatched on the abstract value of
        the receiver (array, tuple, slice, closure or struct)."""
        v = self.visit_expr(expr.value)
        name = expr.name
        if v.__class__ is Shape:
            # Well-known array struct fields.
            if name == 'shape':
                return Tuple(v.dims)
            elif name == 'strides':
                # Strides are unknown scalars, one per dimension.
                return Tuple((any_scalar,) * len(v.dims) )
            elif name in ('offset', 'size', 'nelts'):
                return any_scalar
            elif name == 'data':
                return Ptr(any_scalar)
        elif v.__class__ is Tuple:
            # Tuple fields are named 'elt<i>' or just '<i>'.
            if name.startswith('elt'):
                idx = int(name[3:])
            else:
                idx = int(name)
            return v[idx]
        elif v.__class__ is Slice:
            # start / stop / step pass straight through.
            return getattr(v, name)
        elif v.__class__ is Closure:
            # Closure fields: 'elt<i>', 'closure_elt<i>' or '<i>'.
            if name.startswith('elt'):
                idx = int(name[3:])
            elif name.startswith('closure_elt'):
                idx = int(name[len('closure_elt'):])
            else:
                idx = int(name)
            return v.args[idx]
        elif v.__class__ is Struct:
            return v.values[v.fields.index(name)]
        # Fallback: derive the abstract value from the field's static type.
        t = expr.value.type.field_type(name)
        if isinstance(t, ScalarT):
            return any_scalar
        else:
            return any_value
    def visit_PrimCall(self, expr):
        # Arithmetic prims use the symbolic helpers; everything else just
        # broadcasts the argument shapes together.
        p = expr.prim
        args = self.visit_expr_list(expr.args)
        if p == prims.add:
            return self.add(args[0], args[1])
        elif p == prims.subtract:
            return self.sub(args[0], args[1])
        elif p == prims.multiply:
            return self.mul(args[0], args[1])
        elif p == prims.divide:
            return self.div(args[0], args[1])
        else:
            result = shape.combine_list(args, preserve_const = False)
            if result.__class__ is Shape:
                return result
            else:
                # once a scalar passes through some prim, it's no longer the same value!
                return any_scalar
def visit_Select(self, expr):
cond = self.visit_expr(expr.cond)
falseval = self.visit_expr(expr.true_value)
trueval = self.visit_expr(expr.false_value)
return cond.combine(falseval).combine(trueval)
def visit_Var(self, expr):
name = expr.name
if name in self.value_env:
return self.value_env[name]
elif name in self.equivalence_classes:
for other_name in self.equivalence_classes[name]:
if other_name in self.value_env:
return self.value_env[other_name]
raise RuntimeError("Unknown variable: %s in function %s" % (expr, self.fn.name))
    def visit_Tuple(self, expr):
        # Evaluate each element and wrap the results in an abstract Tuple.
        return Tuple(self.visit_expr_list(expr.elts))
    def visit_Call(self, expr):
        # Evaluate callee and arguments abstractly, then symbolically apply
        # the callee's cached shape expression to the argument shapes.
        fn = self.visit_expr(expr.fn)
        args = self.visit_expr_list(expr.args)
        return symbolic_call(fn, args)
def visit_Closure(self, clos):
assert not isinstance(clos.fn, str), \
"[ShapeInference] Function names in closures not supported: " + clos.fn
fn = self.visit_expr(clos.fn)
closure_arg_shapes = self.visit_expr_list(clos.args)
if fn.__class__ is Closure:
closure_arg_shapes = tuple(fn.args) + tuple(closure_arg_shapes)
fn = fn.fn
return Closure(fn, closure_arg_shapes)
    def visit_Index(self, expr):
        """Abstract shape of an indexing expression `value[index]`."""
        arr = self.visit_expr(expr.value)
        idx = self.visit_expr(expr.index)
        if arr.__class__ is Tuple and idx.__class__ is Const:
            # Constant index into an abstract tuple selects one element.
            return arr[idx.value]
        elif arr.__class__ is Shape:
            if isinstance(idx, Scalar):
                # A scalar index removes the leading dimension.
                return shape.lower_rank(arr, 0)
            elif idx.__class__ is Shape:
                # Indexing with an array: the leading dims of the result are
                # replaced by the index array's dims.
                assert len(idx.dims) <= len(arr.dims), \
                    "Can't index into rank %d array with rank %d indices" % \
                    (len(arr.dims), len(idx.dims))
                dims = [d for d in arr.dims]
                for (i,d) in enumerate(idx.dims):
                    dims[i] = d
                return shape.make_shape(dims)
            else:
                # Other index values (e.g. tuples/slices) go through the
                # generic index helper.
                return self.index(arr, idx)
        elif arr.__class__ is Ptr:
            # Pointer indexing yields a scalar element.
            assert isinstance(arr.elt_shape, Scalar)
            assert isinstance(idx, Scalar)
            return any_scalar
        # Diagnostics for unindexable values.
        if isinstance(arr, Scalar):
            assert False, "Expected %s to be array, shape inference found scalar" % (arr,)
        elif arr == shape.any_value:
            raise ShapeInferenceFailure(expr, self.fn)
        assert False, \
            "Can't index (%s) with array shape %s and index shape %s" % \
            (expr, arr, idx)
    def visit_IndexMap(self, expr):
        """Shape of an IndexMap: iteration bounds combined with the shape
        produced by the elementwise function."""
        bounds = self.visit_expr(expr.shape)
        clos = self.visit_expr(expr.fn)
        # If the mapped function takes the index tuple as one argument, or
        # the bounds aren't a tuple, pass the bounds through unsplit;
        # otherwise unpack one index per dimension.
        if isinstance(clos.fn.input_types[-1], TupleT) or bounds.__class__ is not Tuple:
            indices = [bounds]
        else:
            indices = bounds.elts
        elt_result = symbolic_call(clos, indices)
        return make_shape(combine_dims(bounds, elt_result))
    def visit_IndexReduce(self, expr):
        """Shape of an IndexReduce: the combiner's result over element and
        initial shapes (the iteration dimensions are reduced away)."""
        fn = self.visit_expr(expr.fn)
        combine = self.visit_expr(expr.combine)
        bounds = self.visit_expr(expr.shape)
        # Same index-splitting convention as visit_IndexMap.
        if isinstance(fn.fn.input_types[-1], TupleT) or bounds.__class__ is not Tuple:
            indices = [bounds]
        else:
            indices = bounds.elts
        elt_shape = symbolic_call(fn, indices)
        # Missing init defaults to the element shape itself.
        init_shape = elt_shape if self.expr_is_none(expr.init) else self.visit_expr(expr.init)
        return symbolic_call(combine, [init_shape, elt_shape])
    def visit_IndexScan(self, expr):
        """Shape of an IndexScan: one emitted element shape per iteration,
        prefixed by the iteration bounds."""
        fn = self.visit_expr(expr.fn)
        combine = self.visit_expr(expr.combine)
        emit = self.visit_expr(expr.emit)
        bounds = self.visit_expr(expr.shape)
        # Same index-splitting convention as visit_IndexMap.
        if isinstance(fn.fn.input_types[-1], TupleT) or bounds.__class__ is not Tuple:
            indices = [bounds]
        else:
            indices = bounds.elts
        elt_shape = symbolic_call(fn, indices)
        # Missing init defaults to the element shape itself.
        init_shape = elt_shape if self.expr_is_none(expr.init) else self.visit_expr(expr.init)
        acc_shape = symbolic_call(combine, [init_shape, elt_shape])
        output_elt_shape = symbolic_call(emit, [acc_shape])
        return make_shape(combine_dims(bounds, output_elt_shape))
def normalize_axes(self, axis, args):
if isinstance(axis, Expr):
axis = unwrap_constant(axis)
if isinstance(axis,tuple):
axes = axis
else:
axes = (axis,) * len(args)
assert len(axes) == len(args), \
"Mismatch between args %s and axes %s" % (args, axis)
return axes
    def adverb_elt_shapes(self, arg_shapes, axes):
        """
        Slice into array shapes along the specified axis.

        For each (shape, axis) pair: axis=None yields an anonymous scalar
        element; a valid axis slices that dimension away; a shape of lower
        rank than the axis is passed through unchanged.
        """
        elt_shapes = []
        for arg_shape, axis in zip(arg_shapes, axes):
            if axis is None:
                elt_shapes.append(any_scalar)
            elif axis < self.rank(arg_shape):
                elt_shapes.append(self.slice_along_axis(arg_shape, axis))
            else:
                elt_shapes.append(arg_shape)
        return elt_shapes
    def inner_map_result_shape(self, elt_result, arg_shapes, axes):
        """Build a Map's result shape from the elementwise result.

        Uses the first argument of maximal rank: with axis=None its full
        dims are prepended to the element result's dims; otherwise the
        traversed dimension alone is prepended.
        """
        max_rank = self.max_rank(arg_shapes)
        for i, arg_shape in enumerate(arg_shapes):
            r = self.rank(arg_shape)
            if r == max_rank:
                axis = axes[i]
                if axis is None:
                    combined_dims = dims(arg_shape) + dims(elt_result)
                    if len(combined_dims) > 0:
                        return Shape(combined_dims)
                    else:
                        # No dims at all: the result is a scalar.
                        return any_scalar
                else:
                    return increase_rank(elt_result, 0, arg_shape.dims[axis])
        # No argument matched (e.g. no args): the element result stands.
        return elt_result
def outer_map_result_shape(self, elt_result, arg_shapes, axes):
result_dims = list(dims(elt_result))
for i, arg_shape in enumerate(arg_shapes):
r = self.rank(arg_shape)
if r > 0:
axis = axes[i]
if axis is None:
result_dims.extend(arg_shape.dims)
else:
result_dims.append(arg_shape.dims[axis])
return make_shape(result_dims)
    def visit_Map(self, expr):
        """Shape of a Map adverb: apply the function to per-element shapes,
        then re-attach the traversed dimension(s)."""
        arg_shapes = self.visit_expr_list(expr.args)
        fn = self.visit_expr(expr.fn)
        axes = self.normalize_axes(expr.axis, expr.args)
        elt_shapes = self.adverb_elt_shapes(arg_shapes, axes)
        elt_result = symbolic_call(fn, elt_shapes)
        return self.inner_map_result_shape(elt_result, arg_shapes, axes)
def expr_is_none(self, expr):
return expr is None or expr.type.__class__ is NoneT
    def visit_Reduce(self, expr):
        """Shape of a Reduce adverb: the combiner's result over the
        elementwise result and the init value (traversed dims are gone)."""
        fn = self.visit_expr(expr.fn)
        combine = self.visit_expr(expr.combine)
        arg_shapes = self.visit_expr_list(expr.args)
        axes = self.normalize_axes(expr.axis, expr.args)
        elt_shapes = self.adverb_elt_shapes(arg_shapes, axes)
        elt_result = symbolic_call(fn, elt_shapes)
        # Missing init defaults to the element result itself.
        init = elt_result if self.expr_is_none(expr.init) else self.visit_expr(expr.init)
        return symbolic_call(combine, [init, elt_result])
    def visit_Scan(self, expr):
        """Shape of a Scan adverb: the emitted shape per step, re-attached
        to the traversed dimension like a Map."""
        fn = self.visit_expr(expr.fn)
        combine = self.visit_expr(expr.combine)
        arg_shapes = self.visit_expr_list(expr.args)
        axes = self.normalize_axes(expr.axis, expr.args)
        elt_shapes = self.adverb_elt_shapes(arg_shapes, axes)
        elt_result = symbolic_call(fn, elt_shapes)
        # Missing init defaults to the element result itself.
        init = elt_result if self.expr_is_none(expr.init) else self.visit_expr(expr.init)
        acc_shape = symbolic_call(combine, [init, elt_result])
        emit = self.visit_expr(expr.emit)
        emit_shape = symbolic_call(emit, [acc_shape])
        return self.inner_map_result_shape(emit_shape, arg_shapes, axes)
    def visit_OuterMap(self, expr):
        """Shape of an OuterMap adverb: cross-product of argument
        dimensions around the elementwise result."""
        fn = self.visit_expr(expr.fn)
        arg_shapes = self.visit_expr_list(expr.args)
        axes = self.normalize_axes(expr.axis, expr.args)
        elt_shapes = self.adverb_elt_shapes(arg_shapes, axes)
        elt_result = symbolic_call(fn, elt_shapes)
        return self.outer_map_result_shape(elt_result, arg_shapes, axes)
def visit_Assign(self, stmt):
rhs = self.visit_expr(stmt.rhs)
if stmt.lhs.__class__ in (syntax.Var, syntax.Tuple):
bind_syntax(stmt.lhs, rhs, self.value_env)
def visit_Return(self, stmt):
new_value = self.visit_expr(stmt.value)
old_value = self.value_env.get("$return", unknown_value)
combined = old_value.combine(new_value)
self.value_env["$return"] = combined
    def visit_ForLoop(self, stmt):
        """Analyze a for-loop, treating the loop variable as an unknown
        scalar and iterating the body analysis to a (cheap) fixpoint."""
        self.value_env[stmt.var.name] = any_scalar
        SyntaxVisitor.visit_ForLoop(self, stmt)
        # visit body a second time in case first-pass fixed values relative
        # to initial value of iteration vars
        self.visit_block(stmt.body)
    def visit_While(self, stmt):
        """Analyze a while-loop, re-running the body once to stabilize
        values that depend on earlier iterations."""
        SyntaxVisitor.visit_While(self, stmt)
        # visit body a second time in case first-pass fixed values relative
        # to initial value of iteration vars
        self.visit_block(stmt.body)
# Memoized shape-inference environments, keyed by typed function cache key.
_shape_env_cache = {}

def shape_env(typed_fn):
    """Run shape inference over `typed_fn` and return its value environment,
    memoizing the result per cache key."""
    key = typed_fn.cache_key
    try:
        return _shape_env_cache[key]
    except KeyError:
        pass
    inference = ShapeInference()
    inference.visit_fn(typed_fn)
    env = inference.value_env
    _shape_env_cache[key] = env
    return env
# Memoized abstract return shapes, keyed by typed function cache key.
_shape_cache = {}

def call_shape_expr(typed_fn):
    """Return (and memoize) the abstract "$return" shape of `typed_fn`,
    defaulting to Const(None) for functions that never return a value."""
    key = typed_fn.cache_key
    try:
        return _shape_cache[key]
    except KeyError:
        abstract_shape = shape_env(typed_fn).get("$return", Const(None))
        _shape_cache[key] = abstract_shape
        return abstract_shape
def bind_syntax(lhs, rhs, env):
    """Destructure a syntax-level LHS (Var or Tuple) against an abstract
    RHS value, writing variable bindings into `env`."""
    if isinstance(lhs, syntax.Var):
        env[lhs.name] = rhs
    elif isinstance(lhs, syntax.Tuple):
        assert isinstance(rhs, Tuple), "Expected tuple on RHS of binding %s = %s" % (lhs,rhs)
        for elt_lhs, elt_rhs in zip(lhs.elts, rhs.elts):
            bind_syntax(elt_lhs, elt_rhs, env)
def bind(lhs, rhs, env):
    """Destructure abstract value `rhs` against abstract pattern `lhs`,
    binding shape variables into `env`.

    Raises:
      RuntimeError: if `lhs` is not a recognized abstract pattern.
    """
    if isinstance(lhs, Var):
        env[lhs] = rhs
    elif isinstance(lhs, Shape):
        assert isinstance(rhs, Shape), "Expected %s, got %s" % (lhs, rhs)
        bind_pairs(lhs.dims, rhs.dims, env)
    elif isinstance(lhs, Closure):
        assert isinstance(rhs, Closure)
        bind_pairs(lhs.args, rhs.args, env)
    elif isinstance(lhs, Tuple):
        if rhs == any_value:
            bind_pairs(lhs.elts, [any_value for _ in lhs.elts], env)
        elif rhs == unknown_value:
            # BUG FIX: this branch previously tested `lhs == unknown_value`,
            # comparing the Tuple *pattern* instead of the incoming value, so
            # an unknown rhs fell through to the isinstance assert below and
            # failed. Mirror the any_value case on rhs instead.
            bind_pairs(lhs.elts, [unknown_value for _ in lhs.elts], env)
        else:
            assert isinstance(rhs, Tuple), "Expected tuple on RHS of binding %s = %s" % (lhs,rhs)
            bind_pairs(lhs.elts, rhs.elts, env)
    else:
        raise RuntimeError("Unexpected shape LHS: %s" % lhs)
def bind_pairs(xs, ys, env):
    """Bind corresponding elements of two equal-length sequences."""
    assert len(xs) == len(ys), \
        "Can't bind %s and %s due to unequal lengths" % (xs, ys)
    for lhs, rhs in zip(xs, ys):
        bind(lhs, rhs, env)
def subst(x, env):
    """Substitute variables in abstract value `x` according to `env`.

    Scalars pass through unchanged; containers are rebuilt with their
    contents substituted recursively.

    Raises:
      RuntimeError: for unrecognized abstract value classes.
    """
    if isinstance(x, Var):
        # Every free variable must have a binding.
        assert x in env, "Unknown variable %s" % x
        return env[x]
    elif isinstance(x, Scalar):
        return x
    elif isinstance(x, Shape):
        return make_shape(subst_list(x.dims, env))
    elif isinstance(x, Tuple):
        return Tuple(tuple((subst_list(x.elts, env))))
    elif isinstance(x, Closure):
        return Closure(x.fn, subst_list(x.args, env))
    elif isinstance(x, Ptr):
        return Ptr(subst(x.elt_shape, env))
    else:
        raise RuntimeError("Unexpected abstract expression: %s" % x)
def subst_list(xs, env):
    """Apply `subst` with the same environment to every element of `xs`."""
    substituted = []
    for element in xs:
        substituted.append(subst(element, env))
    return substituted
def symbolic_call(fn, abstract_inputs):
    """Abstractly apply `fn` (a function or Closure) to abstract inputs.

    Flattens a Closure into its underlying function plus captured arguments,
    binds the function's shape formals to the combined argument list, and
    substitutes into the function's cached symbolic result.
    """
    # result in terms of variables like input0, (shape: input1, input2), etc..
    if fn.__class__ is Closure:
        closure_elts = tuple(fn.args)
        fn = fn.fn
    else:
        closure_elts = ()
    abstract_result_value = call_shape_expr(fn)
    conv = shape_from_type.Converter()
    shape_formals = conv.from_types(fn.input_types)
    env = {}
    bind_pairs(shape_formals, closure_elts + tuple(abstract_inputs), env)
    return subst(abstract_result_value, env)
| {
"content_hash": "c0d01072ba2120677c577a5d7d773b5f",
"timestamp": "",
"source": "github",
"line_count": 897,
"max_line_length": 99,
"avg_line_length": 30.552954292084728,
"alnum_prop": 0.6176749616872218,
"repo_name": "pombredanne/parakeet",
"id": "08778b741f6d23b8259fef8369905a39c18e6deb",
"size": "27407",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "parakeet/shape_inference/shape_inference.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1008397"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from django.contrib.auth.models import Group
from hs_access_control.models import PrivilegeCodes
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
from hs_access_control.tests.utilities import global_reset, is_equal_to_as_set
class UnitTests(MockIRODSTestCaseMixin, TestCase):
    """ test basic behavior of each routine """

    def setUp(self):
        """Create four users, one resource ('bikes') and one group ('bikers')."""
        super(UnitTests, self).setUp()
        global_reset()
        self.group, _ = Group.objects.get_or_create(name='Resource Author')
        self.alva = hydroshare.create_account(
            'alva@gmail.com',
            username='alva',
            first_name='alva',
            last_name='couch',
            superuser=False,
            groups=[]
        )
        self.george = hydroshare.create_account(
            'george@gmail.com',
            username='george',
            first_name='george',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.john = hydroshare.create_account(
            'john@gmail.com',
            username='john',
            first_name='john',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.admin = hydroshare.create_account(
            'admin@gmail.com',
            username='admin',
            first_name='first_name_admin',
            last_name='last_name_admin',
            superuser=True,
            groups=[]
        )
        # george creates a resource 'bikes'
        self.bikes = hydroshare.create_resource(
            resource_type='GenericResource',
            owner=self.george,
            title='Bikes',
            metadata=[],
        )
        # george creates a group 'bikers'
        self.bikers = self.george.uaccess.create_group(
            title='Bikers', description="We rae the bikers")

    def test_user_create_group(self):
        """Creating a group adds it to the creator's viewable groups."""
        george = self.george
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_groups,
                [bikers]))
        foo = george.uaccess.create_group(
            title='Foozball', description="We are the foozball")
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_groups, [
                    foo, bikers]))

    def test_user_delete_group(self):
        """Deleting a group removes it from the owner's viewable groups."""
        george = self.george
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_groups,
                [bikers]))
        george.uaccess.delete_group(bikers)
        self.assertTrue(is_equal_to_as_set(george.uaccess.view_groups, []))

    def test_user_owned_groups(self):
        """A created group appears in the creator's owned groups."""
        george = self.george
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.owned_groups,
                [bikers]))

    def test_user_owns_group(self):
        """owns_group is True only for the group's owner."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.owns_group(bikers))
        self.assertFalse(alva.uaccess.owns_group(bikers))

    def test_user_can_change_group(self):
        """can_change_group is True only for the group's owner."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_change_group(bikers))
        self.assertFalse(alva.uaccess.can_change_group(bikers))

    def test_user_can_view_group(self):
        """A private group is viewable by its owner but not outsiders."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_view_group(bikers))
        bikers.gaccess.public = False
        # NOTE(review): this saves the Group, not the mutated gaccess record;
        # confirm whether bikers.gaccess.save() was intended.
        bikers.save()
        self.assertFalse(alva.uaccess.can_view_group(bikers))

    def test_user_can_view_group_metadata(self):
        """Metadata of a private, undiscoverable group is hidden from outsiders."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_view_group_metadata(bikers))
        bikers.gaccess.public = False
        bikers.gaccess.discoverable = False
        # NOTE(review): saves the Group, not gaccess — confirm intent.
        bikers.save()
        self.assertFalse(alva.uaccess.can_view_group_metadata(bikers))

    def test_user_can_change_group_flags(self):
        """Only the owner may change group flags."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_change_group_flags(bikers))
        self.assertFalse(alva.uaccess.can_change_group_flags(bikers))

    def test_user_can_delete_group(self):
        """Only the owner may delete the group."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_delete_group(bikers))
        self.assertFalse(alva.uaccess.can_delete_group(bikers))

    def test_user_can_share_group(self):
        """Only the owner may share the group."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            george.uaccess.can_share_group(
                bikers, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_group(
                bikers, PrivilegeCodes.VIEW))

    def test_user_can_share_group_with_user(self):
        """Only the owner may share the group with a specific user."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            george.uaccess.can_share_group_with_user(
                bikers, alva, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_group_with_user(
                bikers, george, PrivilegeCodes.VIEW))

    def test_user_share_group_with_user(self):
        """Sharing a group adds the target user to its members."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))

    def test_user_unshare_group_with_user(self):
        """Unsharing removes a previously shared member."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))
        george.uaccess.unshare_group_with_user(bikers, alva)
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))

    def test_user_can_unshare_group_with_user(self):
        """Unsharing is only possible once the user has been shared with."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertFalse(
            george.uaccess.can_unshare_group_with_user(
                bikers, alva))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            george.uaccess.can_unshare_group_with_user(
                bikers, alva))

    def test_user_get_group_unshare_users(self):
        """Shared users show up as unshare candidates."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_group_unshare_users(bikers),
                []))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_group_unshare_users(bikers),
                [alva]))

    def test_user_view_resources(self):
        """Created resources appear in the creator's viewable resources."""
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_resources,
                [bikes]))
        trikes = hydroshare.create_resource(resource_type='GenericResource',
                                            owner=self.george,
                                            title='Trikes',
                                            metadata=[],)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_resources, [
                    bikes, trikes]))

    def test_user_owned_resources(self):
        """Created resources appear in the creator's owned resources."""
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.owned_resources,
                [bikes]))
        trikes = hydroshare.create_resource(resource_type='GenericResource',
                                            owner=self.george,
                                            title='Trikes',
                                            metadata=[],)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.owned_resources, [
                    bikes, trikes]))

    def test_user_edit_resources(self):
        """Created resources appear in the creator's editable resources."""
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.edit_resources,
                [bikes]))
        trikes = hydroshare.create_resource(resource_type='GenericResource',
                                            owner=self.george,
                                            title='Trikes',
                                            metadata=[],)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.edit_resources, [
                    bikes, trikes]))

    def test_user_get_resources_with_explicit_access(self):
        """Ownership is explicit OWNER privilege, not CHANGE or VIEW."""
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.OWNER),
                [bikes]))
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.CHANGE), []))
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.VIEW), []))

    def test_user_get_groups_with_explicit_access(self):
        """Group ownership is explicit OWNER privilege for the owner only."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_groups_with_explicit_access(
                    PrivilegeCodes.OWNER),
                [bikers]))
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.get_groups_with_explicit_access(
                    PrivilegeCodes.CHANGE), []))
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.get_groups_with_explicit_access(
                    PrivilegeCodes.VIEW), []))

    def test_user_owns_resource(self):
        """owns_resource is True only for the resource owner."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.owns_resource(bikes))
        self.assertFalse(alva.uaccess.owns_resource(bikes))

    def test_user_can_change_resource(self):
        """can_change_resource is True only for the resource owner."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_change_resource(bikes))
        self.assertFalse(alva.uaccess.can_change_resource(bikes))

    def test_user_can_change_resource_flags(self):
        """Only the owner may change resource flags."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_change_resource_flags(bikes))
        self.assertFalse(alva.uaccess.can_change_resource_flags(bikes))

    def test_user_can_view_resource(self):
        """An unshared private resource is visible only to its owner."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_view_resource(bikes))
        self.assertFalse(alva.uaccess.can_view_resource(bikes))

    def test_user_can_delete_resource(self):
        """Only the owner may delete the resource."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_delete_resource(bikes))
        self.assertFalse(alva.uaccess.can_delete_resource(bikes))

    def test_user_can_share_resource(self):
        """Only the owner may share the resource."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(
            george.uaccess.can_share_resource(
                bikes, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_resource(
                bikes, PrivilegeCodes.VIEW))

    def test_user_can_share_resource_with_user(self):
        """Only the owner may share the resource with a specific user."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(
            george.uaccess.can_share_resource_with_user(
                bikes, alva, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_resource_with_user(
                bikes, george, PrivilegeCodes.VIEW))

    def test_user_can_share_resource_with_group(self):
        """Only the owner may share the resource with a group."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(
            george.uaccess.can_share_resource_with_group(
                bikes, bikers, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_resource_with_group(
                bikes, bikers, PrivilegeCodes.VIEW))

    def test_user_share_resource_with_user(self):
        """Sharing a resource makes it viewable to the target user."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(alva.uaccess.view_resources, []))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.view_resources,
                [bikes]))

    def test_user_unshare_resource_with_user(self):
        """Unsharing removes the target user's view access."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(alva.uaccess.view_resources, []))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.view_resources,
                [bikes]))
        george.uaccess.unshare_resource_with_user(bikes, alva)
        self.assertTrue(is_equal_to_as_set(alva.uaccess.view_resources, []))

    def test_user_can_unshare_resource_with_user(self):
        """Unsharing is only possible after sharing with that user."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertFalse(
            george.uaccess.can_unshare_resource_with_user(
                bikes, alva))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            george.uaccess.can_unshare_resource_with_user(
                bikes, alva))

    def test_user_share_resource_with_group(self):
        """Sharing a resource with a group makes it viewable via that group."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.view_resources,
                [bikes]))

    def test_user_unshare_resource_with_group(self):
        """Unsharing from a group removes the group's view access."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.view_resources,
                [bikes]))
        george.uaccess.unshare_resource_with_group(bikes, bikers)
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))

    def test_user_can_unshare_resource_with_group(self):
        """Unsharing from a group is only possible after sharing."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertFalse(
            george.uaccess.can_unshare_resource_with_group(
                bikes, bikers))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            george.uaccess.can_unshare_resource_with_group(
                bikes, bikers))

    def test_user_get_resource_unshare_users(self):
        """Shared users appear as unshare candidates for a resource."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_users(bikes),
                []))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_users(bikes),
                [alva]))

    def test_user_get_resource_unshare_groups(self):
        """Shared groups appear as unshare candidates for a resource."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_groups(bikes),
                []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_groups(bikes),
                [bikers]))

    def test_group_members(self):
        """Sharing with CHANGE privilege adds a user to group members."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))

    def test_group_view_resources(self):
        """Resources shared with a group appear in its view_resources."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.view_resources,
                [bikes]))

    def test_group_edit_resources(self):
        """Resources shared with CHANGE privilege appear in edit_resources."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.edit_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.edit_resources,
                [bikes]))

    def test_group_get_resources_with_explicit_access(self):
        """Explicit group access reflects the privilege used when sharing."""
        george = self.george
        bikers = self.bikers
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.VIEW), []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.CHANGE),
                [bikes]))

    def test_group_owners(self):
        """Sharing with OWNER privilege adds a second group owner."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.owners, [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.owners, [
                    george, alva]))

    def test_group_view_users(self):
        """Owners are counted among group members."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))

    def test_group_edit_users(self):
        """Owners are counted among a group's edit users."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.edit_users,
                [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.edit_users, [
                    george, alva]))

    def test_group_get_effective_privilege(self):
        """Effective group privilege is OWNER for owner, NONE for others."""
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertEqual(
            bikers.gaccess.get_effective_privilege(george),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikers.gaccess.get_effective_privilege(alva),
            PrivilegeCodes.NONE)

    def test_resource_view_users(self):
        """Users with any privilege appear in a resource's view_users."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(bikes.raccess.view_users, [george]))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.view_users, [
                    george, alva]))

    def test_resource_edit_users(self):
        """Owners appear in a resource's edit_users."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(bikes.raccess.edit_users, [george]))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.edit_users, [
                    george, alva]))

    def test_resource_view_groups(self):
        """Groups granted CHANGE also gain view access to the resource."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikes.raccess.view_groups, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.view_groups,
                [bikers]))

    def test_resource_edit_groups(self):
        """Groups granted CHANGE appear in a resource's edit_groups."""
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikes.raccess.edit_groups, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.edit_groups,
                [bikers]))

    def test_resource_owners(self):
        """Sharing with OWNER privilege adds a second resource owner."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(bikes.raccess.owners, [george]))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.owners, [
                    george, alva]))

    def test_resource_get_effective_user_privilege(self):
        """Effective user privilege is OWNER for owner, NONE for others."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertEqual(
            bikes.raccess.get_effective_user_privilege(george),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikes.raccess.get_effective_user_privilege(alva),
            PrivilegeCodes.NONE)

    def test_resource_get_effective_group_privilege(self):
        """Group-derived privilege applies once the user joins the group."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        bikers = self.bikers
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertEqual(
            bikes.raccess.get_effective_group_privilege(alva),
            PrivilegeCodes.NONE)
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertEqual(
            bikes.raccess.get_effective_group_privilege(alva),
            PrivilegeCodes.CHANGE)

    def test_resource_get_effective_privilege(self):
        """Overall effective privilege combines user and group privileges."""
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertEqual(
            bikes.raccess.get_effective_privilege(george),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikes.raccess.get_effective_privilege(alva),
            PrivilegeCodes.NONE)
| {
"content_hash": "3ded6500f1b33d246f9ffee707d4d19d",
"timestamp": "",
"source": "github",
"line_count": 676,
"max_line_length": 79,
"avg_line_length": 35.77218934911242,
"alnum_prop": 0.5714167562649904,
"repo_name": "RENCI/xDCIShare",
"id": "a708c98c7e722b883df4ae4dadc2949f17c2477e",
"size": "24182",
"binary": false,
"copies": "2",
"ref": "refs/heads/xdci-develop",
"path": "hs_access_control/tests/test_units.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "381782"
},
{
"name": "HTML",
"bytes": "964877"
},
{
"name": "JavaScript",
"bytes": "2011819"
},
{
"name": "Python",
"bytes": "4334769"
},
{
"name": "R",
"bytes": "4472"
},
{
"name": "Shell",
"bytes": "52665"
},
{
"name": "XSLT",
"bytes": "790987"
}
],
"symlink_target": ""
} |
import json
import os
import pprint
import sys
import uuid
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google.cloud import pubsub_v1
from impl.database.database import JsonDatabase
# GCP project hosting this integration; must be set in the environment.
PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT']
# Pub/Sub subscription delivering Marketplace account/entitlement events.
PUBSUB_SUBSCRIPTION = 'codelab'
# Service name of the Cloud Commerce Partner Procurement API.
PROCUREMENT_API = 'cloudcommerceprocurement'
def _generate_internal_account_id():
### TODO: Replace with whatever ID generation code already exists. ###
return str(uuid.uuid4())
class Procurement(object):
"""Utilities for interacting with the Procurement API."""
def __init__(self, database):
self.service = build(PROCUREMENT_API, 'v1', cache_discovery=False)
self.database = database
##########################
### Account operations ###
##########################
def _get_account_name(self, account_id):
return 'providers/DEMO-{}/accounts/{}'.format(PROJECT_ID,
account_id)
def get_account(self, account_id):
"""Gets an account from the Procurement Service."""
name = self._get_account_name(account_id)
request = self.service.providers().accounts().get(name=name)
try:
response = request.execute()
return response
except HttpError as err:
if err.resp.status == 404:
return None
def approve_account(self, account_id):
"""Approves the account in the Procurement Service."""
name = self._get_account_name(account_id)
request = self.service.providers().accounts().approve(
name=name, body={'approvalName': 'signup'})
request.execute()
def handle_account_message(self, message):
"""Handles incoming Pub/Sub messages about account resources."""
account_id = message['id']
customer = self.database.read(account_id)
account = self.get_account(account_id)
############################## IMPORTANT ##############################
### In true integrations, Pub/Sub messages for new accounts should ###
### be ignored. Account approvals are granted as a one-off action ###
### during customer sign up. This codelab does not include the sign ###
### up flow, so it chooses to approve accounts here instead. ###
### Production code for real, non-codelab services should never ###
### blindly approve these. The following should be done as a result ###
### of a user signing up. ###
#######################################################################
if account:
approval = None
for account_approval in account['approvals']:
if account_approval['name'] == 'signup':
approval = account_approval
break
if approval:
if approval['state'] == 'PENDING':
# See above note. Actual production integrations should not
# approve blindly when receiving a message.
self.approve_account(account_id)
elif approval['state'] == 'APPROVED':
# Now that it's approved, store a record in the database.
internal_id = _generate_internal_account_id()
customer = {
'procurement_account_id': account_id,
'internal_account_id': internal_id,
'products': {}
}
self.database.write(account_id, customer)
else:
# The account has been deleted, so delete the database record.
if customer:
self.database.delete(account_id)
# Always ack account messages. We only care about the above scenarios.
return True
##############################
### Entitlement operations ###
##############################
def handle_entitlement_message(self):
    """Handles incoming Pub/Sub messages about entitlement resources.

    Not implemented at this step of the codelab: returning False leaves
    the message un-acked.
    """
    ### TODO: Complete in section 3. ###
    return False
def main(argv):
    """Main entrypoint to the integration with the Procurement Service."""
    if len(argv) != 1:
        print('Usage: python -m impl.step_2_account.app')
        return

    # Wire the Partner Procurement API wrapper to the local JSON database.
    procurement = Procurement(JsonDatabase())

    # Get the subscription object in order to perform actions on it.
    subscriber = pubsub_v1.SubscriberClient()
    subscription_path = subscriber.subscription_path(
        PROJECT_ID, PUBSUB_SUBSCRIPTION)

    def callback(message):
        """Dispatches a single Cloud Pub/Sub message to the right handler."""
        payload = json.loads(message.data)
        print('Received message:')
        pprint.pprint(payload)
        print()

        if 'entitlement' in payload:
            should_ack = procurement.handle_entitlement_message()
        elif 'account' in payload:
            should_ack = procurement.handle_account_message(payload['account'])
        else:
            # Neither an account nor an entitlement: ack and ignore. This
            # should never happen.
            should_ack = True

        if should_ack:
            message.ack()

    subscription = subscriber.subscribe(subscription_path, callback=callback)
    print('Listening for messages on {}'.format(subscription_path))
    print('Exit with Ctrl-\\')

    # Block forever; log and keep listening if the streaming pull fails.
    while True:
        try:
            subscription.result()
        except Exception as exception:
            print('Listening for messages on {} threw an Exception: {}.'.format(
                subscription_path, exception))
# Run the Pub/Sub subscriber loop when executed as a script.
if __name__ == '__main__':
    main(sys.argv)
| {
"content_hash": "6c7c02e78bdd739f3597fe37d8164350",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 80,
"avg_line_length": 35.472727272727276,
"alnum_prop": 0.5600546728173587,
"repo_name": "googlecodelabs/gcp-marketplace-integrated-saas",
"id": "13b1356c5b6cb4815dafaf9e72e65ad88d79b5e9",
"size": "6428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python2.7/impl/step_2_account/app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "78723"
},
{
"name": "Python",
"bytes": "111156"
}
],
"symlink_target": ""
} |
"""This program imports the datetime module and returns current date & time."""
import datetime
CURDATE = None
def get_current_date():
    """Return the current local date.

    Returns:
        datetime.date: Today's date as reported by the system clock.

    Example:
        >>> get_current_date()
        datetime.date(2015, 3, 8)
    """
    return datetime.date.today()
# Demo: print today's date when run directly (Python 2 print statement).
if __name__ == '__main__':
    CURDATE = get_current_date()
    print CURDATE
| {
"content_hash": "7a975f24672ad9d3875d8bbc5f6536e8",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 79,
"avg_line_length": 21.12,
"alnum_prop": 0.634469696969697,
"repo_name": "neal-rogers/baseball-card-inventory",
"id": "3ca267c1cb0ed289ad90f60ca2af600e43c18ec6",
"size": "574",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Source Files/date-time.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10069"
}
],
"symlink_target": ""
} |
'''export.py
The actions to create different formats from a handbuch.io-`book` live here
'''
import os
from os.path import abspath, dirname, isfile
import sys
import logging
from datetime import datetime
import pypandoc
from bs4 import BeautifulSoup
from .utils import get_siteurl
class Export(object):
    """Base class for exporting a handbuch.io book to an output format.

    The export pipeline is kicked off from ``__init__`` via ``prestart``;
    subclasses implement ``_create`` and may override ``prestart`` for
    format-specific preparation. Failures are collected in ``self.errors``.
    """

    def __init__(self, site, src, title, friendly_title, info, tmp_path,
                 overwrite=True):
        self.errors = []
        self.site = site
        self.src = src
        self.title = title
        self.friendly_title = friendly_title
        self.info = info
        self.tmp_path = tmp_path
        self.overwrite = overwrite
        self.prestart()

    def prestart(self):
        # Hook for subclasses; the default goes straight to start().
        self.start()

    def start(self):
        # Only build the export if preparation recorded no errors.
        if not self.errors:
            self._create()
class PRINTExport(Export):
    """Saves the book source to a "<title>/_Printversion" wiki page."""

    print_version_title = "_Printversion"

    def _create(self):
        page_title = '%s/%s' % (self.title, self.print_version_title)
        logging.debug("Printversion title: %s" % (page_title))
        target_page = self.site.Pages[page_title]
        # Respect overwrite=False when the print page already has content.
        if not self.overwrite and target_page.text():
            self.errors.append('Page "%s" already exists' % page_title)
            return
        target_page.save(self.src)
class PandocExport(Export):
    """Base class for exports that convert the book via pandoc.

    Subclasses set ``outformat`` (e.g. "pdf", "odt"). The rendered file is
    written into ``tmp_path`` and then uploaded to the wiki. Any failure is
    appended to ``self.errors`` instead of raising.
    """

    def prestart(self):
        # Derive the output file name from the (possibly nested) page title.
        self.outfilename = self.title
        if "/" in self.outfilename:
            self.outfilename = self.outfilename.replace("/", "_")
        else:
            self.outfilename += "_live"
        self.outfilename += "." + self.outformat
        self.outpath = os.path.join(self.tmp_path, self.outfilename)
        if not self.overwrite and isfile(self.outpath):
            self.errors.append('File "%s" already exists' % self.outfilename)
            return
        os.environ["HOME"] = self.tmp_path  # pandoc needs a $HOME
        self.start()

    def get_pandoc_params(self):
        """Return the (args, kwargs) pair handed to pypandoc.convert."""
        args = [self.get_soup().prettify(), self.outformat]
        kwargs = {
            'format': 'html',
            'outputfile': self.outpath}
        kwargs['extra_args'] = [
            '--chapters',
            '--standalone',
            '--toc',
            '-M', 'author="%s"' % self.info.get(
                "AUTOREN",
                self.info.get("HERAUSGEBER", "")),
            '-M', 'subtitle="%s"' % self.info.get("ABSTRACT"),
            '-M', 'include-before="%s"' % self.info.get("ABSTRACT"),
            '-M', 'lang="german"',
            '-M', 'mainlang="german"',
            '-M', 'title="%s"' % self.friendly_title,
        ]
        return args, kwargs

    def get_soup(self):
        """Render the wiki source to HTML and clean it up for pandoc."""
        result = self.site.api(
            "parse", text=self.src)
        txt = result['parse']['text']['*']
        soup = BeautifulSoup(txt, 'html.parser')
        # Drop the "[edit]" section links.
        for span in soup.findAll('span', class_="mw-editsection"):
            span.extract()
        # One idea was to use a custom toc. Delete the old one:
        toc = soup.find('div', attrs={'id': 'toc'})
        if toc:
            toc.extract()
        site_url = get_siteurl(self.site)
        # https://github.com/jgm/pandoc/issues/1886
        # mediawiki's <h*><span class="mw-headline" id="INTERNAL TARGET">
        # breaks pandoc, but without the ids internal links do not work.
        # So copy those ids to their parents (the real headline tag)
        # before deleting, and make root-relative media URLs absolute.
        for tag in soup():
            c = tag.get('class')
            if c and "mw-headline" in c:
                i = tag.get("id")
                tag.parent['id'] = i
                del tag['id']
            c = tag.get('src')
            if c and c.startswith("/"):
                tag['src'] = "".join((site_url, c),)
        return soup

    def upload(self):
        """Upload the generated file to the wiki, recording any errors."""
        msg = '%s: "%s" als %s' % (
            # BUG FIX: was "%Y-%M-%d %h:%M" — %M is minutes, not month,
            # and %h is not a portable strftime directive.
            datetime.now().strftime("%Y-%m-%d %H:%M"),
            self.friendly_title,
            self.outformat)
        try:
            # `with` guarantees the file handle is closed even on failure.
            with open(self.outpath, "rb") as outfile:
                res = self.site.upload(
                    outfile, self.outfilename, msg, ignore=True)
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still works.
            self.errors.append(str("UPLOAD-ERROR: " + str(sys.exc_info())))
            return
        if res['result'] != "Success":
            self.errors.append("Upload failed for %s" % self.outfilename)

    def _create(self):
        args, kwargs = self.get_pandoc_params()
        try:
            pypandoc.convert(*args, **kwargs)
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still works.
            self.errors.append(str("PANDOC-ERROR: " + str(sys.exc_info())))
            return
        self.upload()
class PDFExport(PandocExport):
    """PDF export via pandoc/xelatex using the bundled LaTeX template."""

    outformat = "pdf"

    def get_pandoc_params(self):
        args, kwargs = super().get_pandoc_params()
        template_arg = '--template=%s/template.latex' % dirname(abspath(__file__))
        kwargs['extra_args'] += ['--latex-engine=xelatex', template_arg]
        return args, kwargs
class ODTExport(PandocExport):
    # OpenDocument text export; all logic lives in PandocExport.
    outformat = "odt"
class MARKDOWNExport(PandocExport):
    # Markdown export; all logic lives in PandocExport.
    outformat = "markdown"
class EPUBExport(PandocExport):
    # EPUB export; all logic lives in PandocExport.
    outformat = "epub"
| {
"content_hash": "c913401e15cc3a38749807c815330d66",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 77,
"avg_line_length": 29.457142857142856,
"alnum_prop": 0.5410281280310378,
"repo_name": "theithec/bookhelper",
"id": "4d596a61eed761725d026fc8f237ba4f4e1eb5be",
"size": "5156",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bookhelper/export.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "50809"
}
],
"symlink_target": ""
} |
import unittest2
import pymysql
from pymysql import util
from pymysql.tests import base
from pymysql.constants import CLIENT
class TestNextset(base.PyMySQLTestCase):
    """Tests for Cursor.nextset() with CLIENT.MULTI_STATEMENTS enabled.

    These tests require a live MySQL server (provided by
    base.PyMySQLTestCase.connect).
    """

    def test_nextset(self):
        # init_command itself issues multiple statements, so the flag must
        # already be in effect when the connection is established.
        con = self.connect(
            init_command='SELECT "bar"; SELECT "baz"',
            client_flag=CLIENT.MULTI_STATEMENTS)
        cur = con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        r = cur.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur))
        # No third result set: nextset() returns None at the end.
        self.assertIsNone(cur.nextset())

    def test_skip_nextset(self):
        # A fresh execute() must silently discard any unread result sets.
        cur = self.connect(client_flag=CLIENT.MULTI_STATEMENTS).cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        cur.execute("SELECT 42")
        self.assertEqual([(42,)], list(cur))

    def test_nextset_error(self):
        con = self.connect(client_flag=CLIENT.MULTI_STATEMENTS)
        cur = con.cursor()
        for i in range(3):
            # "xyzzy" is invalid SQL; the error surfaces on nextset(),
            # and the cursor must remain usable on the next iteration.
            cur.execute("SELECT %s; xyzzy;", (i,))
            self.assertEqual([(i,)], list(cur))
            with self.assertRaises(pymysql.ProgrammingError):
                cur.nextset()
            self.assertEqual((), cur.fetchall())

    def test_ok_and_next(self):
        cur = self.connect(client_flag=CLIENT.MULTI_STATEMENTS).cursor()
        cur.execute("SELECT 1; commit; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        # 'commit' yields an OK packet (no rows) between the two selects,
        # so two nextset() calls are needed to reach the second result.
        self.assertTrue(cur.nextset())
        self.assertTrue(cur.nextset())
        self.assertEqual([(2,)], list(cur))
        self.assertFalse(bool(cur.nextset()))

    @unittest2.expectedFailure
    def test_multi_cursor(self):
        # Interleaving two cursors on one connection is known not to work.
        con = self.connect(client_flag=CLIENT.MULTI_STATEMENTS)
        cur1 = con.cursor()
        cur2 = con.cursor()
        cur1.execute("SELECT 1; SELECT 2;")
        cur2.execute("SELECT 42")
        self.assertEqual([(1,)], list(cur1))
        self.assertEqual([(42,)], list(cur2))
        r = cur1.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur1))
        self.assertIsNone(cur1.nextset())

    def test_multi_statement_warnings(self):
        # Regression check: warnings produced by multi-statement DDL must
        # not raise a TypeError while being collected.
        con = self.connect(
            init_command='SELECT "bar"; SELECT "baz"',
            client_flag=CLIENT.MULTI_STATEMENTS)
        cursor = con.cursor()
        try:
            cursor.execute('DROP TABLE IF EXISTS a; '
                           'DROP TABLE IF EXISTS b;')
        except TypeError:
            self.fail()
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
| {
"content_hash": "2542a39ee6cebe2e4c2ad98366a155b5",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 72,
"avg_line_length": 30.833333333333332,
"alnum_prop": 0.5857142857142857,
"repo_name": "imron/scalyr-agent-2",
"id": "998441072d80ec85f33d1bb4e65aa6e05b7ec76b",
"size": "2590",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scalyr_agent/third_party/pymysql/tests/test_nextset.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1297"
},
{
"name": "Dockerfile",
"bytes": "1461"
},
{
"name": "Python",
"bytes": "2093708"
}
],
"symlink_target": ""
} |
import json
from django.test.utils import override_settings
from django.test.client import RequestFactory
from allauth.compat import reverse
from allauth.socialaccount.tests import OAuth2TestsMixin
from allauth.tests import MockedResponse, TestCase, patch
from allauth.socialaccount.models import SocialAccount
from allauth.socialaccount import providers
from allauth.account import app_settings as account_settings
from allauth.account.models import EmailAddress
from allauth.utils import get_user_model
from .provider import FacebookProvider
@override_settings(
    SOCIALACCOUNT_AUTO_SIGNUP=True,
    ACCOUNT_SIGNUP_FORM_CLASS=None,
    LOGIN_REDIRECT_URL='/accounts/profile/',
    ACCOUNT_EMAIL_VERIFICATION=account_settings
    .EmailVerificationMethod.NONE,
    SOCIALACCOUNT_PROVIDERS={
        'facebook': {
            'AUTH_PARAMS': {},
            'VERIFIED_EMAIL': False}})
class FacebookTests(OAuth2TestsMixin, TestCase):
    """Tests for the Facebook OAuth2 provider (django-allauth)."""

    provider_id = FacebookProvider.id

    # Canned Graph API profile payload served as the mocked provider
    # response; uid 630595557 is referenced by several tests below.
    facebook_data = """
{
"id": "630595557",
"name": "Raymond Penners",
"first_name": "Raymond",
"last_name": "Penners",
"email": "raymond.penners@gmail.com",
"link": "https://www.facebook.com/raymond.penners",
"username": "raymond.penners",
"birthday": "07/17/1973",
"work": [
{
"employer": {
"id": "204953799537777",
"name": "IntenCT"
}
}
],
"timezone": 1,
"locale": "nl_NL",
"verified": true,
"updated_time": "2012-11-30T20:40:33+0000"
}"""

    def get_mocked_response(self, data=None):
        # Allow individual tests to substitute their own profile payload.
        if data is None:
            data = self.facebook_data
        return MockedResponse(200, data)

    def test_username_conflict(self):
        # If the provider-derived username is taken, fall back to the
        # first name.
        User = get_user_model()
        User.objects.create(username='raymond.penners')
        self.login(self.get_mocked_response())
        socialaccount = SocialAccount.objects.get(uid='630595557')
        self.assertEqual(socialaccount.user.username, 'raymond')

    def test_username_based_on_provider(self):
        self.login(self.get_mocked_response())
        socialaccount = SocialAccount.objects.get(uid='630595557')
        self.assertEqual(socialaccount.user.username, 'raymond.penners')

    def test_username_based_on_provider_with_simple_name(self):
        # No "username" field in the payload: derive from the first name.
        data = '{"id": "1234567", "name": "Harvey McGillicuddy"}'
        self.login(self.get_mocked_response(data=data))
        socialaccount = SocialAccount.objects.get(uid='1234567')
        self.assertEqual(socialaccount.user.username, 'harvey')

    def test_media_js(self):
        # The JS SDK snippet must embed the configured app id.
        provider = providers.registry.by_id(FacebookProvider.id)
        request = RequestFactory().get(reverse('account_login'))
        request.session = {}
        script = provider.media_js(request)
        self.assertTrue('"appId": "app123id"' in script)

    def test_login_by_token(self):
        resp = self.client.get(reverse('account_login'))
        # Patch the provider's HTTP layer so the token exchange returns
        # the mocked profile instead of hitting Facebook.
        with patch('allauth.socialaccount.providers.facebook.views'
                   '.requests') as requests_mock:
            mocks = [self.get_mocked_response().json()]
            requests_mock.get.return_value.json \
                = lambda: mocks.pop()
            resp = self.client.post(reverse('facebook_login_by_token'),
                                    data={'access_token': 'dummy'})
            self.assertRedirects(resp, 'http://testserver/accounts/profile/',
                                 fetch_redirect_response=False)

    @override_settings(
        SOCIALACCOUNT_PROVIDERS={
            'facebook': {
                'AUTH_PARAMS': {'auth_type': 'reauthenticate'},
                'VERIFIED_EMAIL': False}})
    def test_login_by_token_reauthenticate(self):
        resp = self.client.get(reverse('account_login'))
        # Reauthentication requires echoing back the auth_nonce that the
        # login page put into its fb_data context.
        nonce = json.loads(
            resp.context['fb_data'])['loginOptions']['auth_nonce']
        with patch('allauth.socialaccount.providers.facebook.views'
                   '.requests') as requests_mock:
            # mocks are popped in reverse order: nonce check first,
            # then the profile fetch.
            mocks = [self.get_mocked_response().json(),
                     {'auth_nonce': nonce}]
            requests_mock.get.return_value.json \
                = lambda: mocks.pop()
            resp = self.client.post(reverse('facebook_login_by_token'),
                                    data={'access_token': 'dummy'})
            self.assertRedirects(resp, 'http://testserver/accounts/profile/',
                                 fetch_redirect_response=False)

    @override_settings(
        SOCIALACCOUNT_PROVIDERS={
            'facebook': {
                'VERIFIED_EMAIL': True}})
    def test_login_verified(self):
        emailaddress = self._login_verified()
        self.assertTrue(emailaddress.verified)

    def test_login_unverified(self):
        # Uses the class-level VERIFIED_EMAIL=False setting.
        emailaddress = self._login_verified()
        self.assertFalse(emailaddress.verified)

    def _login_verified(self):
        # Shared helper: log in with the default payload and return the
        # EmailAddress record created for it.
        self.login(self.get_mocked_response())
        return EmailAddress.objects.get(email='raymond.penners@gmail.com')
| {
"content_hash": "894680f03a1a7952dd8941ee66ce8d0c",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 77,
"avg_line_length": 39.33587786259542,
"alnum_prop": 0.6056666019794295,
"repo_name": "wli/django-allauth",
"id": "95ca86616445793c1512314b3644f088029c114a",
"size": "5153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "allauth/socialaccount/providers/facebook/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "42101"
},
{
"name": "JavaScript",
"bytes": "3967"
},
{
"name": "Makefile",
"bytes": "298"
},
{
"name": "Python",
"bytes": "588192"
}
],
"symlink_target": ""
} |
import re
from wadl2rst import table
from wadl2rst.nodes.base import BaseNode
from wadl2rst.templates import templates
# Characters replaced with "_" when building a file name from a title.
FILENAME_TITLE = re.compile(r" |/")
# Path separators and braces replaced with "_" when slugging a URI.
FILENAME_PATH = re.compile(r"/|{|}")
# Collapses runs of underscores produced by the substitutions above.
FILENAME_UNDERSCORES = re.compile(r"[_]+")
# NOTE(review): r"/s" matches a literal "/s" — likely meant r"\s"
# (whitespace). Only referenced from commented-out code below; confirm
# before changing.
PARAMS_YAML = re.compile(r"/s")
# Strips parenthesized asides, e.g. "(deprecated)", from titles.
PARAMS_CLEANUP = re.compile(r"\(.*?\)")
class MethodNode(BaseNode):
    """Renders a WADL <method> element to rst via the 'method' template."""

    template = templates['method']
    # Tag names that may carry a method's documentation element.
    document_node_names = ["wadl:doc", "doc"]
    # Tag names that may carry the short-description paragraph.
    para_names = ["para", "p", "db:para", "xhtml:p"]

    # TODO(auggy): should this take params as an argument?
    # self.template_params() is called twice, once here and
    # once after this returns to get the filename
    def to_rst(self, book_title):
        """ Return the rst representation of this tag and it's children. """
        # we were not able to resolve this method, so skip it
        if "href" in self.attributes:
            print "Unresolved method {}".format(self.attributes['href'])
            return ""
        params = self.template_params()
        try:
            return self.template.render(book_title=book_title, **params)
        except Exception, e:
            # Dump the params so the failing template input is visible.
            print params
            raise e

    def template_params(self):
        """Collect everything the method template needs into one dict."""
        resource_node = None
        responses_node = None
        request_node = None
        document_node = None
        short_desc_node = None
        # Each child lookup is optional; a missing child simply leaves the
        # corresponding output fields at their defaults below.
        try:
            resource_node = self.find_first("resource")
        except Exception:
            pass
        try:
            responses_node = self.find_first("responses")
        except Exception:
            pass
        try:
            request_node = self.find_first("request")
        except Exception:
            pass
        try:
            document_node = self.find_one_of(self.document_node_names)
            short_desc_node = document_node.find_one_of(self.para_names)
        except Exception:
            # we handle failures here below
            pass
        # Default values for every field the template may reference.
        output = {
            "body_table": u'',
            "docs_rst": "",
            "filename": "",
            "http_method": self.attributes.get("name", ''),
            "in_params": None,
            "params_yaml" : u'',
            "query_table": None,
            "request_examples": [],
            "responses_table": None,
            "response_examples": [],
            "short_desc": "",
            "title": "",
            "uri_table": u'',
            "uri": "",
        }
        if document_node is not None:
            output['docs_rst'] = document_node.to_rst()
            output['title'] = document_node.attributes.get("title", '').title()
            # create the params yaml filename
            # params_yaml = PARAMS_CLEANUP.sub("", output['title'])
            # params_yaml = PARAMS_YAML.sub("", params_yaml).split()
            # params_yaml[0] = params_yaml[0].lower()
            # output['params_yaml'] = ''.join(params_yaml) + '.yaml'
            output['params_yaml'] = 'parameters.yaml'
        if short_desc_node is not None:
            output['short_desc'] = short_desc_node.to_rst()
        # setup the resource node stuff
        if resource_node is not None:
            output['uri'] = resource_node.attributes.get("full_path", '')
            uri_params = resource_node.find_first("params")
            if uri_params is not None:
                output['uri_table'] = uri_params.to_keypairs()
        # setup some request node stuff
        if request_node is not None:
            request_params = request_node.find_first("params")
            if request_params is not None:
                output['query_table'] = request_params.to_keypairs("query")
                output['body_table'] = request_params.to_keypairs("plain")
            # stash any request examples
            representations = request_node.find("representation")
            for representation in representations:
                example = representation.to_example()
                if example is not None:
                    output['request_examples'].append(example)
        # set up in params
        output['in_params'] = output['uri_table'] + output['body_table']
        # setup the reponses node stuff
        if responses_node is not None:
            response_params = responses_node.find_first("params")
            # stash the responses table
            # NOTE(review): writes 'response_table' while the defaults dict
            # and the commented-out line below use 'responses_table' —
            # possibly a typo; confirm against the template.
            if response_params is not None:
                output['response_table'] = response_params.to_keypairs("plain")
            # handle responses nodes
            responses = [self.get_response_info(child) for child in responses_node.children]
            # output['responses_table'] = self.get_responses_table(responses)
            output['error_responses'] = self.get_error_responses_list(responses)
            output['normal_responses'] = self.get_normal_responses_list(responses)
            # stash any response examples
            representations = responses_node.find("representation")
            for representation in representations:
                example = representation.to_example()
                if example is not None:
                    output['response_examples'].append(example)
        # create the filename
        output['filename'] = self.get_filename(output, 'rst')
        return output

    def get_filename(self, data=None, extention="rst"):
        """Build a slugged output filename: <method>_<title>_<uri>.<ext>."""
        http_method = data['http_method']
        title = FILENAME_TITLE.sub("_", data['title'].lower())
        uri = FILENAME_PATH.sub("_", data['uri'].lower())
        output = "{}_{}_{}.{}".format(
            http_method,
            title,
            uri,
            extention
        )
        # Collapse the underscore runs the substitutions leave behind.
        output = FILENAME_UNDERSCORES.sub("_", output)
        return output

    def get_responses_table(self, responses):
        """Render the response triples as an rst table."""
        columns = ["Response Code", "Name", "Description"]
        return table.create_table(columns, responses)

    def get_error_responses_list(self, responses):
        # TODO(auggy): actually implement this, looks like it's not getting into the data
        return u'''computeFault(400, 500), serviceUnavailable(503), badRequest(400),
unauthorized(401), forbidden(403), badMethod(405), itemNotFound(404)'''

    def get_normal_responses_list(self, responses):
        """Join the status codes of all responses into a comma list."""
        normal_responses = [i[0] for i in responses]
        return ','.join(normal_responses)

    def get_response_info(self, node):
        """Return [status, title, rst] for one response node.

        Representations are stripped from a clone so the rst text covers
        only the response description itself.
        """
        clone = node.clone()
        reps = [child for child in clone.children if child.name == "representation"]
        for rep in reps:
            clone.remove_child(rep)
        title = ""
        try:
            doc_node = node.find_one_of(self.document_node_names)
            title = doc_node.attributes.get('title', '')
        except ValueError, e:
            title = ""
        return [
            node.attributes.get('status', ''),
            title,
            clone.to_rst()
        ]
| {
"content_hash": "496f57fa58baf07c3d22c2f1cbb32575",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 92,
"avg_line_length": 33.82089552238806,
"alnum_prop": 0.5672256546042954,
"repo_name": "annegentle/wadl2rst",
"id": "906c99b816b7ccb624ac9f43f56268f367a0b311",
"size": "6799",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wadl2rst/nodes/method.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "26080"
},
{
"name": "HTML",
"bytes": "1362"
},
{
"name": "JavaScript",
"bytes": "1273"
},
{
"name": "Python",
"bytes": "62349"
}
],
"symlink_target": ""
} |
from app import myapp, models
from flask import render_template, Flask, redirect, url_for, session, request, jsonify, flash
from flask_oauthlib.client import OAuth
from .forms import LoginForm, SignUpForm
app = Flask(__name__)
app.config.from_object('config')

oauth = OAuth()

# LinkedIn OAuth2 remote app (r_basicprofile scope).
# SECURITY(review): consumer_key/consumer_secret are hard-coded here —
# move them into configuration and rotate the secret. The static 'state'
# value also defeats CSRF protection; it should be random per request.
linkedin = oauth.remote_app(
    'linkedIn',
    consumer_key='86faisvke7rqht',
    consumer_secret='vfywuq3lwEUUqzU2',
    request_token_params={
        'scope': 'r_basicprofile',
        'state': 'RandomString',
    },
    base_url='https://api.linkedin.com/v1/',
    request_token_url=None,
    access_token_method='POST',
    access_token_url='https://www.linkedin.com/uas/oauth2/accessToken',
    authorize_url='https://www.linkedin.com/uas/oauth2/authorization',
)
@myapp.route('/')
@myapp.route('/index')
def index():
    """Show the LinkedIn profile as JSON, or redirect to the login page."""
    if 'linkedin_token' not in session:
        return redirect(url_for('login'))
    print("in session")
    profile = linkedin.get('people/~')
    print(jsonify(profile.data))
    return jsonify(profile.data)
# ------------ User Session Management ------------
# login
@myapp.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in, or send an already-authenticated user to /main.

    Renders the login form on GET or failed validation; on a successful
    credential check, stores the user's name and email in the session
    and redirects to /main.
    """
    error = None
    # Already logged in: go straight to the main page.
    # BUG FIX: the original ran `user = escape(session['user'])` here, but
    # `escape` was never imported (NameError at runtime) and the value was
    # never used — the dead call is removed.
    if 'user' in session:
        return redirect('/main')
    form = LoginForm()
    if form.validate_on_submit():
        # user input
        email = form.email.data
        pwd = form.insecure_password.data
        # validate_user returns the user's first name only if email/pwd
        # match a DB record, otherwise None.
        user = models.validate_user(email, pwd)
        if user is not None:
            flash('Logging in')
            session['user'] = user
            session['email'] = email
            return redirect('/main')
        error = 'Invalid credentials'
    return render_template('login.html', error=error, form=form)
# @myapp.route('/')
# @myapp.route('/index')
# def index():
# print("YAY")
# return redirect(url_for('/login'))
# @app.route('/')
# @myapp.route('/index')
# def index():
# print("YAY")
# if 'linkedin_token' in session:
# print("in session")
# me = linkedin.get('people/~')
# print(jsonify(me.data))
# return jsonify(me.data)
# return redirect(url_for('/login'))
# @app.route('/login')
# def login():
# return linkedin.authorize(callback=url_for('authorized', _external=True))
# @app.route('/logout')
# def logout():
# session.pop('linkedin_token', None)
# return redirect(url_for('/index'))
# @app.route('/login/authorized')
# def authorized():
# resp = linkedin.authorized_response()
# if resp is None:
# return 'Access denied: reason=%s error=%s' % (
# request.args['error_reason'],
# request.args['error_description']
# )
# session['linkedin_token'] = (resp['access_token'], '')
# me = linkedin.get('people/~')
# return jsonify(me.data)
# @linkedin.tokengetter
# def get_linkedin_oauth_token():
# return session.get('linkedin_token')
# def change_linkedin_query(uri, headers, body):
# auth = headers.pop('Authorization')
# headers['x-li-format'] = 'json'
# if auth:
# auth = auth.replace('Bearer', '').strip()
# if '?' in uri:
# uri += '&oauth2_access_token=' + auth
# else:
# uri += '?oauth2_access_token=' + auth
# return uri, headers, body
# linkedin.pre_request = change_linkedin_query
# if __name__ == '__main__':
# app.run() | {
"content_hash": "6e84a31709c8f6a37eae59a0dc99d2c4",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 93,
"avg_line_length": 26.643939393939394,
"alnum_prop": 0.6013647995450668,
"repo_name": "lizlee0225/IschoolJourney",
"id": "1edabdf0659ea166c368691563b04d40b87adc2b",
"size": "3517",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ISchool_Journey/app/views1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "312604"
},
{
"name": "HTML",
"bytes": "48977"
},
{
"name": "JavaScript",
"bytes": "632082"
},
{
"name": "Python",
"bytes": "29475"
}
],
"symlink_target": ""
} |
import pytest
from seafileapi.exceptions import DoesNotExist
from tests.utils import randstring
def test_create_delete_repo(client):
    # A freshly created repo must be gone after delete().
    repo = _create_repo(client)
    repo.delete()
    with pytest.raises(DoesNotExist):
        client.repos.get_repo(repo.id)
def test_create_encrypted_repo(client):
    # Same lifecycle check, but for a password-protected (encrypted) repo.
    repo = _create_repo(client, password=randstring())
    repo.delete()
    with pytest.raises(DoesNotExist):
        client.repos.get_repo(repo.id)
def test_list_repos(client):
    """Every repo returned by list_repos carries a 36-char (uuid-like) id."""
    assert all(len(repo.id) == 36 for repo in client.repos.list_repos())
def _create_repo(client, password=None):
    """Create a repo with a random CJK name/description and sanity-check it."""
    name = '测试资料库-%s' % randstring()
    desc = '一个测试资料库-%s' % randstring()
    created = client.repos.create_repo(name, desc, password=password)
    assert created.name == name
    assert created.desc == desc
    assert len(created.id) == 36
    assert created.encrypted == (password is not None)
    assert created.owner == 'self'
    return created
| {
"content_hash": "a57425643a044c638152d8bf527016e7",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 76,
"avg_line_length": 28.114285714285714,
"alnum_prop": 0.6798780487804879,
"repo_name": "qk4l/python-seafile",
"id": "907f564ef20adc1846cfe357312a7eab7092bbf2",
"size": "1024",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_repos.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20609"
},
{
"name": "Shell",
"bytes": "1017"
}
],
"symlink_target": ""
} |
"""
will merge into userguild
"""
import pymongo
from pymongo import MongoClient
from wowlib import wowapi
# connection informattion
client = MongoClient()
db = client.wow
userdb = db.users
# create list of unique guilds
guildlist = []
user_list = []
for guild in userdb.find({"user": {'$regex' : ".* - Blackwater Raiders.*"}}):
if guild['guild'] in guildlist:
pass
elif guild['guild']=="_None_":
pass
else:
guildlist.append(guild['guild'])
#print(len(guildlist))
count = 0
for guild in guildlist:
try:
record = wowapi.guild_query(guild,'Blackwater Raiders')
try:
for i, member in enumerate(record['members']):
if 'spec' in record['members'][i]['character']:
user_list.append((record['members'][i]['character']['name']+" - "+record['members'][i]['character']['realm']))
user = (record['members'][i]['character']['name']+" - "+record['members'][i]['character']['realm'])
newuser = {'guild':guild,
'user':user,
'lvl':record['members'][i]['character']['level'],
'spec':record['members'][i]['character']['spec']['name'],
'class':record['members'][i]['character']['class'],
'gender':record['members'][i]['character']['gender'],
'role':record['members'][i]['character']['spec']['role'],
'side':record['side']
}
try:
#<----------------temp to reschema------------->
userdb.update({'user':user},
{'$set':
{'user':user,
'lvl':newuser['lvl'],
'guild':guild,
'side':newuser['side'],
'gender':newuser['gender'],
'role':newuser['role'],
'class':newuser['class'],
'spec':newuser['spec']}},
upsert=True)
print("added " +user)
'''
posts.update({'user':user},
{'$set':
{'user':user,
'lvl':newuser['lvl'],
'guild':guild,
'side':}},
upsert=True)
print("added " +user)
'''
except:print("Failed on "+user)
except Exception as e:
print("failed API call "+ guild)
print (e)
print (user)
except:print("Failed" + guild)
# print("Could not print :"+guild)
print ("Total Number of Users :" + str(len(user_list)))
#Output
#print('Number of distinct users :'+ str(numusers))
'''
username =(player['user'].split(" - ")[0])
server = player['user'].split(" - ")[1]
pid = player['user']
try:
playerinfo = wowapi.char_query(username,server)
#print(guild)
try:
guild = playerinfo['guild']['name']
print(guild)
except: guild = "_None_"
lvl = playerinfo['level']
posts.update_one({'user':pid},{'$set':{'guild': guild, 'lvl':lvl}})
print("updated")
except: print("Pass")
print("Known user list built")
'''
| {
"content_hash": "7be0f299672823aeee2b66034d701f6d",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 130,
"avg_line_length": 26.402985074626866,
"alnum_prop": 0.4347088750706614,
"repo_name": "henkhaus/wow",
"id": "b0c794628cf753d443bcb2ff885877ce6a3376c2",
"size": "3538",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "guildsearch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "109"
},
{
"name": "Jupyter Notebook",
"bytes": "73626"
},
{
"name": "Python",
"bytes": "54924"
},
{
"name": "Shell",
"bytes": "91"
}
],
"symlink_target": ""
} |
# Smoke test: run one full episode of the trading environment through the
# Agent, then exercise the model maintenance calls.
import tensorflow as tf

from agent.framework import Framework
from agent.main import Agent
from emulator.main import Account

# Earlier variant that drove Framework with an explicit tf.Session,
# kept for reference:
# env = Account()
# state = env.reset()
#
# agent = Framework()
# sess = tf.Session()
# sess.run(tf.global_variables_initializer())
#
#
# while True:
#     action = agent.get_stochastic_policy(sess, state)
#     next_state, reward, done = env.step(action)
#     agent.update_cache(state, action, reward, next_state, done)
#     state = next_state
#     if done:
#         break
#
# agent.update_value_net(sess)
# agent.update_target_net(sess)

env = Account()
state = env.reset()
agent = Agent()

# Roll out a single episode, caching each transition for learning.
while True:
    action = agent.get_stochastic_policy(state)
    next_state, reward, done = env.step(action)
    agent.update_cache(state, action, reward, next_state, done)
    state = next_state
    if done:
        break

# Sync networks and verify save/restore round-trips without error.
agent.update_target()
agent.update_eval()
agent.save_model()
agent.restore_model()
| {
"content_hash": "aea14c34cbae55aca14af5a4af11108d",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 65,
"avg_line_length": 19.520833333333332,
"alnum_prop": 0.6744930629669157,
"repo_name": "AlphaSmartDog/DeepLearningNotes",
"id": "5c80120a3e6a53981f7aebbf23b800207dbf70ff",
"size": "937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Note-5 DQN与HS300指数择时/D3QN_Scale/test_agent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "76094"
},
{
"name": "Jupyter Notebook",
"bytes": "9463332"
},
{
"name": "Python",
"bytes": "8832736"
}
],
"symlink_target": ""
} |
from PIL import Image, ImageFile, ImagePalette, \
ImageChops, ImageSequence, _binary
__version__ = "0.9"
# --------------------------------------------------------------------
# Helpers
i8 = _binary.i8
i16 = _binary.i16le
o8 = _binary.o8
o16 = _binary.o16le
# --------------------------------------------------------------------
# Identify/read GIF files
def _accept(prefix):
return prefix[:6] in [b"GIF87a", b"GIF89a"]
##
# Image plugin for GIF images. This plugin supports both GIF87 and
# GIF89 images.
class GifImageFile(ImageFile.ImageFile):
format = "GIF"
format_description = "Compuserve GIF"
global_palette = None
def data(self):
    """Read one GIF sub-block: a length byte followed by that many bytes.

    Returns None on a zero-length (terminator) block or at end of file.
    """
    s = self.fp.read(1)
    if not (s and i8(s)):
        return None
    return self.fp.read(i8(s))
def _open(self):
    """Parse the GIF logical screen descriptor and prepare frame 0."""
    # Screen: 6-byte signature, 2x 16-bit LE size, flags, background, ratio.
    s = self.fp.read(13)
    if s[:6] not in [b"GIF87a", b"GIF89a"]:
        raise SyntaxError("not a GIF file")

    self.info["version"] = s[:6]
    self.size = i16(s[6:]), i16(s[8:])
    self.tile = []
    flags = i8(s[10])
    # Low 3 bits encode (colour-resolution - 1) -> palette entry count.
    bits = (flags & 7) + 1

    if flags & 128:
        # get global palette
        self.info["background"] = i8(s[11])
        # check if palette contains colour indices
        p = self.fp.read(3 << bits)
        for i in range(0, len(p), 3):
            # Only adopt the palette if it is not a pure greyscale ramp
            # where every entry equals its own index.
            if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])):
                p = ImagePalette.raw("RGB", p)
                self.global_palette = self.palette = p
                break

    self.__fp = self.fp  # FIXME: hack
    self.__rewind = self.fp.tell()
    # Frame bookkeeping; both counts are computed lazily by the properties.
    self._n_frames = None
    self._is_animated = None
    self._seek(0)  # get ready to read first frame
@property
def n_frames(self):
if self._n_frames is None:
current = self.tell()
try:
while True:
self.seek(self.tell() + 1)
except EOFError:
self._n_frames = self.tell() + 1
self.seek(current)
return self._n_frames
@property
def is_animated(self):
if self._is_animated is None:
current = self.tell()
try:
self.seek(1)
self._is_animated = True
except EOFError:
self._is_animated = False
self.seek(current)
return self._is_animated
def seek(self, frame):
if frame == self.__frame:
return
if frame < self.__frame:
self._seek(0)
last_frame = self.__frame
for f in range(self.__frame + 1, frame + 1):
try:
self._seek(f)
except EOFError:
self.seek(last_frame)
raise EOFError("no more images in GIF file")
def _seek(self, frame):
if frame == 0:
# rewind
self.__offset = 0
self.dispose = None
self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1
self.__frame = -1
self.__fp.seek(self.__rewind)
self._prev_im = None
self.disposal_method = 0
else:
# ensure that the previous frame was loaded
if not self.im:
self.load()
if frame != self.__frame + 1:
raise ValueError("cannot seek to frame %d" % frame)
self.__frame = frame
self.tile = []
self.fp = self.__fp
if self.__offset:
# backup to last frame
self.fp.seek(self.__offset)
while self.data():
pass
self.__offset = 0
if self.dispose:
self.im.paste(self.dispose, self.dispose_extent)
from copy import copy
self.palette = copy(self.global_palette)
while True:
s = self.fp.read(1)
if not s or s == b";":
break
elif s == b"!":
#
# extensions
#
s = self.fp.read(1)
block = self.data()
if i8(s) == 249:
#
# graphic control extension
#
flags = i8(block[0])
if flags & 1:
self.info["transparency"] = i8(block[3])
self.info["duration"] = i16(block[1:3]) * 10
# disposal method - find the value of bits 4 - 6
dispose_bits = 0b00011100 & flags
dispose_bits = dispose_bits >> 2
if dispose_bits:
# only set the dispose if it is not
# unspecified. I'm not sure if this is
# correct, but it seems to prevent the last
# frame from looking odd for some animations
self.disposal_method = dispose_bits
elif i8(s) == 254:
#
# comment extension
#
self.info["comment"] = block
elif i8(s) == 255:
#
# application extension
#
self.info["extension"] = block, self.fp.tell()
if block[:11] == b"NETSCAPE2.0":
block = self.data()
if len(block) >= 3 and i8(block[0]) == 1:
self.info["loop"] = i16(block[1:3])
while self.data():
pass
elif s == b",":
#
# local image
#
s = self.fp.read(9)
# extent
x0, y0 = i16(s[0:]), i16(s[2:])
x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:])
self.dispose_extent = x0, y0, x1, y1
flags = i8(s[8])
interlace = (flags & 64) != 0
if flags & 128:
bits = (flags & 7) + 1
self.palette =\
ImagePalette.raw("RGB", self.fp.read(3 << bits))
# image data
bits = i8(self.fp.read(1))
self.__offset = self.fp.tell()
self.tile = [("gif",
(x0, y0, x1, y1),
self.__offset,
(bits, interlace))]
break
else:
pass
# raise IOError, "illegal GIF tag `%x`" % i8(s)
try:
if self.disposal_method < 2:
# do not dispose or none specified
self.dispose = None
elif self.disposal_method == 2:
# replace with background colour
self.dispose = Image.core.fill("P", self.size,
self.info["background"])
else:
# replace with previous contents
if self.im:
self.dispose = self.im.copy()
# only dispose the extent in this frame
if self.dispose:
self.dispose = self.dispose.crop(self.dispose_extent)
except (AttributeError, KeyError):
pass
if not self.tile:
# self.__fp = None
raise EOFError
self.mode = "L"
if self.palette:
self.mode = "P"
def tell(self):
return self.__frame
def load_end(self):
ImageFile.ImageFile.load_end(self)
# if the disposal method is 'do not dispose', transparent
# pixels should show the content of the previous frame
if self._prev_im and self.disposal_method == 1:
# we do this by pasting the updated area onto the previous
# frame which we then use as the current image content
updated = self.im.crop(self.dispose_extent)
self._prev_im.paste(updated, self.dispose_extent,
updated.convert('RGBA'))
self.im = self._prev_im
self._prev_im = self.im.copy()
# --------------------------------------------------------------------
# Write GIF files
# Try the optional C extension that writes compressed GIFs; fall back to
# the pure-Python uncompressed writer below when it is unavailable.
try:
    import _imaging_gif
except ImportError:
    _imaging_gif = None
# Image modes that can be written to GIF directly, mapped to the raw
# encoder mode used when saving.
RAWMODE = {
    "1": "L",
    "L": "L",
    "P": "P",
}
def _convert_mode(im, initial_call=False):
    """Convert *im* on the fly to a GIF-storable mode ("P" or "L").

    (EXPERIMENTAL -- it is debatable whether PIL should convert
    automatically on save at all.)

    :param im: Image to convert.
    :param initial_call: True for the first frame, where an adaptive
        palette sized to the source palette is built.
    """
    if Image.getmodebase(im.mode) != "RGB":
        return im.convert("L")
    if not initial_call:
        return im.convert("P")
    colors = len(im.palette.getdata()[1]) // 3 if im.palette else 256
    return im.convert("P", palette=1, colors=colors)
def _save_all(im, fp, filename):
    """Save every frame of *im* to *fp* (see :func:`_save`)."""
    _save(im, fp, filename, save_all=True)
def _save(im, fp, filename, save_all=False):
    """Write *im* to *fp* as a GIF.

    :param im: Image to save.
    :param fp: Writable (binary) file object.
    :param filename: Target file name, forwarded to the external driver.
    :param save_all: If true, write every frame of the sequence; later
        frames are stored as bounding-box deltas against the previous one.
    """
    im.encoderinfo.update(im.info)
    if _imaging_gif:
        # call external driver
        try:
            _imaging_gif.save(im, fp, filename)
            return
        except IOError:
            pass # write uncompressed file
    if im.mode in RAWMODE:
        im_out = im.copy()
    else:
        im_out = _convert_mode(im, True)
    # header
    try:
        palette = im.encoderinfo["palette"]
    except KeyError:
        palette = None
    im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True)
    if save_all:
        previous = None
        first_frame = None
        for im_frame in ImageSequence.Iterator(im):
            im_frame = _convert_mode(im_frame)
            # To specify duration, add the time in milliseconds to getdata(),
            # e.g. getdata(im_frame, duration=1000)
            if not previous:
                # global header
                first_frame = getheader(im_frame, palette, im.encoderinfo)[0]
                first_frame += getdata(im_frame, (0, 0), **im.encoderinfo)
            else:
                if first_frame:
                    # a second frame exists: flush the buffered header now
                    for s in first_frame:
                        fp.write(s)
                    first_frame = None
                # delta frame
                delta = ImageChops.subtract_modulo(im_frame, previous.copy())
                bbox = delta.getbbox()
                if bbox:
                    # compress difference
                    for s in getdata(im_frame.crop(bbox),
                                     bbox[:2], **im.encoderinfo):
                        fp.write(s)
                else:
                    # FIXME: what should we do in this case?
                    pass
            previous = im_frame
        if first_frame:
            # only one frame seen: fall back to the single-image writer
            save_all = False
    if not save_all:
        header = getheader(im_out, palette, im.encoderinfo)[0]
        for s in header:
            fp.write(s)
        flags = 0
        if get_interlace(im):
            flags = flags | 64
        # local image header
        _get_local_header(fp, im, (0, 0), flags)
        im_out.encoderconfig = (8, get_interlace(im))
        ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0,
                                      RAWMODE[im_out.mode])])
        fp.write(b"\0") # end of image data
        fp.write(b";") # end of file
    if hasattr(fp, "flush"):
        fp.flush()
def get_interlace(im):
    """Return the interlace flag (1 or 0) to use when saving *im*.

    Defaults to interlaced; honours ``im.encoderinfo["interlace"]``.
    """
    interlace = im.encoderinfo.get("interlace", 1)
    # workaround for @PIL153: images smaller than 16 pixels in either
    # dimension are never interlaced
    if min(im.size) < 16:
        interlace = 0
    return interlace
def _get_local_header(fp, im, offset, flags):
    """Write extension blocks and the local image descriptor to *fp*.

    Emits, when applicable, the graphic control extension (transparency
    and frame duration), a comment extension, the NETSCAPE2.0 looping
    extension, and finally the image descriptor for an image placed at
    *offset* (x, y) with the given packed *flags* byte.
    """
    transparent_color_exists = False
    try:
        transparency = im.encoderinfo["transparency"]
    except KeyError:
        pass
    else:
        transparency = int(transparency)
        # optimize the block away if transparent color is not used
        transparent_color_exists = True
        if _get_optimize(im, im.encoderinfo):
            used_palette_colors = _get_used_palette_colors(im)
            # adjust the transparency index after optimize
            if len(used_palette_colors) < 256:
                for i in range(len(used_palette_colors)):
                    if used_palette_colors[i] == transparency:
                        transparency = i
                        transparent_color_exists = True
                        break
                    else:
                        transparent_color_exists = False
    # duration is given in milliseconds; GIF stores 1/100ths of a second
    if "duration" in im.encoderinfo:
        duration = int(im.encoderinfo["duration"] / 10)
    else:
        duration = 0
    if transparent_color_exists or duration != 0:
        transparency_flag = 1 if transparent_color_exists else 0
        if not transparent_color_exists:
            transparency = 0
        fp.write(b"!" +
                 o8(249) + # extension intro
                 o8(4) + # length
                 o8(transparency_flag) + # transparency info present
                 o16(duration) + # duration
                 o8(transparency) + # transparency index
                 o8(0))
    if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]) <= 255:
        fp.write(b"!" +
                 o8(254) + # extension intro
                 o8(len(im.encoderinfo["comment"])) +
                 im.encoderinfo["comment"] +
                 o8(0))
    if "loop" in im.encoderinfo:
        number_of_loops = im.encoderinfo["loop"]
        fp.write(b"!" +
                 o8(255) + # extension intro
                 o8(11) +
                 b"NETSCAPE2.0" +
                 o8(3) +
                 o8(1) +
                 o16(number_of_loops) + # number of loops
                 o8(0))
    # local image descriptor
    fp.write(b"," +
             o16(offset[0]) + # offset
             o16(offset[1]) +
             o16(im.size[0]) + # size
             o16(im.size[1]) +
             o8(flags) + # flags
             o8(8)) # bits
def _save_netpbm(im, fp, filename):
    """Save *im* by piping it through external NETPBM/PBMPLUS tools."""
    #
    # If you need real GIF compression and/or RGB quantization, you
    # can use the external NETPBM/PBMPLUS utilities. See comments
    # below for information on how to enable this.
    import os
    from subprocess import Popen, check_call, PIPE, CalledProcessError
    import tempfile
    file = im._dump()
    if im.mode != "RGB":
        # already palette/greyscale: convert straight to GIF
        with open(filename, 'wb') as f:
            stderr = tempfile.TemporaryFile()
            check_call(["ppmtogif", file], stdout=f, stderr=stderr)
    else:
        with open(filename, 'wb') as f:
            # Pipe ppmquant output into ppmtogif
            # "ppmquant 256 %s | ppmtogif > %s" % (file, filename)
            quant_cmd = ["ppmquant", "256", file]
            togif_cmd = ["ppmtogif"]
            stderr = tempfile.TemporaryFile()
            quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr)
            stderr = tempfile.TemporaryFile()
            togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f,
                               stderr=stderr)
            # Allow ppmquant to receive SIGPIPE if ppmtogif exits
            quant_proc.stdout.close()
            retcode = quant_proc.wait()
            if retcode:
                raise CalledProcessError(retcode, quant_cmd)
            retcode = togif_proc.wait()
            if retcode:
                raise CalledProcessError(retcode, togif_cmd)
    # best-effort cleanup of the temporary dump file
    try:
        os.unlink(file)
    except OSError:
        pass
# --------------------------------------------------------------------
# GIF utilities
def _get_optimize(im, info):
return im.mode in ("P", "L") and info and info.get("optimize", 0)
def _get_used_palette_colors(im):
used_palette_colors = []
# check which colors are used
i = 0
for count in im.histogram():
if count:
used_palette_colors.append(i)
i += 1
return used_palette_colors
def getheader(im, palette=None, info=None):
    """Return a list of strings representing a GIF header.

    :param im: Image ("P" or "L" mode) to build the header for.
    :param palette: Optional raw RGB palette bytes overriding the image's.
    :param info: Encoder options; GIF89-only features here bump the
        version string, and ``info["optimize"]`` enables palette pruning.
    :return: ``(header, used_palette_colors)`` where *header* is a list
        of byte strings and *used_palette_colors* is None unless the
        palette was optimized.  NOTE: optimization mutates *im* in place
        (pixel data and palette are rewritten).
    """
    # Header Block
    # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp
    version = b"87a"
    # any GIF89-only feature present in *info* forces the newer version
    for extensionKey in ["transparency", "duration", "loop", "comment"]:
        if info and extensionKey in info:
            if ((extensionKey == "duration" and info[extensionKey] == 0) or
                    (extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255))):
                continue
            version = b"89a"
            break
    else:
        # NOTE(review): the reader stores info["version"] as bytes
        # (e.g. b"GIF89a"), so this str comparison can never match on
        # Python 3 -- looks like a latent bug; confirm intended check.
        if im.info.get("version") == "89a":
            version = b"89a"
    header = [
        b"GIF"+version + # signature + version
        o16(im.size[0]) + # canvas width
        o16(im.size[1]) # canvas height
    ]
    if im.mode == "P":
        if palette and isinstance(palette, bytes):
            source_palette = palette[:768]
        else:
            source_palette = im.im.getpalette("RGB")[:768]
    else: # L-mode
        if palette and isinstance(palette, bytes):
            source_palette = palette[:768]
        else:
            # greyscale identity ramp: R == G == B for every index
            source_palette = bytearray([i//3 for i in range(768)])
    used_palette_colors = palette_bytes = None
    if _get_optimize(im, info):
        used_palette_colors = _get_used_palette_colors(im)
        # create the new palette if not every color is used
        if len(used_palette_colors) < 256:
            palette_bytes = b""
            new_positions = {}
            i = 0
            # pick only the used colors from the palette
            for oldPosition in used_palette_colors:
                palette_bytes += source_palette[oldPosition*3:oldPosition*3+3]
                new_positions[oldPosition] = i
                i += 1
            # replace the palette color id of all pixel with the new id
            image_bytes = bytearray(im.tobytes())
            for i in range(len(image_bytes)):
                image_bytes[i] = new_positions[image_bytes[i]]
            im.frombytes(bytes(image_bytes))
            # pad the in-image palette back out to a full 768 bytes
            new_palette_bytes = (palette_bytes +
                                 (768 - len(palette_bytes)) * b'\x00')
            im.putpalette(new_palette_bytes)
            im.palette = ImagePalette.ImagePalette("RGB",
                                                   palette=palette_bytes,
                                                   size=len(palette_bytes))
    if not palette_bytes:
        palette_bytes = source_palette
    # Logical Screen Descriptor
    # calculate the palette size for the header
    import math
    color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1
    if color_table_size < 0:
        color_table_size = 0
    # size of global color table + global color table flag
    header.append(o8(color_table_size + 128))
    # background + reserved/aspect
    if info and "background" in info:
        background = info["background"]
    elif "background" in im.info:
        # This elif is redundant within GifImagePlugin
        # since im.info parameters are bundled into the info dictionary
        # However, external scripts may call getheader directly
        # So this maintains earlier behaviour
        background = im.info["background"]
    else:
        background = 0
    header.append(o8(background) + o8(0))
    # end of Logical Screen Descriptor
    # add the missing amount of bytes
    # the palette has to be 2<<n in size
    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes)//3
    if actual_target_size_diff > 0:
        palette_bytes += o8(0) * 3 * actual_target_size_diff
    # Header + Logical Screen Descriptor + Global Color Table
    header.append(palette_bytes)
    return header, used_palette_colors
def getdata(im, offset=(0, 0), **params):
    """Return a list of byte strings representing this image.

    The first string is a local image header, the rest contain
    encoded image data.

    :param im: Image to encode (mode must be a key of RAWMODE).
    :param offset: (x, y) offset written into the local image header.
    :param params: Extra encoder options (e.g. ``duration=1000``),
        exposed to the encoder via ``im.encoderinfo``.
    """
    class Collector(object):
        # Minimal write-only file object that collects written chunks.
        def __init__(self):
            # BUGFIX: ``data`` used to be a class-level list, which is
            # shared by every Collector and therefore accumulated output
            # across successive getdata() calls.  It must be per-instance.
            self.data = []
        def write(self, data):
            self.data.append(data)
    im.load() # make sure raster data is available
    fp = Collector()
    try:
        im.encoderinfo = params
        # local image header
        _get_local_header(fp, im, offset, 0)
        ImageFile._save(im, fp, [("gif", (0, 0)+im.size, 0, RAWMODE[im.mode])])
        fp.write(b"\0") # end of image data
    finally:
        # never leave encoderinfo attached to the caller's image
        del im.encoderinfo
    return fp.data
# --------------------------------------------------------------------
# Registry
# Register the GIF reader/writer with PIL's plugin machinery.
Image.register_open(GifImageFile.format, GifImageFile, _accept)
Image.register_save(GifImageFile.format, _save)
Image.register_save_all(GifImageFile.format, _save_all)
Image.register_extension(GifImageFile.format, ".gif")
Image.register_mime(GifImageFile.format, "image/gif")
#
# Uncomment the following line if you wish to use NETPBM/PBMPLUS
# instead of the built-in "uncompressed" GIF encoder
# Image.register_save(GifImageFile.format, _save_netpbm)
| {
"content_hash": "5ed6ff89e985ca1f8622d3dcfea222dc",
"timestamp": "",
"source": "github",
"line_count": 672,
"max_line_length": 91,
"avg_line_length": 31.41220238095238,
"alnum_prop": 0.49732341655218154,
"repo_name": "zhanqxun/cv_fish",
"id": "6bca4dd03fa3d955cf37743d344c991c1e5182ef",
"size": "21975",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "PIL/GifImagePlugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "2209"
},
{
"name": "C",
"bytes": "306616"
},
{
"name": "C++",
"bytes": "85075"
},
{
"name": "FORTRAN",
"bytes": "3200"
},
{
"name": "HTML",
"bytes": "68199"
},
{
"name": "JavaScript",
"bytes": "1701"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "9775078"
},
{
"name": "Visual Basic",
"bytes": "949"
},
{
"name": "XSLT",
"bytes": "2113"
}
],
"symlink_target": ""
} |
from ngo.template import render
def home(request):
    """Render the app2 home page."""
    return render(request, 'app2/home.html')
def hello(request, name):
    """Render the app2 hello page, passing *name* to the template."""
    context = {
        'name': name
    }
    return render(request, 'app2/hello.html', context) | {
"content_hash": "9a13c556387696c49e7dda45727bb492",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 54,
"avg_line_length": 23,
"alnum_prop": 0.6304347826086957,
"repo_name": "naritotakizawa/ngo",
"id": "e00c1f5995db037d291c621fe15ac1e52ee9d3c2",
"size": "230",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/project1/app2/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "74"
},
{
"name": "HTML",
"bytes": "1782"
},
{
"name": "JavaScript",
"bytes": "34"
},
{
"name": "Python",
"bytes": "81541"
}
],
"symlink_target": ""
} |
"""
mixer.main
~~~~~~~~~~
This module implements the objects generation.
:copyright: 2013 by Kirill Klenov.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import datetime
from copy import deepcopy
import decimal
from importlib import import_module
from collections import defaultdict
from types import GeneratorType
from . import generators as g, fakers as f, mix_types as t
from . import six
# Sentinel returned by ``TypeMixer.get_default`` when a field has no
# default (``None`` itself may be a legitimate default value).
NO_VALUE = object()
class Mix(object):
    """ Virtual link on the mixed object.
    ::
        mixer = Mixer()
        # here `mixer.mix` points on a generated `User` instance
        user = mixer.blend(User, username=mixer.mix.first_name)
        # here `mixer.mix` points on a generated `Message.author` instance
        message = mixer.blend(Message, author__name=mixer.mix.login)
        # Mixer mix can get a function
        message = mixer.blend(Message, title=mixer.mix.author(
            lambda author: 'Author: %s' % author.name
        ))
    """
    def __init__(self, value=None, parent=None):
        # attribute name this node resolves (None for the root `mix`)
        self.__value = value
        # parent Mix node for chained lookups like mix.author.name
        self.__parent = parent
        # optional post-processing function set via __call__
        self.__func = None
    def __getattr__(self, value):
        # attribute access builds the chain; the root node (value=None)
        # does not become a parent
        return Mix(value, self if self.__value else None)
    def __call__(self, func):
        # register a function applied to the resolved value
        self.__func = func
        return self
    def __and__(self, value):
        # resolve the chain against a concrete target object
        if self.__parent:
            value = self.__parent & value
        value = getattr(value, self.__value)
        if self.__func:
            return self.__func(value)
        return value
    def __str__(self):
        return '%s/%s' % (self.__value, str(self.__parent or ''))
    def __repr__(self):
        return '<Mix %s>' % str(self)
class ServiceValue(object):
    """ Abstract class for mixer values. """
    def __init__(self, *args, **kwargs):
        # remember construction arguments for the concrete generator
        self.args = args
        self.kwargs = kwargs
    @classmethod
    def __call__(cls, *args, **kwargs):
        # Declared as a classmethod so that calling an *instance*
        # (e.g. ``mixer.fake(str)``) builds a new configured instance.
        return cls(*args, **kwargs)
    def gen_value(self, type_mixer, *args, **kwargs):
        """ Abstract method for value generation. """
        raise NotImplementedError
class Field(ServiceValue):
    """ Set field values.
    By default the mixer generates random or fake a field values by types
    of them. But you can set some values by manual.
    ::
        # Generate a User model
        mixer.blend(User)
        # Generate with some values
        mixer.blend(User, name='John Connor')
    .. note:: Value may be a callable or instance of generator.
    ::
        # Value may be callable
        client = mixer.blend(Client, username=lambda:'callable_value')
        assert client.username == 'callable_value'
        # Value may be a generator
        clients = mixer.cycle(4).blend(
            Client, username=(name for name in ('Piter', 'John')))
    .. seealso:: :class:`mixer.main.Fake`, :class:`mixer.main.Random`,
                 :class:`mixer.main.Select`,
                 :meth:`mixer.main.Mixer.sequence`
    """
    # distinguishes plain fields from Relation fields at fill time
    is_relation = False
    def __init__(self, scheme, name):
        # scheme: the field's type/descriptor; name: the attribute name
        self.scheme = scheme
        self.name = name
    def __deepcopy__(self, memo):
        # the scheme itself is shared, not copied
        return Field(self.scheme, self.name)
    def gen_value(self, type_mixer, *args, **kwargs):
        """ Call :meth:`TypeMixer.gen_field`.
        :return value: A generated value
        """
        return type_mixer.gen_field(*args, **kwargs)
class Relation(Field):
    """ Generate a relation values.
    Some fields from a model could be a relation on other models.
    Mixer can generate this fields as well, but you can force some
    values for generated models. Use `__` for relation values.
    ::
        message = mixer.blend(Message, client__username='test2')
        assert message.client.username == 'test2'
        # more hard relation
        message = mixer.blend(Message, client__role__name='admin')
        assert message.client.role.name == 'admin'
    """
    is_relation = True
    def __init__(self, scheme, name, params=None):
        super(Relation, self).__init__(scheme, name)
        # forced values for the related model, keyed by field name
        self.params = params or dict()
    def __deepcopy__(self, memo):
        # params must be copied so blends do not share forced values
        return Relation(self.scheme, self.name, deepcopy(self.params))
    def gen_value(self, type_mixer, *args, **kwargs):
        """ Call :meth:`TypeMixer.gen_relation`.
        :return value: A generated value
        """
        return type_mixer.gen_relation(*args, **kwargs)
# Service classes
class Fake(ServiceValue):
    """ Force a `fake` value.
    If you initialized a :class:`~mixer.main.Mixer` with `fake=False` you can
    force a `fake` value for field with this attribute (mixer.fake).
    ::
        mixer = Mixer(fake=False)
        user = mixer.blend(User)
        print user.name  # Some like: Fdjw4das
        user = mixer.blend(User, name=mixer.fake)
        print user.name  # Some like: Bob Marley
    You can setup a field type for generation of fake value: ::
        user = mixer.blend(User, score=mixer.fake(str))
        print user.score  # Some like: Bob Marley
    .. note:: This is also useful on ORM model generation for filling a fields
              with default values (or null).
    ::
        from mixer.backend.django import mixer
        user = mixer.blend('auth.User', first_name=mixer.fake)
        print user.first_name  # Some like: John
    """
    def gen_value(self, type_mixer, *args, **kwargs):
        """ Call :meth:`TypeMixer.gen_fake`.
        :return value: A generated value
        """
        return type_mixer.gen_fake(*args, **kwargs)
class Random(ServiceValue):
    """ Force a `random` value.
    If you initialized a :class:`~mixer.main.Mixer` by default mixer try to
    fill fields with `fake` data. You can user `mixer.random` for prevent this
    behaviour for a custom fields.
    ::
        mixer = Mixer()
        user = mixer.blend(User)
        print user.name  # Some like: Bob Marley
        user = mixer.blend(User, name=mixer.random)
        print user.name  # Some like: Fdjw4das
    You can setup a field type for generation of fake value: ::
        user = mixer.blend(User, score=mixer.random(str))
        print user.score  # Some like: Fdjw4das
    Or you can get random value from choices: ::
        user = mixer.blend(User, name=mixer.random('john', 'mike'))
        print user.name  # mike or john
    .. note:: This is also useful on ORM model generation for randomize fields
              with default values (or null).
    ::
        from mixer.backend.django import mixer
        mixer.blend('auth.User', first_name=mixer.random)
        print user.first_name  # Some like: Fdjw4das
    """
    def gen_value(self, type_mixer, *args, **kwargs):
        """ Call :meth:`TypeMixer.gen_random`.
        :return value: A generated value
        """
        return type_mixer.gen_random(*args, **kwargs)
class Select(ServiceValue):
    """ Select values from database.
    When you generate some ORM models you can set value for related fields
    from database (select by random).
    Example for Django (select user from exists): ::
        from mixer.backend.django import mixer
        mixer.generate(Role, user=mixer.select)
    You can setup a Django or SQLAlchemy filters with `mixer.select`: ::
        from mixer.backend.django import mixer
        mixer.generate(Role, user=mixer.select(
            username='test'
        ))
    """
    def gen_value(self, type_mixer, *args, **kwargs):
        """ Call :meth:`TypeMixer.gen_select`.
        :return value: A generated value
        """
        return type_mixer.gen_select(*args, **kwargs)
class GenFactoryMeta(type):
    """ Precache generators.

    Merges the ``generators``/``fakers``/``types`` lookup tables of all
    GenFactory base classes into the new class, flattening composite
    (tuple/list) keys into one entry per key.
    """
    def __new__(mcs, name, bases, params):
        generators = dict()
        fakers = dict()
        types = dict()
        # start from the tables inherited from every GenFactory base
        for cls in bases:
            if isinstance(cls, GenFactoryMeta):
                generators.update(cls.generators)
                fakers.update(cls.fakers)
                types.update(cls.types)
        # then layer this class's own declarations on top
        generators.update(params.get('generators', dict()))
        fakers.update(params.get('fakers', dict()))
        types.update(params.get('types', dict()))
        # expand tuple/list keys into individual entries
        generators = dict(mcs.__flat_keys(generators))
        types = dict(mcs.__flat_keys(types))
        params['generators'] = generators
        params['fakers'] = fakers
        params['types'] = types
        return super(GenFactoryMeta, mcs).__new__(mcs, name, bases, params)
    @staticmethod
    def __flat_keys(d):
        # yield (key, value) pairs with composite keys flattened
        for key, value in d.items():
            if isinstance(key, (tuple, list)):
                for k in key:
                    yield k, value
                continue
            yield key, value
class GenFactory(six.with_metaclass(GenFactoryMeta)):
    """ Make generators for types. """
    # simple type -> random-value generator factory
    generators = {
        bool: g.gen_boolean,
        float: g.gen_float,
        int: g.gen_integer,
        str: g.gen_string,
        datetime.date: g.gen_date,
        datetime.datetime: g.gen_datetime,
        datetime.time: g.gen_time,
        decimal.Decimal: g.gen_decimal,
        t.BigInteger: g.gen_big_integer,
        t.EmailString: f.gen_email,
        t.HostnameString: f.gen_hostname,
        t.IP4String: f.gen_ip4,
        t.NullOrBoolean: g.gen_null_or_boolean,
        t.PositiveDecimal: g.gen_positive_decimal,
        t.PositiveInteger: g.gen_positive_integer,
        t.SmallInteger: g.gen_small_integer,
        t.Text: f.gen_lorem,
        t.URL: f.gen_url,
        t.UUID: f.gen_uuid,
        None: g.loop(lambda: ''),
    }
    # (field name, simple type) -> fake-value generator factory
    fakers = {
        ('name', str): f.gen_name,
        ('first_name', str): f.gen_firstname,
        ('firstname', str): f.gen_firstname,
        ('last_name', str): f.gen_lastname,
        ('lastname', str): f.gen_lastname,
        ('title', str): f.gen_lorem,
        ('description', str): f.gen_lorem,
        ('content', str): f.gen_lorem,
        ('body', str): f.gen_lorem,
        ('city', str): f.gen_city,
        ('country', str): f.gen_country,
        ('email', str): f.gen_email,
        ('username', str): f.gen_username,
        ('login', str): f.gen_username,
        ('domain', str): f.gen_hostname,
        ('phone', str): f.gen_phone,
        ('company', str): f.gen_company,
        ('lat', float): f.gen_latlon,
        ('latitude', float): f.gen_latlon,
        ('lon', float): f.gen_latlon,
        ('longitude', float): f.gen_latlon,
        ('url', t.URL): f.gen_url,
    }
    # framework field class -> simple type; filled in by ORM backends
    types = {}
    @classmethod
    def cls_to_simple(cls, fcls):
        """ Translate class to one of simple base types.
        :return type: A simple type for generation
        """
        return cls.types.get(fcls) or (
            fcls if fcls in cls.generators
            else None
        )
    @staticmethod
    def name_to_simple(fname):
        """ Translate name to one of simple base names.
        :return str:
        """
        fname = fname or ''
        return fname.lower().strip()
    @classmethod
    def gen_maker(cls, fcls, fname=None, fake=False):
        """ Make a generator based on class and name.
        :return generator:
        """
        fcls = cls.cls_to_simple(fcls)
        fname = cls.name_to_simple(fname)
        gen_maker = cls.generators.get(fcls)
        # a (name, type) faker takes precedence when fake data is wanted
        if fname and fake and (fname, fcls) in cls.fakers:
            gen_maker = cls.fakers.get((fname, fcls)) or gen_maker
        return gen_maker
class TypeMixerMeta(type):
    """ Cache mixers by class.

    Calling ``TypeMixer(scheme, ...)`` twice with the same
    (mixer, scheme, fake, factory) combination returns the same cached
    instance (stored in :attr:`mixers`).
    """
    mixers = dict()
    def __call__(cls, cls_type, mixer=None, factory=None, fake=True):
        backup = cls_type
        try:
            cls_type = cls.__load_cls(cls_type)
            assert cls_type
        except (AttributeError, AssertionError):
            raise ValueError('Invalid scheme: %s' % backup)
        key = (mixer, cls_type, fake, factory)
        # PEP 8: "key not in" instead of "not key in"
        if key not in cls.mixers:
            cls.mixers[key] = super(TypeMixerMeta, cls).__call__(
                cls_type, mixer=mixer, factory=factory, fake=fake,
            )
        return cls.mixers[key]
    @staticmethod
    def __load_cls(cls_type):
        # accept either a class or a dotted-path string ('pkg.mod.Class')
        if isinstance(cls_type, six.string_types):
            mod, cls_type = cls_type.rsplit('.', 1)
            mod = import_module(mod)
            cls_type = getattr(mod, cls_type)
        return cls_type
class TypeMixer(six.with_metaclass(TypeMixerMeta)):
    """ Generate models.

    One TypeMixer per scheme class; knows how to fill the scheme's
    fields with predefined, fake or random values.
    """
    factory = GenFactory
    # service-value singletons usable as field values
    fake = Fake()
    select = Select()
    random = Random()
    def __init__(self, cls, mixer=None, factory=None, fake=True):
        self.__scheme = cls
        self.__mixer = mixer
        self.__fake = fake
        self.__fields = dict(self.__load_fields())
        self.__factory = factory or self.factory
        # generator cache, keyed by (field class, field name, fake)
        self.__generators = dict()
        # already-produced values, used to enforce uniqueness
        self.__gen_values = defaultdict(set)
    def __repr__(self):
        return "<TypeMixer {0}>".format(self.__scheme)
    def blend(self, **values):
        """ Generate instance.
        :param **values: Predefined fields
        :return value: a generated value
        """
        target = self.__scheme()
        defaults = deepcopy(self.__fields)
        # Prepare relations
        for key, params in values.items():
            if '__' in key:
                rname, rvalue = key.split('__', 1)
                field = defaults.get(rname)
                if not isinstance(field, Relation):
                    defaults[rname] = Relation(
                        field and field.scheme or field,
                        field and field.name or rname)
                defaults[rname].params.update({rvalue: params})
                continue
            defaults[key] = params
        # Fill fields in 2 steps: deferred (Mix) values are resolved
        # only after post_generate has run
        post_values = [
            item for item in [
                self.set_value(target, fname, fvalue, finaly=True)
                for (fname, fvalue) in [
                    item for item in self.fill_fields(target, defaults) if item
                ]
            ] if item
        ]
        if self.__mixer:
            target = self.__mixer.post_generate(target)
        for fname, fvalue in post_values:
            setattr(target, fname, fvalue)
        return target
    def fill_fields(self, target, defaults):
        """ Fill all required fields. """
        for fname, fvalue in defaults.items():
            if isinstance(fvalue, ServiceValue):
                yield fvalue.gen_value(self, target, fname, fvalue)
            else:
                yield self.set_value(target, fname, fvalue)
    def set_value(self, target, field_name, field_value, finaly=False):
        """ Set `value` to `target` as `field_name`.
        :return : None or (name, value) for later use
        """
        if isinstance(field_value, Mix):
            if not finaly:
                # defer Mix resolution to the second fill step
                return field_name, field_value
            return self.set_value(
                target, field_name, field_value & target, finaly=finaly)
        if callable(field_value):
            return self.set_value(
                target, field_name, field_value(), finaly=finaly)
        if isinstance(field_value, GeneratorType):
            return self.set_value(
                target, field_name, next(field_value), finaly=finaly)
        setattr(target, field_name, field_value)
    def gen_value(self, target, field_name, field_class, fake=None,
                  unique=False):
        """ Generate values from basic types.
        Set value to target.
        :return : None or (name, value) for later use
        """
        fake = self.__fake if fake is None else fake
        gen = self.get_generator(field_class, field_name, fake=fake)
        value = next(gen)
        if unique:
            counter = 0
            # retry until a value not seen before for this field class
            while value in self.__gen_values[field_class]:
                value = next(gen)
                counter += 1
                if counter > 100:
                    raise RuntimeError(
                        "Cannot generate a unique value for %s" % field_name
                    )
            self.__gen_values[field_class].add(value)
        return self.set_value(target, field_name, value)
    def gen_field(self, target, field_name, field):
        """ Generate value by field.
        :param target: Target for generate value.
        :param field_name: Name of field for generation.
        :param field: Instance of :class:`Field`
        :return : None or (name, value) for later use
        """
        default = self.get_default(field, target)
        # PEP 8: "is not" instead of "not ... is"
        if default is not NO_VALUE:
            return self.set_value(target, field_name, default)
        required = self.is_required(field)
        if not required:
            return False
        unique = self.is_unique(field)
        return self.gen_value(target, field_name, field.scheme, unique=unique)
    def gen_relation(self, target, field_name, relation, force=False):
        """ Generate a related field by `relation`.
        :param target: Target for generate value.
        :param field_name: Name of field for generation.
        :param relation: Instance of :class:`Relation`
        :param force: Force a value generation
        :return : None or (name, value) for later use
        """
        mixer = TypeMixer(relation.scheme, self.__mixer, self.__factory)
        return self.set_value(
            target, field_name, mixer.blend(**relation.params))
    def gen_random(self, target, field_name, field_value):
        """ Generate random value of field with `field_name` for `target`.
        :param target: Target for generate value.
        :param field_name: Name of field for generation.
        :param field_value: Instance of :class:`~mixer.main.Random`.
        :return : None or (name, value) for later use
        """
        if field_value.args:
            scheme = field_value.args[0]
            if not isinstance(scheme, type):
                # args are a set of explicit choices, not a type
                return self.set_value(
                    target, field_name, g.get_choice(field_value.args))
        else:
            scheme = self.__fields.get(field_name)
            if scheme:
                if scheme.is_relation:
                    return self.gen_relation(target, field_name, scheme, True)
                scheme = scheme.scheme
        return self.gen_value(target, field_name, scheme, fake=False)
    # select behaves like random for the base (non-ORM) mixer
    gen_select = gen_random
    def gen_fake(self, target, field_name, field_value):
        """ Generate fake value of field with `field_name` for `target`.
        :param target: Target for generate value.
        :param field_name: Name of field for generation.
        :param field_value: Instance of :class:`~mixer.main.Fake`.
        :return : None or (name, value) for later use
        """
        if field_value.args:
            scheme = field_value.args[0]
        else:
            field = self.__fields.get(field_name)
            scheme = field and field.scheme or field
        return self.gen_value(target, field_name, scheme, fake=True)
    def get_generator(self, field_class, field_name=None, fake=None):
        """ Get generator for field and cache it.
        :param field_class: Class for looking a generator
        :param field_name: Name of field for generation
        :param fake: Generate fake data instead of random data.
        :return generator:
        """
        if fake is None:
            fake = self.__fake
        key = (field_class, field_name, fake)
        # PEP 8: "key not in" instead of "not key in"
        if key not in self.__generators:
            self.__generators[key] = self.make_generator(
                field_class, field_name, fake)
        return self.__generators[key]
    def make_generator(self, field_class, field_name=None, fake=None):
        """ Make generator for class.
        :param field_class: Class for looking a generator
        :param field_name: Name of field for generation
        :param fake: Generate fake data instead of random data.
        :return generator:
        """
        return self.__factory.gen_maker(field_class, field_name, fake)()
    @staticmethod
    def is_unique(field):
        """ Return True is field's value should be a unique.
        :return bool:
        """
        return False
    @staticmethod
    def is_required(field):
        """ Return True is field's value should be defined.
        :return bool:
        """
        return True
    @staticmethod
    def get_default(field, target):
        """ Get default value from field.
        :return value:
        """
        return NO_VALUE
    def __load_fields(self):
        """ GenFactory of scheme's fields. """
        # every public attribute of the scheme becomes a Field
        for fname in dir(self.__scheme):
            if fname.startswith('_'):
                continue
            yield fname, Field(getattr(self.__scheme, fname), fname)
class MetaMixer:
    """ A Mixer proxy. Using for generate a few objects.
    ::
        mixer.cycle(5).blend(somemodel)
    """
    def __init__(self, mixer, count=5):
        # number of objects to generate per blend() call
        self.count = count
        self.mixer = mixer
    def blend(self, scheme, **values):
        """ Call :meth:`Mixer.blend` a few times. And stack results to list.
        :return list: list of generated objects.
        """
        # idiomatic comprehension instead of a manual append loop
        return [self.mixer.blend(scheme, **values)
                for _ in range(self.count)]
    def __getattr__(self, name):
        # only `blend` is proxied; anything else is a usage error
        raise AttributeError('Use "cycle" only for "blend"')
class Mixer(object):

    """ This class is used for integration to one or more applications.

    :param fake: (True) Generate fake data instead of random data.
    :param factory: (:class:`~mixer.main.GenFactory`) Fabric of generators
                    for types values

    ::

        class SomeScheme:
            score = int
            name = str

        mixer = Mixer()
        instance = mixer.blend(SomeScheme)
        print instance.name  # Some like: 'Mike Douglass'

        mixer = Mixer(fake=False)
        instance = mixer.blend(SomeScheme)
        print instance.name  # Some like: 'AKJfdjh3'

    """

    #: Force a fake values. See :class:`~mixer.main.Fake`
    fake = Fake()

    #: Force a random values. See :class:`~mixer.main.Random`
    random = Random()

    #: Select a data from databases. See :class:`~mixer.main.Select`
    select = Select()

    #: Points to a mixed object from future. See :class:`~mixer.main.Mix`
    mix = Mix()

    # generator's controller class
    type_mixer_cls = TypeMixer

    def __init__(self, fake=True, factory=None, **params):
        """Initialize the Mixer.

        :param fake: (True) Generate fake data instead of random data.
        :param factory: (:class:`~mixer.main.GenFactory`) A class for
                        generation values for types
        """
        self.params = params
        self.factory = factory
        self.fake = fake

    def __repr__(self):
        mode = 'fake' if self.fake else 'rand'
        return "<Mixer [{0}]>".format(mode)

    def blend(self, scheme, **values):
        """Generate an instance of `scheme`.

        :param scheme: Scheme class for generation or string with class path.
        :param values: Keyword params with predefined values
        :return value: A generated instance

        ::

            mixer = Mixer()
            mixer.blend(SomeScheme, active=True)
            print scheme.active  # True

            mixer.blend('module.SomeScheme', active=True)
            print scheme.active  # True

        """
        controller = self.type_mixer_cls(
            scheme, mixer=self, fake=self.fake, factory=self.factory)
        return controller.blend(**values)

    @staticmethod
    def post_generate(target):
        """ Post processing a generated value (identity by default).

        :return value:
        """
        return target

    @staticmethod
    def sequence(func=None):
        """ Create an infinite generator of sequential values.

        :param func: a one-argument function, or a format string applied
            to the counter. By default the counter itself is yielded.
        :type func: Function from one argument or format string.
        :return generator:

        Mixer can consume generators directly:

        ::

            gen = (name for name in ['test0', 'test1', 'test2'])
            for counter in range(3):
                mixer.blend(Scheme, name=gen)

        Mixer.sequence builds such generators from functions:

        ::

            for counter in range(3):
                mixer.blend(Scheme, name=mixer.sequence(
                    lambda c: 'test%s' % c
                ))

        Short format is a python formating string:

        ::

            for counter in range(3):
                mixer.blend(Scheme, name=mixer.sequence('test{0}'))

        """
        if func is None:
            func = lambda x: x
        elif isinstance(func, six.string_types):
            func = func.format

        def _count_forever():
            step = 0
            while True:
                yield func(step)
                step += 1

        return _count_forever()

    def cycle(self, count=5):
        """ Generate a few objects. Syntastic sugar for cycles.

        :param count: number of objects to generate.
        :return MetaMixer:

        ::

            users = mixer.cycle(5).blend('somemodule.User')
            apples = mixer.cycle(10).blend(
                Apple, title=mixer.sequence('apple_{0}'))

        """
        return MetaMixer(self, count)
# Default mixer
# Module-level convenience instance (fake-data mode) so callers can simply
# ``from mixer.main import mixer``.
mixer = Mixer()
# lint_ignore=C901,W0622,F0401,W0621,W0231,E1002
| {
"content_hash": "1a79f6ebd76596396b76147e7af8dfde",
"timestamp": "",
"source": "github",
"line_count": 933,
"max_line_length": 79,
"avg_line_length": 27.659163987138264,
"alnum_prop": 0.5717275052313415,
"repo_name": "mattcaldwell/mixer",
"id": "6de19e13664c1b7e06769e5d051291d0d9d01156",
"size": "25806",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mixer/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "112696"
}
],
"symlink_target": ""
} |
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
from db import Base, Session
# Polymorphic base class for Users: `permissions` is the discriminator
# column (single-table inheritance); this base maps identity 'user'.
class User(Base):
    """SQLAlchemy model for the ``users`` table.

    Polymorphic base: ``permissions`` is the discriminator column, and
    this base class maps the ``'user'`` identity.
    """
    __tablename__ = 'users'
    # surrogate primary key
    id = Column(Integer, primary_key=True, nullable=False)
    name = Column(String(75), nullable=False)
    # contact/login address; must be unique across all users
    email = Column(String(60), nullable=False, unique=True)
    # stores a password hash, never the plaintext password
    passwordhash = Column(String(255), nullable=False)
    phone = Column(String(15), nullable=False)
    # role discriminator; also drives the polymorphic mapping below
    permissions = Column(Enum('volunteer', 'organization', 'admin'), nullable=False)
    # timestamp of last activity (stored timezone-naive)
    last_active = Column(DateTime(timezone=False), nullable=True)
    # NOTE(review): purpose not shown here — presumably an auth/reset
    # token; confirm against the code that writes it.
    token = Column(String(50))
    __mapper_args__ = {
        'polymorphic_identity': 'user',
        'polymorphic_on': permissions
    }
    def __repr__(self):
        return "User(%s, %s)" % (self.id, self.name)
| {
"content_hash": "06bc12f7b426adabef2fdf16eccaeabd",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 81,
"avg_line_length": 24.636363636363637,
"alnum_prop": 0.7121771217712177,
"repo_name": "knuevena/americorps-backend",
"id": "cf99072e1a39f7cf93343d1196763264c84d4e59",
"size": "813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "107640"
}
],
"symlink_target": ""
} |
import subprocess
import threading
import tkinter as Tk
import tkinter.scrolledtext
import tkinter.ttk as ttk
from tkinter.filedialog import askopenfilename
from PIL import Image, ImageTk
class App:
    """Tkinter GUI front-end for the ``ratload`` board-programming tool.

    Lays out a logo, a file picker, a serial-device combobox, a
    "Program Board" button and a scrolling results area, and shells out
    to the ratload command-line executable for the actual work.
    """
    def __init__(self, master):
        # --- widget construction ---------------------------------------
        # NOTE(review): ``frame`` is never gridded/packed or referenced
        # again — appears unused.
        frame = Tk.Frame(master)
        top_menu = Tk.Menu(master)
        actions_menu = Tk.Menu(top_menu, tearoff=0)
        # NOTE(review): Image.ANTIALIAS is deprecated/removed in newer
        # Pillow releases (use Image.LANCZOS) — confirm pinned version.
        logo_png = Image.open("ratload_logo.png")
        logo_png = logo_png.resize((100, 100), Image.ANTIALIAS)
        logo = ImageTk.PhotoImage(logo_png)
        logo_label = ttk.Label(master, image=logo, text="ratload",
                               compound=Tk.LEFT, font=("Helvetica", 24))
        file_label = ttk.Label(master, text="Selected File",
                               font=("Helvetica", 13))
        # Read-only entry mirroring the file chosen via select_file().
        self.file_var = Tk.StringVar()
        self.file_entry = ttk.Entry(master, state=Tk.DISABLED,
                                    textvariable=self.file_var)
        file_button = ttk.Button(master, text="Choose File...",
                                 command=self.select_file)
        device_label = ttk.Label(master, text="Selected Serial Device",
                                 font=("Helvetica", 12))
        device_entry_f = ttk.Frame(master)
        self.device_var = Tk.StringVar()
        self.device_entry = ttk.Combobox(device_entry_f, height=1,
                                         textvariable=self.device_var)
        program_button_f = ttk.Frame(master)
        program_button = ttk.Button(program_button_f, text="Program\n Board",
                                    command=self.program_board)
        results_f = ttk.Frame(master)
        # ``Tk.scrolledtext`` resolves because ``import tkinter.scrolledtext``
        # at the top of the file registers the submodule on the package.
        self.results = Tk.scrolledtext.ScrolledText(
            master=results_f,
            wrap=Tk.WORD,
            width=40
        )
        # --- menu ------------------------------------------------------
        actions_menu.add_command(label="Refresh Serial Devices",
                                 command=self.refresh_serial_devices)
        actions_menu.add_command(label="Run Serial Test",
                                 command=self.run_serial_test)
        actions_menu.add_command(label="Clear Results Area",
                                 command=self.clear_results)
        actions_menu.add_command(label="About / License",
                                 command=self.show_help)
        # NOTE(review): refers to the module-level ``root`` global;
        # ``master.quit`` would avoid the hidden dependency — confirm.
        actions_menu.add_command(label="Exit", command=root.quit)
        top_menu.add_cascade(label="Menu", menu=actions_menu)
        master.config(menu=top_menu)
        # Keep a reference so the PhotoImage is not garbage-collected.
        logo_label.photo = logo
        self.refresh_serial_devices()
        # --- geometry --------------------------------------------------
        logo_label.grid(row=0, columnspan=3)
        file_label.grid(row=1, column=0, sticky=Tk.E, pady="5")
        self.file_entry.grid(row=1, column=1, padx="10 0")
        file_button.grid(row=1, column=2, padx="10")
        device_label.grid(row=2, column=0, sticky=Tk.E, padx="10 0", pady="5")
        device_entry_f.grid(row=2, column=1, columnspan=2,
                            sticky=Tk.E+Tk.W, padx="10")
        self.device_entry.pack(fill=Tk.BOTH)
        program_button_f.grid(row=3, columnspan=3, rowspan=2,
                              sticky=Tk.E+Tk.W+Tk.N+Tk.S, padx="10", pady="5")
        program_button.pack(fill=Tk.BOTH)
        results_f.grid(row=5, columnspan=3, rowspan=10,
                       sticky=Tk.E+Tk.W+Tk.N+Tk.S, padx="10", pady="5")
        results_f.grid_propagate(False)
        self.results.pack(fill=Tk.X)
        # Results area stays read-only except while append()/clear_results() run.
        self.results.config(state=Tk.DISABLED)
        # self.results.config(state=NORMAL)
        # self.results.delete(1.0, END)
        # self.results.insert(END, text)
        # self.results.config(state=Tk.DISABLED)
    def show_help(self):
        # "About / License" dialog not implemented yet.
        pass
    def select_file(self):
        """Prompt for a file and show its path in the disabled entry."""
        filename = askopenfilename().strip()
        self.file_var.set(filename)
    def _do_ratload(self, args):
        """Run the ratload executable with ``args`` on a worker thread,
        streaming its combined stdout/stderr into the results area.

        :param args: extra command-line arguments for the executable.
        :return: the started ``threading.Thread``.
        """
        def run():
            proc = subprocess.Popen(
                ["./ratload_Windows_x86.exe"] + args,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT
            )
            # Stream one byte at a time so output appears live.
            while proc.poll() is None:
                byte = proc.stdout.read(1)
                self.append(byte)
            proc.wait()
            # Flush whatever remained buffered after exit.
            self.append(proc.stdout.read())
        thread = threading.Thread(target=run)
        thread.start()
        return thread
    def run_serial_test(self):
        """Run ratload's serial test (-t) against the chosen device."""
        self.append("Running Serial Test...\n")
        self._do_ratload(["-d", self.device_var.get(), "-t"])
    def program_board(self):
        """Program the board with the chosen file over the chosen device."""
        self.append("Programming Board...\n")
        self._do_ratload(["-d", self.device_var.get(),
                          "-f", self.file_var.get()])
    def append(self, s):
        """Append ``s`` to the results area (briefly re-enabling it)."""
        self.results.config(state=Tk.NORMAL)
        self.results.insert(Tk.END, s)
        self.results.config(state=Tk.DISABLED)
    def clear_results(self):
        """Erase the results area."""
        self.results.config(state=Tk.NORMAL)
        self.results.delete(1.0, Tk.END)
        self.results.config(state=Tk.DISABLED)
    def refresh_serial_devices(self):
        """List serial devices via ``ratload -l`` on a worker thread and
        repopulate the device combobox.

        NOTE(review): invokes "./ratload_Windows_x86" (no .exe) while
        _do_ratload uses "./ratload_Windows_x86.exe" — confirm which
        name is correct; one of the two is probably wrong.
        """
        def run():
            proc = subprocess.Popen(
                ["./ratload_Windows_x86", "-l"],
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT
            )
            proc.wait()
            self.device_entry.delete(0, Tk.END)
            # One device name per output line.
            devices = [i.strip().decode("utf-8") for i in
                       proc.stdout.readlines()]
            self.device_entry['values'] = devices
            if len(devices):
                self.device_entry.current(0)
            else:
                self.device_entry.insert(0, "__NO_DEVICES_FOUND__")
            return
        thread = threading.Thread(target=run)
        thread.start()
        return thread
# GUI entry point: build the main window, read the version string for the
# title bar, then hand control to the Tk event loop.
root = Tk.Tk()
app = App(root)
# Read the version once via a context manager: the original kept the file
# object open for the life of the process (resource leak).
with open(".version", "r") as version_file:
    version = version_file.read().strip()
root.resizable(width=Tk.FALSE, height=Tk.FALSE)
root.geometry("{}x{}".format(410, 620))
root.wm_title("ratload_v" + version)
root.mainloop()
| {
"content_hash": "2a19a59ef0c94d6699486c57b9f0376b",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 78,
"avg_line_length": 33.61714285714286,
"alnum_prop": 0.5509093999660037,
"repo_name": "jhladky/ratload",
"id": "47c67c66ff048b706d3b715a917ff633aeb162d9",
"size": "5883",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1543"
},
{
"name": "Java",
"bytes": "11563"
},
{
"name": "Makefile",
"bytes": "457"
},
{
"name": "Python",
"bytes": "5957"
},
{
"name": "Shell",
"bytes": "2641"
},
{
"name": "VHDL",
"bytes": "131000"
}
],
"symlink_target": ""
} |
from ciscoconfparse import CiscoConfParse
# Parse the saved IOS configuration from disk (Python 2 script).
cisco_config = CiscoConfParse('cisco_ipsec.txt')
# Select "crypto map CRYPTO" parents that have a "set pfs group2" child.
crypto = cisco_config.find_objects_w_child(parentspec=r"^crypto map CRYPTO", childspec=r"set pfs group2")
# Print each matching parent line followed by all of its child lines.
for pfs in crypto:
    print pfs.text
    for child in pfs.children:
        print child.text
| {
"content_hash": "c4b87ae6262900c819402c2495112e96",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 105,
"avg_line_length": 26.90909090909091,
"alnum_prop": 0.7398648648648649,
"repo_name": "kevrodg/pynet",
"id": "e7c980c251fc3675fb7fec259b4e73264481f3ab",
"size": "319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "1.9.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1526"
}
],
"symlink_target": ""
} |
import manager
# Smoke test: construct a Manager and drive set_node directly.
m = manager.Manager()
# NOTE(review): arguments appear to be (node_id, value) — confirm against
# manager.Manager.set_node before relying on this.
m.set_node(5, 100)
| {
"content_hash": "ee11aeacdbb507c758c36aabcb3ffbf9",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 21,
"avg_line_length": 19.666666666666668,
"alnum_prop": 0.6779661016949152,
"repo_name": "cmeyer/py-home-control",
"id": "b594214b2b8d859527de0b0810f663b3b5cece25",
"size": "59",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "managertest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2977"
}
],
"symlink_target": ""
} |
"""Miscellaneous AMICI Python interface tests"""
import os
import subprocess
from tempfile import TemporaryDirectory
import amici
from amici.ode_export import smart_subs_dict
import libsbml
import pytest
import sympy as sp
from amici.ode_export import _monkeypatched, _custom_pow_eval_derivative
def test_parameter_scaling_from_int_vector():
    """Ensure we can generate a ParameterScaling vector from Python"""
    scalings = [
        amici.ParameterScaling.log10,
        amici.ParameterScaling.ln,
        amici.ParameterScaling.none,
    ]
    scale_vector = amici.parameterScalingFromIntVector(scalings)
    # Round trip: every entry must come back unchanged, in order.
    for position, expected in enumerate(scalings):
        assert scale_vector[position] == expected
def test_sbml2amici_no_observables():
    """Test model generation works for model without observables"""
    # Assemble a minimal SBML model: one compartment holding one species.
    document = libsbml.SBMLDocument(3, 1)
    model = document.createModel()
    model.setTimeUnits("second")
    model.setExtentUnits("mole")
    model.setSubstanceUnits('mole')
    compartment = model.createCompartment()
    compartment.setId('C1')
    model.addCompartment(compartment)
    species = model.createSpecies()
    species.setId('S1')
    species.setCompartment('C1')
    model.addSpecies(species)
    # Import straight from the in-memory model (no file on disk) and
    # generate code with observables explicitly set to None.
    sbml_importer = amici.sbml_import.SbmlImporter(sbml_source=model,
                                                   from_file=False)
    tmpdir = TemporaryDirectory()
    sbml_importer.sbml2amici(modelName="test",
                             output_dir=tmpdir.name,
                             observables=None,
                             compute_conservation_laws=False)
def test_hill_function_dwdx():
    """Kinetic laws with Hill functions, may lead to NaNs in the Jacobian
    if involved states are zero if not properly arranged symbolically.
    Test that what we are applying the right sympy simplification."""
    hill = sp.Matrix([[sp.sympify('Pow(x1, p1) / (Pow(x1, p1) + a)')]])
    jacobian_entry = hill.diff(sp.Symbol('x1'))
    # Without simplification, substituting x1 = 0 must blow up.
    with pytest.raises(ZeroDivisionError):
        with sp.evaluate(False):
            substituted = jacobian_entry.subs({'x1': 0.0})
            _ = str(substituted)
    # powsimp rearranges the powers so the substitution succeeds.
    jacobian_entry = jacobian_entry.applyfunc(
        lambda expr: sp.powsimp(expr, deep=True))
    with sp.evaluate(False):
        substituted = jacobian_entry.subs({'x1': 0.0})
        _ = str(substituted)
@pytest.mark.skipif(os.environ.get('AMICI_SKIP_CMAKE_TESTS', '') == 'TRUE',
                    reason='skipping cmake based test')
def test_cmake_compilation(sbml_example_presimulation_module):
    """Check that CMake build succeeds for one of the models generated during
    Python tests"""
    source_dir = os.path.dirname(sbml_example_presimulation_module.__path__[0])
    cmd = (f"set -e; cd {source_dir}; mkdir -p build; cd build; "
           "cmake ..; make")
    subprocess.run(cmd, shell=True, check=True,
                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def test_smart_subs_dict():
    """smart_subs_dict must substitute in (reverse) insertion order."""
    # Same mapping as {'c': 'a + b', 'd': 'c + a'}, built symbolically.
    subs_sym = {
        sp.sympify('c'): sp.sympify('a + b'),
        sp.sympify('d'): sp.sympify('c + a'),
    }
    expr_sym = sp.sympify('c + d')
    expected_default = sp.sympify('3*a + 2*b')
    expected_reverse = sp.sympify('2*a + b + c')
    result_default = smart_subs_dict(expr_sym, subs_sym)
    result_reverse = smart_subs_dict(expr_sym, subs_sym, reverse=False)
    assert sp.simplify(result_default - expected_default).is_zero
    assert sp.simplify(result_reverse - expected_reverse).is_zero
def test_monkeypatch():
    """Pow derivative monkeypatch removes the t**n singularity at t=0,
    and only while the patch context is active."""
    t, n = sp.symbols('t n')
    at_singularity = [(t, 0), (n, 1)]
    # check that the removable singularity still exists
    assert (t ** n).diff(t).subs(at_singularity) is sp.nan
    # check that we can monkeypatch it out
    with _monkeypatched(sp.Pow, '_eval_derivative',
                        _custom_pow_eval_derivative):
        assert (t ** n).diff(t).subs(at_singularity) is not sp.nan
    # check that the monkeypatch is transient
    assert (t ** n).diff(t).subs(at_singularity) is sp.nan
| {
"content_hash": "3cf5af562eb4aa60e4cfc38a0312fb65",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 79,
"avg_line_length": 32.6953125,
"alnum_prop": 0.6370370370370371,
"repo_name": "AMICI-developer/AMICI",
"id": "6445b5d5117b8f418e13cf30b58cf10f2cbc8a79",
"size": "4185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/tests/test_misc.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6507412"
},
{
"name": "C++",
"bytes": "4515"
},
{
"name": "CMake",
"bytes": "89132"
},
{
"name": "FORTRAN",
"bytes": "115964"
},
{
"name": "Makefile",
"bytes": "196218"
},
{
"name": "Matlab",
"bytes": "436730"
},
{
"name": "Perl",
"bytes": "9412"
},
{
"name": "TeX",
"bytes": "131408"
}
],
"symlink_target": ""
} |
"""
test_versioning.py
All things to do with testing OS versions.
Created by Niall Richard Murphy on 2011-05-30.
"""
import config_parse
import constants
import test_infrastructure as ti
# Perhaps unittest2 is available. Try to import it, for
# those cases where we are running python 2.7.
try:
import unittest2 as unittest
except ImportError:
import unittest
def suite():
    """Assemble the bulk-versioning test cases into a TestSuite."""
    test_names = ['testVersionDefined',
                  'testVersionMenagerie']
    return unittest.TestSuite(
        [TestBulkVersioning(name) for name in test_names])
class TestBulkVersioning(ti.defaultTestConfiguration):
    """Network-wide checks on router OS versions.

    NOTE(review): ``self.cp`` (parsed config exposing ``.routers``) is
    presumably provided by ``ti.defaultTestConfiguration`` — confirm.
    """
    def testVersionDefined(self):
        # Every router must report an 'os_version' attribute; the helper
        # returns the offenders, so an empty list means success.
        results = ti.IndividualAttributesDefined(self.cp.routers, 'os_version')
        self.assertEqual(results, [],
            "These routers should have OS versions available: %s" % (results))
    def testVersionMenagerie(self):
        # Cap version sprawl: at most 3 distinct OS versions network-wide.
        distinct_os_versions = ti.GroupSetCount(self.cp.routers, 'os_version')
        self.assertTrue(distinct_os_versions <= 3,
            "You are probably running too many distinct OS versions on your network: %d" %
            distinct_os_versions)
if __name__ == '__main__':
    # Bind the built TestSuite to a distinct name: the original
    # ``suite = suite()`` rebound (shadowed) the module-level factory
    # function with its own return value.
    test_suite = suite()
    unittest.TextTestRunner(verbosity=2).run(test_suite)
| {
"content_hash": "7701910dd122f9f4d17c299eeb7de27d",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 84,
"avg_line_length": 27.523809523809526,
"alnum_prop": 0.726643598615917,
"repo_name": "niallrmurphy/pyvern",
"id": "07c47a6ab3fc16f40b74f8099231a1c3cccf8091",
"size": "1196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/test_versioning.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "912"
},
{
"name": "Python",
"bytes": "123073"
}
],
"symlink_target": ""
} |
from django import forms
from .models import Post
class PostForm(forms.ModelForm):
    """Model form for creating/editing a blog Post (title and text only)."""
    class Meta:
        model = Post
        fields = ('title', 'text',)
class ConfirmForm(forms.ModelForm):
    """Model form exposing no editable Post fields.

    Presumably used for confirmation-style POSTs (e.g. publish/delete)
    where only the submit action matters — confirm against the views.
    """
    class Meta:
        model = Post
        fields = ()
| {
"content_hash": "7d50f33c17c74321e401bd70ebc94f12",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 35,
"avg_line_length": 16.9375,
"alnum_prop": 0.5719557195571956,
"repo_name": "pkimber/blog",
"id": "6b3b5a39204b0b65d7b47220754e1d6cdb5f2afb",
"size": "297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blog/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6523"
},
{
"name": "Python",
"bytes": "17188"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Point Django at the dev settings unless the caller overrode it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.dev")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)
| {
"content_hash": "dffc5260aed14978ca3bfd0ce6d4a376",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 74,
"avg_line_length": 37.142857142857146,
"alnum_prop": 0.6557692307692308,
"repo_name": "andree1320z/deport-upao-web",
"id": "90baafe60a518e42e83009cabae892d04ef9d38b",
"size": "542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deport_upao/manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "29480"
},
{
"name": "HTML",
"bytes": "24698"
},
{
"name": "JavaScript",
"bytes": "23139"
},
{
"name": "Python",
"bytes": "24319"
}
],
"symlink_target": ""
} |
"""
SeaHorn Verification Framework
Copyright (c) 2015 Carnegie Mellon University.
All Rights Reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following acknowledgments and
disclaimers.
2. Redistributions in binary form must reproduce the
above copyright notice, this list of conditions and the following
acknowledgments and disclaimers in the documentation and/or other
materials provided with the distribution.
3. Products derived from this software may not include “Carnegie
Mellon University,” "SEI” and/or “Software Engineering Institute" in
the name of such derived product, nor shall “Carnegie Mellon
University,” "SEI” and/or “Software Engineering Institute" be used to
endorse or promote products derived from this software without prior
written permission. For written permission, please contact
permission@sei.cmu.edu.
ACKNOWLEDGMENTS AND DISCLAIMERS:
Copyright 2015 Carnegie Mellon University
This material is based upon work funded and supported by the
Department of Defense under Contract No. FA8721-05-C-0003 with
Carnegie Mellon University for the operation of the Software
Engineering Institute, a federally funded research and development
center. Moreover, this work is funded by NASA NRA Contract No. NNX14AI09G
and NSF Award No. 1422705
Any opinions, findings and conclusions or recommendations expressed in
this material are those of the author(s) and do not necessarily
reflect the views of the United States Department of Defense, NASA or NSF.
NO WARRANTY. THIS CARNEGIE MELLON UNIVERSITY AND SOFTWARE ENGINEERING
INSTITUTE MATERIAL IS FURNISHED ON AN “AS-IS” BASIS. CARNEGIE MELLON
UNIVERSITY MAKES NO WARRANTIES OF ANY KIND, EITHER EXPRESSED OR
IMPLIED, AS TO ANY MATTER INCLUDING, BUT NOT LIMITED TO, WARRANTY OF
FITNESS FOR PURPOSE OR MERCHANTABILITY, EXCLUSIVITY, OR RESULTS
OBTAINED FROM USE OF THE MATERIAL. CARNEGIE MELLON UNIVERSITY DOES NOT
MAKE ANY WARRANTY OF ANY KIND WITH RESPECT TO FREEDOM FROM PATENT,
TRADEMARK, OR COPYRIGHT INFRINGEMENT.
This material has been approved for public release and unlimited
distribution.
DM-0002198
"""
import os
try:
import benchexec.util as util
import benchexec.result as result
from benchexec.tools.template import BaseTool
except ImportError:
# fall-back solution (at least for now)
import symbiotic.benchexec.util as util
import symbiotic.benchexec.result as result
from symbiotic.benchexec.tools.template import BaseTool
class Tool(BaseTool):
    """benchexec tool-info module for the SeaHorn-F16 verifier."""

    REQUIRED_PATHS = [
        "bin",
        "include",
        "lib",
        "share"
    ]

    def executable(self):
        """Locate the ``sea_svcomp`` wrapper script."""
        return util.find_executable('sea_svcomp',
                                    os.path.join("bin", 'sea_svcomp'))

    def program_files(self, executable):
        """Expand REQUIRED_PATHS relative to the installation root."""
        install_dir = os.path.join(os.path.dirname(executable), os.path.pardir)
        patterns = (util.expand_filename_pattern(path, install_dir)
                    for path in self.REQUIRED_PATHS)
        return util.flatten(patterns)

    def name(self):
        return 'SeaHorn-F16'

    def cmdline(self, executable, options, tasks, propertyfile, rlimits):
        """Build the command line; requires exactly one task and a property file."""
        assert len(tasks) == 1
        assert propertyfile is not None
        return [executable] + options + ['--spec=' + propertyfile] + tasks

    def version(self, executable):
        return self._version_from_tool(executable)

    def determine_result(self, returncode, returnsignal, output, isTimeout):
        """Map SeaHorn's BRUNCH_STAT output (and exit status) to a benchexec result."""
        text = '\n'.join(output)
        if "BRUNCH_STAT Result TRUE" in text:
            return result.RESULT_TRUE_PROP
        if "BRUNCH_STAT Result FALSE" in text:
            if "BRUNCH_STAT Termination" in text:
                return result.RESULT_FALSE_TERMINATION
            return result.RESULT_FALSE_REACH
        if "BRUNCH_STAT Result UNKNOWN" in text:
            return result.RESULT_UNKNOWN
        if returnsignal == 9 or returnsignal == (128+9):
            # SIGKILL: either the limit checker timed us out or an
            # external kill.
            return "TIMEOUT" if isTimeout else "KILLED BY SIGNAL 9"
        if returncode != 0:
            return "ERROR ({0})".format(returncode)
        return 'FAILURE'
| {
"content_hash": "7cff194f7b61513c438bd5e0cd78ae93",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 107,
"avg_line_length": 37.18803418803419,
"alnum_prop": 0.7246609974718455,
"repo_name": "staticafi/symbiotic",
"id": "4c1d7072edfe9d19cb5cc8600a9adb7264b2bafb",
"size": "4396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/symbioticpy/symbiotic/benchexec/tools/seahorn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1494"
},
{
"name": "C",
"bytes": "70042"
},
{
"name": "C++",
"bytes": "155217"
},
{
"name": "CMake",
"bytes": "5016"
},
{
"name": "Dockerfile",
"bytes": "723"
},
{
"name": "Makefile",
"bytes": "85"
},
{
"name": "Python",
"bytes": "363105"
},
{
"name": "SWIG",
"bytes": "814"
},
{
"name": "Shell",
"bytes": "63299"
}
],
"symlink_target": ""
} |
from aiohttp import ClientSession, TCPConnector
import asyncio
from .dispatch import Dispatcher
from .utils import generateFailedResponse
async def _gather_responses(dispatcher):
    """Collect results from a dispatched :class:`Dispatcher`.

    Marks every invalid input ID with a failed-response stub, then awaits
    all dispatched task groups and merges their (possibly None) result
    dicts into a single mapping.

    :param dispatcher: a Dispatcher whose ``dispatch()`` has been called.
    :return dict: ID -> API response (or failed-response stub).
    """
    responses = {}
    # Record explicit failures for IDs the dispatcher rejected up front.
    if dispatcher.invalid and isinstance(dispatcher.invalid, dict):
        for semantic_type, curies in dispatcher.invalid.items():
            for curie in curies or []:
                responses[curie] = generateFailedResponse(curie, semantic_type)
    for task_group in dispatcher.tasks.values():
        results = await asyncio.gather(*task_group)
        for res in results:
            # Guard against None results so dict.update() cannot raise.
            if res:
                responses.update(res)
    return responses


async def asyncQuery(inputIDs, session=None):
    """Asynchronously make a list of API calls.

    :param inputIDs: the input IDs to resolve.
    :param session: optional aiohttp ClientSession; when omitted a
        temporary session is created and closed automatically.
    :return dict: ID -> API response.

    The two branches previously diverged: without a session, None partial
    results crashed ``dict.update`` and invalid IDs were silently dropped.
    Both paths now share :func:`_gather_responses`.
    """
    if session:
        dp = Dispatcher(inputIDs, session)
        dp.dispatch()
        return await _gather_responses(dp)
    # No session supplied: own one for the duration of this call.
    async with ClientSession(connector=TCPConnector(ssl=False)) as own_session:
        dp = Dispatcher(inputIDs, own_session)
        dp.dispatch()
        return await _gather_responses(dp)
def syncQuery(inputIDs, loop=None):
    """Synchronous wrapper around :func:`asyncQuery`.

    :param inputIDs: the input IDs to resolve.
    :param loop: optional asyncio event loop; when omitted a private loop
        is created for this call and closed afterwards (the original
        implementation leaked one loop per call).
    :return dict: ID -> API response.
    """
    if loop is not None:
        return loop.run_until_complete(asyncQuery(inputIDs))
    loop = asyncio.new_event_loop()
    try:
        return loop.run_until_complete(asyncQuery(inputIDs))
    finally:
        loop.close()
| {
"content_hash": "2854f2134c6329b9972fcf85015112fc",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 87,
"avg_line_length": 34.743589743589745,
"alnum_prop": 0.5933579335793358,
"repo_name": "biothings/biothings_explorer",
"id": "138e19c8abc6364d2cb88f4592dd7640458466ce",
"size": "1355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "biothings_explorer/resolve_ids/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2401542"
},
{
"name": "Jupyter Notebook",
"bytes": "14468811"
},
{
"name": "Python",
"bytes": "585318"
}
],
"symlink_target": ""
} |
"""
Salt Edge Account Information API
API Reference for services # noqa: E501
OpenAPI spec version: 5.0.0
Contact: support@saltedge.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.connection_response import ConnectionResponse # noqa: E501
from swagger_client.rest import ApiException
class TestConnectionResponse(unittest.TestCase):
    """ConnectionResponse unit test stubs."""

    def setUp(self):
        """No fixtures required for the stub test."""

    def tearDown(self):
        """Nothing to clean up."""

    def testConnectionResponse(self):
        """Test ConnectionResponse."""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.connection_response.ConnectionResponse()  # noqa: E501
if __name__ == '__main__':
    # Allow running this test module directly via the unittest CLI.
    unittest.main()
| {
"content_hash": "3b15b28dc60a5c8f3779723ab486b170",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 94,
"avg_line_length": 24.945945945945947,
"alnum_prop": 0.7009750812567714,
"repo_name": "ltowarek/budget-supervisor",
"id": "f98d400117fc4224b6e3fbe4cd8ac4fc4c5f2baa",
"size": "940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/saltedge/test/test_connection_response.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7960"
},
{
"name": "JavaScript",
"bytes": "79489"
}
],
"symlink_target": ""
} |
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.contrib.gis.gdal import OGRGeomType
class GeoIntrospectionError(Exception):
    """Internal control-flow exception: raised when a geometry column is
    missing from ``geometry_columns`` so ``get_geometry_type`` can fall
    back to the ``geography_columns`` lookup."""
    pass
class PostGISIntrospection(DatabaseIntrospection):
    """Introspection for PostGIS-enabled PostgreSQL databases.

    Extends the stock psycopg2 introspection so geometry (and, when the
    backend supports it, geography) columns are reported as
    GeometryFields with their srid/dim keyword arguments.
    """
    # Reverse dictionary for PostGIS geometry types not populated until
    # introspection is actually performed.
    postgis_types_reverse = {}
    def get_postgis_types(self):
        """
        Returns a dictionary with keys that are the PostgreSQL object
        identification integers for the PostGIS geometry and/or
        geography types (if supported).
        """
        cursor = self.connection.cursor()
        # The OID integers associated with the geometry type may
        # be different across versions; hence, this is why we have
        # to query the PostgreSQL pg_type table corresponding to the
        # PostGIS custom data types.
        oid_sql = 'SELECT "oid" FROM "pg_type" WHERE "typname" = %s'
        try:
            cursor.execute(oid_sql, ('geometry',))
            GEOM_TYPE = cursor.fetchone()[0]
            postgis_types = { GEOM_TYPE : 'GeometryField' }
            if self.connection.ops.geography:
                cursor.execute(oid_sql, ('geography',))
                GEOG_TYPE = cursor.fetchone()[0]
                # The value for the geography type is actually a tuple
                # to pass in the `geography=True` keyword to the field
                # definition.
                postgis_types[GEOG_TYPE] = ('GeometryField', {'geography' : True})
        finally:
            # Always release the cursor, even if a lookup failed.
            cursor.close()
        return postgis_types
    def get_field_type(self, data_type, description):
        if not self.postgis_types_reverse:
            # If the PostGIS types reverse dictionary is not populated, do so
            # now. In order to prevent unnecessary requests upon connection
            # intialization, the `data_types_reverse` dictionary is not updated
            # with the PostGIS custom types until introspection is actually
            # performed -- in other words, when this function is called.
            self.postgis_types_reverse = self.get_postgis_types()
            self.data_types_reverse.update(self.postgis_types_reverse)
        return super(PostGISIntrospection, self).get_field_type(data_type, description)
    def get_geometry_type(self, table_name, geo_col):
        """
        The geometry type OID used by PostGIS does not indicate the particular
        type of field that a geometry column is (e.g., whether it's a
        PointField or a PolygonField). Thus, this routine queries the PostGIS
        metadata tables to determine the geometry type,
        """
        cursor = self.connection.cursor()
        try:
            try:
                # First seeing if this geometry column is in the `geometry_columns`
                cursor.execute('SELECT "coord_dimension", "srid", "type" '
                               'FROM "geometry_columns" '
                               'WHERE "f_table_name"=%s AND "f_geometry_column"=%s',
                               (table_name, geo_col))
                row = cursor.fetchone()
                if not row: raise GeoIntrospectionError
            except GeoIntrospectionError:
                # Not a geometry column; fall back to `geography_columns`
                # when the backend supports geography types.
                if self.connection.ops.geography:
                    cursor.execute('SELECT "coord_dimension", "srid", "type" '
                                   'FROM "geography_columns" '
                                   'WHERE "f_table_name"=%s AND "f_geography_column"=%s',
                                   (table_name, geo_col))
                    row = cursor.fetchone()
                if not row:
                    raise Exception('Could not find a geometry or geography column for "%s"."%s"' %
                                    (table_name, geo_col))
            # OGRGeomType does not require GDAL and makes it easy to convert
            # from OGC geom type name to Django field.
            field_type = OGRGeomType(row[2]).django
            # Getting any GeometryField keyword arguments that are not the default.
            dim = row[0]
            srid = row[1]
            field_params = {}
            if srid != 4326:
                field_params['srid'] = srid
            if dim != 2:
                field_params['dim'] = dim
        finally:
            cursor.close()
        return field_type, field_params
| {
"content_hash": "bfd0461ab8d897cfd27282f0e8a79d30",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 95,
"avg_line_length": 47.1578947368421,
"alnum_prop": 0.56875,
"repo_name": "rimbalinux/MSISDNArea",
"id": "e364848761facbab7946946b2c88f8bb9c03625d",
"size": "4480",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "django/contrib/gis/db/backends/postgis/introspection.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "118069"
},
{
"name": "Python",
"bytes": "7281875"
}
],
"symlink_target": ""
} |
import time, sys
import usb
from threading import Thread
import logging
import cflib
from cflib.crazyflie import Crazyflie
from cfclient.utils.logconfigreader import LogConfig
from cfclient.utils.logconfigreader import LogVariable
logging.basicConfig(level=logging.INFO)
class _GetchUnix:
    """Read a single keypress from stdin (Unix).

    Puts the terminal into raw mode for the duration of the read so the
    character is returned immediately, without waiting for Enter, then
    restores the previous terminal settings even if the read fails.

    The original ``__init__`` performed dead local imports
    (``import tty, sys``) with no effect; it has been removed — the
    implicit default constructor keeps ``_GetchUnix()`` working.
    """

    def __call__(self):
        import sys, termios, tty
        fd = sys.stdin.fileno()
        # Save the terminal state so it can be restored afterwards.
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            # Always restore cooked mode, even on error/KeyboardInterrupt.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
class Main:
    """Keyboard-driven Crazyflie controller (Python 2 script).

    Spawns a console input thread ("gui") that maps single keystrokes to
    thrust/pitch/roll/yaw changes, and - once the radio link reports it is
    set up - a pulse thread that streams setpoints to the copter until the
    'x' key sets self.stopping.
    """

    def __init__(self):
        # Initial flight setpoints; pitch/roll offsets act as trim values.
        self.thrust = 25000
        self.pitch = 4
        self.roll = 2
        self.yaw = 0
        self.stopping = False  # set True by the 'x' key; both threads poll it
        self.jump = 0  # countdown of boosted-thrust pulses, armed by 'z'
        Thread(target=self.gui).start()
        self.crazyflie = Crazyflie()
        cflib.crtp.init_drivers()
        # You may need to update this value if your Crazyradio uses a different frequency.
        #self.crazyflie.open_link("radio://0/7/250K")
        self.crazyflie.open_link("radio://0/10/250K")
        #self.crazyflie.open_link("radio://0/6/1M")
        # pulse_command starts only after the link setup callback fires.
        self.crazyflie.connectSetupFinished.add_callback(self.connectSetupFinished)

    def connectSetupFinished(self, linkURI):
        # Start a separate thread to do the motor test.
        # Do not hijack the calling thread!
        Thread(target=self.pulse_command).start()

    def gui(self):
        """Blocking stdin loop: one key per iteration adjusts the setpoints.

        Keys: x=stop, r/f=thrust +/-1000, 3/4=thrust presets, e/q=yaw +/-1,
        d/a=roll +/-2, w/s=pitch -/+2, z=arm a 2-cycle thrust boost.
        """
        print "bingo"
        while self.stopping==False:
            #nb = _GetchUnix()
            # NOTE(review): reads a buffered char, so Enter is needed after
            # each key unless the commented-out raw getch above is used.
            nb = sys.stdin.read(1)
            if nb=='x':
                self.stopping = True
            if nb=='r':
                self.thrust = self.thrust + 1000
            if nb=='f':
                self.thrust = self.thrust - 1000
            if nb=='3':
                self.thrust = 35000
            if nb=='4':
                self.thrust = 39000
            if nb=='e':
                self.yaw = self.yaw + 1
            if nb=='q':
                self.yaw = self.yaw - 1
            if nb=='d':
                self.roll = self.roll + 2
            if nb=='a':
                self.roll = self.roll - 2
            if nb=='w':
                self.pitch = self.pitch - 2
            if nb=='s':
                self.pitch = self.pitch + 2
            if nb=='z':
                self.jump = 2
            # Echo the current setpoints after every keystroke.
            sys.stdout.write("thrust=")
            print self.thrust
            sys.stdout.write("yaw=")
            print self.yaw
            sys.stdout.write("pitch=")
            print self.pitch
            sys.stdout.write("roll=")
            print self.roll

    def pulse_command(self):
        """Stream setpoints every 0.15s until stopped, then shut down cleanly."""
        while self.stopping == False:
            if self.jump > 0:
                # Boost mode: temporarily add 25000 thrust for up to 2 cycles.
                lt = self.thrust + 25000
                self.crazyflie.commander.send_setpoint(self.roll, self.pitch, self.yaw, lt)
                self.jump = self.jump - 1
                sys.stdout.write("lt=")
                print lt
            else:
                self.crazyflie.commander.send_setpoint(self.roll, self.pitch, self.yaw, self.thrust)
            time.sleep(0.15)
        # Cut the motors with a zero setpoint before closing the radio link.
        self.crazyflie.commander.send_setpoint(0,0,0,0)
        time.sleep(0.1)
        self.crazyflie.close_link()
Main()
| {
"content_hash": "cb396f73509ffb37942419a9ef94ac04",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 90,
"avg_line_length": 26.12727272727273,
"alnum_prop": 0.6085594989561587,
"repo_name": "caviv/dancing-drone",
"id": "cbf193311d3c7696f96456ce17fa783c763d4b6f",
"size": "2949",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "4.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "25592"
}
],
"symlink_target": ""
} |
import datetime
import decimal
import warnings
from importlib import import_module
from django.conf import settings
from django.db.backends import utils
from django.utils import six, timezone
from django.utils.dateparse import parse_duration
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.encoding import force_text
class BaseDatabaseOperations(object):
    """
    This class encapsulates all backend-specific differences, such as the way
    a backend performs ordering or calculates the ID of a recently-inserted
    row.
    """
    compiler_module = "django.db.models.sql.compiler"

    # Integer field safe ranges by `internal_type` as documented
    # in docs/ref/models/fields.txt.
    integer_field_ranges = {
        'SmallIntegerField': (-32768, 32767),
        'IntegerField': (-2147483648, 2147483647),
        'BigIntegerField': (-9223372036854775808, 9223372036854775807),
        'PositiveSmallIntegerField': (0, 32767),
        'PositiveIntegerField': (0, 2147483647),
    }

    def __init__(self, connection):
        self.connection = connection
        # Lazily-populated module cache for compiler() lookups.
        self._cache = None

    def autoinc_sql(self, table, column):
        """
        Returns any SQL needed to support auto-incrementing primary keys, or
        None if no SQL is necessary.
        This SQL is executed when a table is created.
        """
        return None

    def bulk_batch_size(self, fields, objs):
        """
        Returns the maximum allowed batch size for the backend. The fields
        are the fields going to be inserted in the batch, the objs contains
        all the objects to be inserted.
        """
        return len(objs)

    def cache_key_culling_sql(self):
        """
        Returns an SQL query that retrieves the first cache key greater than the
        n smallest.
        This is used by the 'db' cache backend to determine where to start
        culling.
        """
        return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"

    def unification_cast_sql(self, output_field):
        """
        Given a field instance, returns the SQL necessary to cast the result of
        a union to that type. Note that the resulting string should contain a
        '%s' placeholder for the expression being cast.
        """
        return '%s'

    def date_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
        extracts a value from the given date field field_name.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')

    def date_interval_sql(self, timedelta):
        """
        Implements the date interval functionality for expressions
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')

    def date_trunc_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
        truncates the given date field field_name to a date object with only
        the given specificity.
        """
        # Fixed: message previously named the nonexistent 'datetrunc_sql()'.
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method')

    def datetime_cast_sql(self):
        """
        Returns the SQL necessary to cast a datetime value so that it will be
        retrieved as a Python datetime object instead of a string.
        This SQL should include a '%s' in place of the field's name.
        """
        return "%s"

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
        'second', returns the SQL that extracts a value from the given
        datetime field field_name, and a tuple of parameters.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method')

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
        'second', returns the SQL that truncates the given datetime field
        field_name to a datetime object with only the given specificity, and
        a tuple of parameters.
        """
        # Fixed: message previously misspelled the method as 'datetime_trunk_sql'.
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method')

    def deferrable_sql(self):
        """
        Returns the SQL necessary to make a constraint "initially deferred"
        during a CREATE TABLE statement.
        """
        return ''

    def distinct_sql(self, fields):
        """
        Returns an SQL DISTINCT clause which removes duplicate rows from the
        result set. If any fields are given, only the given fields are being
        checked for duplicates.
        """
        if fields:
            raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
        else:
            return 'DISTINCT'

    def drop_foreignkey_sql(self):
        """
        Returns the SQL command that drops a foreign key.
        """
        return "DROP CONSTRAINT"

    def drop_sequence_sql(self, table):
        """
        Returns any SQL necessary to drop the sequence for the given table.
        Returns None if no SQL is necessary.
        """
        return None

    def fetch_returned_insert_id(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table that has an auto-incrementing ID, returns the
        newly created ID.
        """
        return cursor.fetchone()[0]

    def field_cast_sql(self, db_type, internal_type):
        """
        Given a column type (e.g. 'BLOB', 'VARCHAR'), and an internal type
        (e.g. 'GenericIPAddressField'), returns the SQL necessary to cast it
        before using it in a WHERE statement. Note that the resulting string
        should contain a '%s' placeholder for the column being searched against.
        """
        return '%s'

    def force_no_ordering(self):
        """
        Returns a list used in the "ORDER BY" clause to force no ordering at
        all. Returning an empty list means that nothing will be included in the
        ordering.
        """
        return []

    def for_update_sql(self, nowait=False):
        """
        Returns the FOR UPDATE SQL clause to lock rows for an update operation.
        """
        if nowait:
            return 'FOR UPDATE NOWAIT'
        else:
            return 'FOR UPDATE'

    def fulltext_search_sql(self, field_name):
        """
        Returns the SQL WHERE clause to use in order to perform a full-text
        search of the given field_name. Note that the resulting string should
        contain a '%s' placeholder for the value being searched against.
        """
        raise NotImplementedError('Full-text search is not implemented for this database backend')

    def last_executed_query(self, cursor, sql, params):
        """
        Returns a string of the query last executed by the given cursor, with
        placeholders replaced with actual values.
        `sql` is the raw query containing placeholders, and `params` is the
        sequence of parameters. These are used by default, but this method
        exists for database backends to provide a better implementation
        according to their own quoting schemes.
        """
        # Convert params to contain Unicode values.
        def to_unicode(s):
            return force_text(s, strings_only=True, errors='replace')
        if isinstance(params, (list, tuple)):
            u_params = tuple(to_unicode(val) for val in params)
        elif params is None:
            u_params = ()
        else:
            u_params = {to_unicode(k): to_unicode(v) for k, v in params.items()}
        return six.text_type("QUERY = %r - PARAMS = %r") % (sql, u_params)

    def last_insert_id(self, cursor, table_name, pk_name):
        """
        Given a cursor object that has just performed an INSERT statement into
        a table that has an auto-incrementing ID, returns the newly created ID.
        This method also receives the table name and the name of the primary-key
        column.
        """
        return cursor.lastrowid

    def lookup_cast(self, lookup_type, internal_type=None):
        """
        Returns the string to use in a query when performing lookups
        ("contains", "like", etc). The resulting string should contain a '%s'
        placeholder for the column being searched against.
        """
        return "%s"

    def max_in_list_size(self):
        """
        Returns the maximum number of items that can be passed in a single 'IN'
        list condition, or None if the backend does not impose a limit.
        """
        return None

    def max_name_length(self):
        """
        Returns the maximum length of table and column names, or None if there
        is no limit.
        """
        return None

    def no_limit_value(self):
        """
        Returns the value to use for the LIMIT when we are wanting "LIMIT
        infinity". Returns None if the limit clause can be omitted in this case.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')

    def pk_default_value(self):
        """
        Returns the value to use during an INSERT statement to specify that
        the field should use its default value.
        """
        return 'DEFAULT'

    def prepare_sql_script(self, sql, _allow_fallback=False):
        """
        Takes a SQL script that may contain multiple lines and returns a list
        of statements to feed to successive cursor.execute() calls.
        Since few databases are able to process raw SQL scripts in a single
        cursor.execute() call and PEP 249 doesn't talk about this use case,
        the default implementation is conservative.
        """
        # Remove _allow_fallback and keep only 'return ...' in Django 1.9.
        try:
            # This import must stay inside the method because it's optional.
            import sqlparse
        except ImportError:
            if _allow_fallback:
                # Without sqlparse, fall back to the legacy (and buggy) logic.
                warnings.warn(
                    "Providing initial SQL data on a %s database will require "
                    "sqlparse in Django 1.9." % self.connection.vendor,
                    RemovedInDjango19Warning)
                from django.core.management.sql import _split_statements
                return _split_statements(sql)
            else:
                raise
        else:
            return [sqlparse.format(statement, strip_comments=True)
                    for statement in sqlparse.split(sql) if statement]

    def process_clob(self, value):
        """
        Returns the value of a CLOB column, for backends that return a locator
        object that requires additional processing.
        """
        return value

    def return_insert_id(self):
        """
        For backends that support returning the last insert ID as part
        of an insert query, this method returns the SQL and params to
        append to the INSERT query. The returned fragment should
        contain a format string to hold the appropriate column.
        """
        pass

    def compiler(self, compiler_name):
        """
        Returns the SQLCompiler class corresponding to the given name,
        in the namespace corresponding to the `compiler_module` attribute
        on this backend.
        """
        if self._cache is None:
            self._cache = import_module(self.compiler_module)
        return getattr(self._cache, compiler_name)

    def quote_name(self, name):
        """
        Returns a quoted version of the given table, index or column name. Does
        not quote the given name if it's already been quoted.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')

    def random_function_sql(self):
        """
        Returns an SQL expression that returns a random value.
        """
        return 'RANDOM()'

    def regex_lookup(self, lookup_type):
        """
        Returns the string to use in a query when performing regular expression
        lookups (using "regex" or "iregex"). The resulting string should
        contain a '%s' placeholder for the column being searched against.
        If the feature is not supported (or part of it is not supported), a
        NotImplementedError exception can be raised.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')

    def savepoint_create_sql(self, sid):
        """
        Returns the SQL for starting a new savepoint. Only required if the
        "uses_savepoints" feature is True. The "sid" parameter is a string
        for the savepoint id.
        """
        return "SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_commit_sql(self, sid):
        """
        Returns the SQL for committing the given savepoint.
        """
        return "RELEASE SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_rollback_sql(self, sid):
        """
        Returns the SQL for rolling back the given savepoint.
        """
        return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)

    def set_time_zone_sql(self):
        """
        Returns the SQL that will set the connection's time zone.
        Returns '' if the backend doesn't support time zones.
        """
        return ''

    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        """
        Returns a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves).
        The returned value also includes SQL statements required to reset DB
        sequences passed in :param sequences:.
        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing the tables being truncated.
        PostgreSQL requires a cascade even if these tables are empty.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations must provide a sql_flush() method')

    def sequence_reset_by_name_sql(self, style, sequences):
        """
        Returns a list of the SQL statements required to reset sequences
        passed in :param sequences:.
        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []

    def sequence_reset_sql(self, style, model_list):
        """
        Returns a list of the SQL statements required to reset sequences for
        the given models.
        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []  # No sequence reset required by default.

    def start_transaction_sql(self):
        """
        Returns the SQL statement required to start a transaction.
        """
        return "BEGIN;"

    def end_transaction_sql(self, success=True):
        """
        Returns the SQL statement required to end a transaction.
        """
        if not success:
            return "ROLLBACK;"
        return "COMMIT;"

    def tablespace_sql(self, tablespace, inline=False):
        """
        Returns the SQL that will be used in a query to define the tablespace.
        Returns '' if the backend doesn't support tablespaces.
        If inline is True, the SQL is appended to a row; otherwise it's appended
        to the entire CREATE TABLE or CREATE INDEX statement.
        """
        return ''

    def prep_for_like_query(self, x):
        """Prepares a value for use in a LIKE query."""
        # "\\%" / "\\_" are the same runtime strings as the old "\%" / "\_"
        # but avoid invalid-escape-sequence warnings on modern Pythons.
        return force_text(x).replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")

    # Same as prep_for_like_query(), but called for "iexact" matches, which
    # need not necessarily be implemented using "LIKE" in the backend.
    prep_for_iexact_query = prep_for_like_query

    def validate_autopk_value(self, value):
        """
        Certain backends do not accept some values for "serial" fields
        (for example zero in MySQL). This method will raise a ValueError
        if the value is invalid, otherwise returns validated value.
        """
        return value

    def value_to_db_date(self, value):
        """
        Transforms a date value to an object compatible with what is expected
        by the backend driver for date columns.
        """
        if value is None:
            return None
        return six.text_type(value)

    def value_to_db_datetime(self, value):
        """
        Transforms a datetime value to an object compatible with what is expected
        by the backend driver for datetime columns.
        """
        if value is None:
            return None
        return six.text_type(value)

    def value_to_db_time(self, value):
        """
        Transforms a time value to an object compatible with what is expected
        by the backend driver for time columns.
        """
        if value is None:
            return None
        if timezone.is_aware(value):
            raise ValueError("Django does not support timezone-aware times.")
        return six.text_type(value)

    def value_to_db_decimal(self, value, max_digits, decimal_places):
        """
        Transforms a decimal.Decimal value to an object compatible with what is
        expected by the backend driver for decimal (numeric) columns.
        """
        return utils.format_number(value, max_digits, decimal_places)

    def value_to_db_ipaddress(self, value):
        """
        Transforms a string representation of an IP address into the expected
        type for the backend driver.
        """
        return value or None

    def year_lookup_bounds_for_date_field(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateField value using a year
        lookup.
        `value` is an int, containing the looked-up year.
        """
        first = datetime.date(value, 1, 1)
        second = datetime.date(value, 12, 31)
        return [first, second]

    def year_lookup_bounds_for_datetime_field(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateTimeField value using a year
        lookup.
        `value` is an int, containing the looked-up year.
        """
        first = datetime.datetime(value, 1, 1)
        second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
        if settings.USE_TZ:
            tz = timezone.get_current_timezone()
            first = timezone.make_aware(first, tz)
            second = timezone.make_aware(second, tz)
        return [first, second]

    def get_db_converters(self, expression):
        """Get a list of functions needed to convert field data.
        Some field types on some backends do not provide data in the correct
        format, this is the hook for converter functions.
        """
        return []

    def convert_durationfield_value(self, value, expression, connection, context):
        # Stored microseconds -> seconds string -> timedelta.
        if value is not None:
            value = str(decimal.Decimal(value) / decimal.Decimal(1000000))
            value = parse_duration(value)
        return value

    def check_aggregate_support(self, aggregate_func):
        return self.check_expression_support(aggregate_func)

    def check_expression_support(self, expression):
        """
        Check that the backend supports the provided expression.
        This is used on specific backends to rule out known expressions
        that have problematic or nonexistent implementations. If the
        expression has a known problem, the backend should raise
        NotImplementedError.
        """
        pass

    def combine_expression(self, connector, sub_expressions):
        """Combine a list of subexpressions into a single expression, using
        the provided connecting operator. This is required because operators
        can vary between backends (e.g., Oracle with %% and &) and between
        subexpression types (e.g., date expressions)
        """
        conn = ' %s ' % connector
        return conn.join(sub_expressions)

    def combine_duration_expression(self, connector, sub_expressions):
        return self.combine_expression(connector, sub_expressions)

    def modify_insert_params(self, placeholders, params):
        """Allow modification of insert parameters. Needed for Oracle Spatial
        backend due to #10888.
        """
        return params

    def integer_field_range(self, internal_type):
        """
        Given an integer field internal type (e.g. 'PositiveIntegerField'),
        returns a tuple of the (min_value, max_value) form representing the
        range of the column type bound to the field.
        """
        return self.integer_field_ranges[internal_type]
| {
"content_hash": "ef04ed5d4fd261b4da8f1b7cdba46cd2",
"timestamp": "",
"source": "github",
"line_count": 566,
"max_line_length": 117,
"avg_line_length": 37.86219081272085,
"alnum_prop": 0.6327111525898274,
"repo_name": "52ai/django-ccsds",
"id": "8a8209f7a6975264f561993593b837bfff337edb",
"size": "21430",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "django/db/backends/base/operations.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "43623"
},
{
"name": "HTML",
"bytes": "173769"
},
{
"name": "JavaScript",
"bytes": "106416"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "10925166"
},
{
"name": "Shell",
"bytes": "934"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.